Updating TF submodule to latest and adding a Dockerfile example. (#117)

* Updating TF submodule to latest and adding a Dockerfile example.

* Extra \n in ./configure.

* Updating Dockerfile to use bazel 0.2.2b.

* Replacing a mention of treebank_union with PMP in the README.
calberti committed 9 years ago
commit f1a489261b

+ 33 - 0
syntaxnet/Dockerfile

@@ -0,0 +1,33 @@
+FROM java:8
+
+ENV SYNTAXNETDIR=/opt/tensorflow PATH=$PATH:/root/bin
+
+RUN mkdir -p $SYNTAXNETDIR \
+    && cd $SYNTAXNETDIR \
+    && apt-get update \
+    && apt-get install git zlib1g-dev file swig python2.7 python-dev python-pip -y \
+    && pip install --upgrade pip \
+    && pip install -U protobuf==3.0.0b2 \
+    && pip install asciitree \
+    && pip install numpy \
+    && wget https://github.com/bazelbuild/bazel/releases/download/0.2.2b/bazel-0.2.2b-installer-linux-x86_64.sh \
+    && chmod +x bazel-0.2.2b-installer-linux-x86_64.sh \
+    && ./bazel-0.2.2b-installer-linux-x86_64.sh --user \
+    && git clone --recursive https://github.com/tensorflow/models.git \
+    && cd $SYNTAXNETDIR/models/syntaxnet/tensorflow \
+    && echo "\n\n\n" | ./configure
+
+RUN cd $SYNTAXNETDIR/models/syntaxnet \
+    && bazel test --genrule_strategy=standalone syntaxnet/... util/utf8/... \
+    && apt-get autoremove -y \
+    && apt-get clean
+
+WORKDIR $SYNTAXNETDIR/models/syntaxnet
+
+CMD [ "sh", "-c", "echo 'Bob brought the pizza to Alice.' | syntaxnet/demo.sh" ]
+
+# COMMANDS to build and run
+# ===============================
+# mkdir build && cp Dockerfile build/ && cd build
+# docker build -t syntaxnet .
+# docker run syntaxnet

+ 7 - 4
syntaxnet/README.md

@@ -73,9 +73,8 @@ Running and training SyntaxNet models requires building this package from
 source. You'll need to install:
 
 *   bazel:
+    *   **versions 0.2.0 - 0.2.2b, NOT 0.2.3**
     *   follow the instructions [here](http://bazel.io/docs/install.html)
-    *   **Note: You must use bazel version 0.2.2, NOT 0.2.2b, due to a WORKSPACE
-        issue**
 *   swig:
     *   `apt-get install swig` on Ubuntu
     *   `brew install swig` on OSX
@@ -86,7 +85,7 @@ source. You'll need to install:
     *   `pip install asciitree`
 *   numpy, package for scientific computing:
     *   `pip install numpy`
-    
+
 Once you completed the above steps, you can build and test SyntaxNet with the
 following commands:
 
@@ -103,6 +102,9 @@ following commands:
 
 Bazel should complete reporting all tests passed.
 
+You can also compile SyntaxNet in a [Docker](https://www.docker.com/what-docker)
+container using this [Dockerfile](Dockerfile).
+
 ## Getting Started
 
 Once you have successfully built SyntaxNet, you can start parsing text right
@@ -144,7 +146,8 @@ To change the pipeline to read and write to specific files (as opposed to piping
 through stdin and stdout), we have to modify the `demo.sh` to point to the files
 we want. The SyntaxNet models are configured via a combination of run-time flags
 (which are easy to change) and a text format `TaskSpec` protocol buffer. The
-spec file used in the demo is in `syntaxnet/models/treebank_union/context`.
+spec file used in the demo is in
+`syntaxnet/models/parsey_mcparseface/context.pbtxt`.
 
 To use corpora instead of stdin/stdout, we have to:
 

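The README change above points the demo at `syntaxnet/models/parsey_mcparseface/context.pbtxt`. As a rough sketch of how that `TaskSpec` can be redirected from stdin to concrete files, the snippet below parses the spec and rewrites one input's file pattern. The field names (`input`, `name`, `part`, `file_pattern`) are assumed from `syntaxnet/task_spec.proto`, and the input name `'stdin-conll'` and paths are purely illustrative; this is not part of the commit.

```python
# Sketch (not part of this commit): point a named TaskSpec input at a file
# instead of stdin, assuming the field names in syntaxnet/task_spec.proto.
from google.protobuf import text_format
from syntaxnet import task_spec_pb2

CONTEXT = 'syntaxnet/models/parsey_mcparseface/context.pbtxt'

spec = task_spec_pb2.TaskSpec()
with open(CONTEXT) as fin:
  text_format.Merge(fin.read(), spec)

# Redirect a hypothetical input named 'stdin-conll' to a concrete corpus file.
for resource in spec.input:
  if resource.name == 'stdin-conll':
    for part in resource.part:
      part.file_pattern = '/path/to/my_corpus.conll'

with open('my_context.pbtxt', 'w') as fout:
  fout.write(text_format.MessageToString(spec))
```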
+ 1 - 1
syntaxnet/syntaxnet/BUILD

@@ -626,7 +626,7 @@ py_test(
 
 sh_test(
     name = "parser_trainer_test",
-    size = "medium",
+    size = "large",
     srcs = ["parser_trainer_test.sh"],
     data = [
         ":parser_eval",

+ 1 - 1
syntaxnet/syntaxnet/beam_reader_ops_test.py

@@ -23,7 +23,7 @@ import tensorflow as tf
 
 from tensorflow.python.framework import test_util
 from tensorflow.python.platform import googletest
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 
 from syntaxnet import structured_graph_builder
 from syntaxnet.ops import gen_parser_ops
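The same one-line change recurs in the Python files below: the updated TensorFlow submodule exposes its logging helpers as `tf_logging`, so the old `tensorflow.python.platform.logging` import is replaced while call sites keep the `logging` alias. A minimal sketch of the resulting pattern:

```python
# Minimal sketch: import the renamed module under the familiar alias,
# so existing logging.info(...) call sites keep working unchanged.
from tensorflow.python.platform import tf_logging as logging

logging.info('Processed %d sentences.', 100)
```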

+ 1 - 1
syntaxnet/syntaxnet/conll2tree.py

@@ -21,7 +21,7 @@ import tensorflow as tf
 
 import syntaxnet.load_parser_ops
 
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 from syntaxnet import sentence_pb2
 from syntaxnet.ops import gen_parser_ops
 

+ 1 - 1
syntaxnet/syntaxnet/graph_builder.py

@@ -21,7 +21,7 @@ import syntaxnet.load_parser_ops
 
 from tensorflow.python.ops import control_flow_ops as cf
 from tensorflow.python.ops import state_ops
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 
 from syntaxnet.ops import gen_parser_ops
 

+ 1 - 1
syntaxnet/syntaxnet/lexicon_builder_test.py

@@ -26,7 +26,7 @@ import syntaxnet.load_parser_ops
 
 from tensorflow.python.framework import test_util
 from tensorflow.python.platform import googletest
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 
 from syntaxnet import sentence_pb2
 from syntaxnet import task_spec_pb2

+ 1 - 1
syntaxnet/syntaxnet/parser_eval.py

@@ -23,7 +23,7 @@ import time
 import tensorflow as tf
 
 from tensorflow.python.platform import gfile
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 from syntaxnet import sentence_pb2
 from syntaxnet import graph_builder
 from syntaxnet import structured_graph_builder

+ 1 - 1
syntaxnet/syntaxnet/parser_trainer.py

@@ -24,7 +24,7 @@ import time
 import tensorflow as tf
 
 from tensorflow.python.platform import gfile
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 
 from google.protobuf import text_format
 

+ 1 - 1
syntaxnet/syntaxnet/reader_ops_test.py

@@ -24,7 +24,7 @@ import tensorflow as tf
 from tensorflow.python.framework import test_util
 from tensorflow.python.ops import control_flow_ops as cf
 from tensorflow.python.platform import googletest
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 
 from syntaxnet import dictionary_pb2
 from syntaxnet import graph_builder

+ 12 - 5
syntaxnet/syntaxnet/syntaxnet.bzl

@@ -16,11 +16,18 @@
 load("@tf//google/protobuf:protobuf.bzl", "cc_proto_library")
 load("@tf//google/protobuf:protobuf.bzl", "py_proto_library")
 
-def if_cuda(a, b=[]):
-  return select({
-      "@tf//third_party/gpus/cuda:cuda_crosstool_condition": a,
-      "//conditions:default": b,
-  })
+def if_cuda(if_true, if_false = []):
+    """Shorthand for select()'ing on whether we're building with CUDA.
+
+    Returns a select statement which evaluates to if_true if we're building
+    with CUDA enabled.  Otherwise, the select statement evaluates to if_false.
+
+    """
+    return select({
+        "@tf//third_party/gpus/cuda:using_nvcc": if_true,
+        "@tf//third_party/gpus/cuda:using_gcudacc": if_true,
+        "//conditions:default": if_false
+    })
 
 def tf_copts():
   return (["-fno-exceptions", "-DEIGEN_AVOID_STL_ARRAY",] +

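For context on the `if_cuda` rewrite above, a hypothetical BUILD-file usage is sketched below; the load label, target name, and `-DGOOGLE_CUDA=1` flag are illustrative assumptions, not taken from this commit.

```python
# Hypothetical usage (sketch): add extra copts only when building with CUDA.
load("//syntaxnet:syntaxnet.bzl", "if_cuda", "tf_copts")

cc_library(
    name = "example_gpu_kernels",
    srcs = ["example_kernels.cc"],
    copts = tf_copts() + if_cuda(["-DGOOGLE_CUDA=1"]),
)
```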
+ 1 - 1
syntaxnet/syntaxnet/text_formats_test.py

@@ -26,7 +26,7 @@ import syntaxnet.load_parser_ops
 
 from tensorflow.python.framework import test_util
 from tensorflow.python.platform import googletest
-from tensorflow.python.platform import logging
+from tensorflow.python.platform import tf_logging as logging
 
 from syntaxnet import sentence_pb2
 from syntaxnet import task_spec_pb2

+ 1 - 1
syntaxnet/tensorflow

@@ -1 +1 @@
-Subproject commit 3402f51ecd11a26d0c071b1d06b4edab1b0ef351
+Subproject commit 712e41cf8b316ef2c33c6dd7fd6ade2b4e93ddc0