# parser_eval.py
  1. # Copyright 2016 Google Inc. All Rights Reserved.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. """A program to annotate a conll file with a tensorflow neural net parser."""
  16. import os
  17. import os.path
  18. import time
  19. import tempfile
  20. import tensorflow as tf
  21. from tensorflow.python.platform import gfile
  22. from tensorflow.python.platform import tf_logging as logging
  23. from google.protobuf import text_format
  24. from syntaxnet import sentence_pb2
  25. from syntaxnet import graph_builder
  26. from syntaxnet import structured_graph_builder
  27. from syntaxnet.ops import gen_parser_ops
  28. from syntaxnet import task_spec_pb2
  29. flags = tf.app.flags
  30. FLAGS = flags.FLAGS
  31. flags.DEFINE_string('task_context', '',
  32. 'Path to a task context with inputs and parameters for '
  33. 'feature extractors.')
  34. flags.DEFINE_string('resource_dir', '',
  35. 'Optional base directory for task context resources.')
  36. flags.DEFINE_string('model_path', '', 'Path to model parameters.')
  37. flags.DEFINE_string('arg_prefix', None, 'Prefix for context parameters.')
  38. flags.DEFINE_string('graph_builder', 'greedy',
  39. 'Which graph builder to use, either greedy or structured.')
  40. flags.DEFINE_string('input', 'stdin',
  41. 'Name of the context input to read data from.')
  42. flags.DEFINE_string('output', 'stdout',
  43. 'Name of the context input to write data to.')
  44. flags.DEFINE_string('hidden_layer_sizes', '200,200',
  45. 'Comma separated list of hidden layer sizes.')
  46. flags.DEFINE_integer('batch_size', 32,
  47. 'Number of sentences to process in parallel.')
  48. flags.DEFINE_integer('beam_size', 8, 'Number of slots for beam parsing.')
  49. flags.DEFINE_integer('max_steps', 1000, 'Max number of steps to take.')
  50. flags.DEFINE_bool('slim_model', False,
  51. 'Whether to expect only averaged variables.')
  52. def RewriteContext(task_context):
  53. context = task_spec_pb2.TaskSpec()
  54. with gfile.FastGFile(task_context, 'rb') as fin:
  55. text_format.Merge(fin.read(), context)
  56. for resource in context.input:
  57. for part in resource.part:
  58. if part.file_pattern != '-':
  59. part.file_pattern = os.path.join(FLAGS.resource_dir, part.file_pattern)
  60. with tempfile.NamedTemporaryFile(delete=False) as fout:
  61. fout.write(str(context))
  62. return fout.name
  63. def Eval(sess):
  64. """Builds and evaluates a network."""
  65. task_context = FLAGS.task_context
  66. if FLAGS.resource_dir:
  67. task_context = RewriteContext(task_context)
  68. feature_sizes, domain_sizes, embedding_dims, num_actions = sess.run(
  69. gen_parser_ops.feature_size(task_context=task_context,
  70. arg_prefix=FLAGS.arg_prefix))
  71. t = time.time()
  72. hidden_layer_sizes = map(int, FLAGS.hidden_layer_sizes.split(','))
  73. logging.info('Building training network with parameters: feature_sizes: %s '
  74. 'domain_sizes: %s', feature_sizes, domain_sizes)
  75. if FLAGS.graph_builder == 'greedy':
  76. parser = graph_builder.GreedyParser(num_actions,
  77. feature_sizes,
  78. domain_sizes,
  79. embedding_dims,
  80. hidden_layer_sizes,
  81. gate_gradients=True,
  82. arg_prefix=FLAGS.arg_prefix)
  83. else:
  84. parser = structured_graph_builder.StructuredGraphBuilder(
  85. num_actions,
  86. feature_sizes,
  87. domain_sizes,
  88. embedding_dims,
  89. hidden_layer_sizes,
  90. gate_gradients=True,
  91. arg_prefix=FLAGS.arg_prefix,
  92. beam_size=FLAGS.beam_size,
  93. max_steps=FLAGS.max_steps)
  94. parser.AddEvaluation(task_context,
  95. FLAGS.batch_size,
  96. corpus_name=FLAGS.input,
  97. evaluation_max_steps=FLAGS.max_steps)
  98. parser.AddSaver(FLAGS.slim_model)
  99. sess.run(parser.inits.values())
  100. parser.saver.restore(sess, FLAGS.model_path)
  101. sink_documents = tf.placeholder(tf.string)
  102. sink = gen_parser_ops.document_sink(sink_documents,
  103. task_context=task_context,
  104. corpus_name=FLAGS.output)
  105. t = time.time()
  106. num_epochs = None
  107. num_tokens = 0
  108. num_correct = 0
  109. num_documents = 0
  110. while True:
  111. tf_eval_epochs, tf_eval_metrics, tf_documents = sess.run([
  112. parser.evaluation['epochs'],
  113. parser.evaluation['eval_metrics'],
  114. parser.evaluation['documents'],
  115. ])
  116. if len(tf_documents):
  117. logging.info('Processed %d documents', len(tf_documents))
  118. num_documents += len(tf_documents)
  119. sess.run(sink, feed_dict={sink_documents: tf_documents})
  120. num_tokens += tf_eval_metrics[0]
  121. num_correct += tf_eval_metrics[1]
  122. if num_epochs is None:
  123. num_epochs = tf_eval_epochs
  124. elif num_epochs < tf_eval_epochs:
  125. break
  126. logging.info('Total processed documents: %d', num_documents)
  127. if num_tokens > 0:
  128. eval_metric = 100.0 * num_correct / num_tokens
  129. logging.info('num correct tokens: %d', num_correct)
  130. logging.info('total tokens: %d', num_tokens)
  131. logging.info('Seconds elapsed in evaluation: %.2f, '
  132. 'eval metric: %.2f%%', time.time() - t, eval_metric)
  133. def main(unused_argv):
  134. logging.set_verbosity(logging.INFO)
  135. with tf.Session() as sess:
  136. Eval(sess)
  137. if __name__ == '__main__':
  138. tf.app.run()