# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
  15. """A program to train a tensorflow neural net segmenter from a conll file."""
  16. import base64
  17. import os
  18. import os.path
  19. import random
  20. import time
  21. import tensorflow as tf
  22. from tensorflow.python.platform import gfile
  23. from tensorflow.python.platform import tf_logging as logging
  24. from google.protobuf import text_format
  25. from syntaxnet.ops import gen_parser_ops
  26. from syntaxnet import task_spec_pb2
  27. from syntaxnet import sentence_pb2
  28. from dragnn.protos import spec_pb2
  29. from dragnn.python.sentence_io import ConllSentenceReader
  30. from dragnn.python import evaluation
  31. from dragnn.python import graph_builder
  32. from dragnn.python import lexicon
  33. from dragnn.python import spec_builder
  34. from dragnn.python import trainer_lib
  35. import dragnn.python.load_dragnn_cc_impl
  36. import syntaxnet.load_parser_ops

flags = tf.app.flags
FLAGS = flags.FLAGS

flags.DEFINE_string('tf_master', '',
                    'TensorFlow execution engine to connect to.')
flags.DEFINE_string('resource_path', '', 'Path to constructed resources.')
flags.DEFINE_string('tensorboard_dir', '',
                    'Directory for TensorBoard logs output.')
flags.DEFINE_string('checkpoint_filename', '',
                    'Filename to save the best checkpoint to.')
flags.DEFINE_string('training_corpus_path', '', 'Path to training data.')
flags.DEFINE_string('dev_corpus_path', '', 'Path to development set data.')
flags.DEFINE_bool('compute_lexicon', False,
                  'Whether to compute the lexicon from the training corpus.')
flags.DEFINE_bool('projectivize_training_set', True,
                  'Whether to projectivize the training set.')
flags.DEFINE_integer('num_epochs', 10, 'Number of epochs to train for.')
flags.DEFINE_integer('batch_size', 4, 'Batch size.')
flags.DEFINE_integer(
    'report_every', 500,
    'Report cost and training accuracy every this many steps.')
flags.DEFINE_string(
    'hyperparams',
    'decay_steps:32000 dropout_rate:0.8 gradient_clip_norm:1 '
    'learning_method:"momentum" learning_rate:0.1 seed:1 '
    'momentum:0.95 use_moving_average:true',
    'Hyperparameters of the model to train, either in ProtoBuf text format '
    'or base64-encoded ProtoBuf text format.')
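

# Example invocation (the paths below are illustrative, not defaults):
#   python segmenter_trainer.py \
#     --compute_lexicon \
#     --resource_path=/tmp/segmenter/resources \
#     --training_corpus_path=/path/to/train.conll \
#     --dev_corpus_path=/path/to/dev.conll \
#     --checkpoint_filename=/tmp/segmenter/checkpoint \
#     --tensorboard_dir=/tmp/segmenter/tensorboard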
def main(unused_argv):
  logging.set_verbosity(logging.INFO)

  if not gfile.IsDirectory(FLAGS.resource_path):
    gfile.MakeDirs(FLAGS.resource_path)

  # Constructs lexical resources for SyntaxNet in the given resource path, from
  # the training data.
  if FLAGS.compute_lexicon:
    logging.info('Computing lexicon...')
    lexicon.build_lexicon(FLAGS.resource_path, FLAGS.training_corpus_path)

  # Construct the "lookahead" ComponentSpec. This is a simple right-to-left RNN
  # sequence model, which encodes the context to the right of each token. It
  # has no loss except for the downstream components.
  lookahead = spec_builder.ComponentSpecBuilder('lookahead')
  lookahead.set_network_unit(
      name='wrapped_units.LayerNormBasicLSTMNetwork', hidden_layer_sizes='256')
  lookahead.set_transition_system(name='shift-only', left_to_right='false')
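  # Character unigrams in a window of one character to each side of the input,
  # plus the current character bigram.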
  lookahead.add_fixed_feature(name='char',
                              fml='input(-1).char input.char input(1).char',
                              embedding_dim=32)
  lookahead.add_fixed_feature(name='char-bigram',
                              fml='input.char-bigram',
                              embedding_dim=32)
  lookahead.fill_from_resources(FLAGS.resource_path, FLAGS.tf_master)

  # Construct the ComponentSpec for segmentation. The binary-segment-transitions
  # system makes one binary decision per character: start a new word at this
  # character, or merge it with the previous one.
  segmenter = spec_builder.ComponentSpecBuilder('segmenter')
  segmenter.set_network_unit(
      name='wrapped_units.LayerNormBasicLSTMNetwork', hidden_layer_sizes='128')
  segmenter.set_transition_system(name='binary-segment-transitions')
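  # Link the segmenter to the lookahead RNN: 'input.focus stack.focus' reads
  # the lookahead activations at the focus token of the input and of the stack.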
  segmenter.add_token_link(
      source=lookahead, fml='input.focus stack.focus',
      embedding_dim=64)
  segmenter.fill_from_resources(FLAGS.resource_path, FLAGS.tf_master)

  # Build and write master_spec.
  master_spec = spec_pb2.MasterSpec()
  master_spec.component.extend([lookahead.spec, segmenter.spec])
  logging.info('Constructed master spec: %s', str(master_spec))
  with gfile.GFile(FLAGS.resource_path + '/master_spec', 'w') as f:
    f.write(str(master_spec).encode('utf-8'))

  # Parse the hyperparameters: the flag holds either ProtoBuf text format or
  # base64-encoded ProtoBuf text format.
  hyperparam_config = spec_pb2.GridPoint()
  try:
    text_format.Parse(FLAGS.hyperparams, hyperparam_config)
  except text_format.ParseError:
    text_format.Parse(base64.b64decode(FLAGS.hyperparams), hyperparam_config)
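  # Either form works at the command line; e.g. (illustrative shell usage):
  #   --hyperparams='learning_rate:0.05 momentum:0.9'
  #   --hyperparams="$(printf 'learning_rate:0.05 momentum:0.9' | base64)"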

  # Build the TensorFlow graph.
  graph = tf.Graph()
  with graph.as_default():
    builder = graph_builder.MasterBuilder(master_spec, hyperparam_config)
    component_targets = spec_builder.default_targets_from_spec(master_spec)
    trainers = [
        builder.add_training_from_config(target) for target in component_targets
    ]
    # Only the segmenter has a training target; the lookahead component has no
    # loss of its own and trains through the link.
    assert len(trainers) == 1
    annotator = builder.add_annotation()
    builder.add_saver()

  # Read in serialized protos from training data.
  training_set = ConllSentenceReader(
      FLAGS.training_corpus_path, projectivize=False).corpus()
  dev_set = ConllSentenceReader(
      FLAGS.dev_corpus_path, projectivize=False).corpus()

  # Convert word-based docs to char-based documents for segmentation training
  # and evaluation. The training-data constructor keeps the gold word
  # boundaries, while char_token_generator only splits the dev documents into
  # characters, since the boundaries there are what the model must predict.
  with tf.Session(graph=tf.Graph()) as tmp_session:
    char_training_set_op = gen_parser_ops.segmenter_training_data_constructor(
        training_set)
    char_dev_set_op = gen_parser_ops.char_token_generator(dev_set)
    char_training_set = tmp_session.run(char_training_set_op)
    char_dev_set = tmp_session.run(char_dev_set_op)

  # Ready to train!
  logging.info('Training on %d sentences.', len(training_set))
  logging.info('Tuning on %d sentences.', len(dev_set))

  pretrain_steps = [0]
  train_steps = [FLAGS.num_epochs * len(training_set)]

  tf.logging.info('Creating TensorFlow checkpoint dir...')
  gfile.MakeDirs(os.path.dirname(FLAGS.checkpoint_filename))
  summary_writer = trainer_lib.get_summary_writer(FLAGS.tensorboard_dir)
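
  # run_training alternates batched training steps with periodic evaluation on
  # the dev set, writes summaries for TensorBoard, and saves the best-scoring
  # checkpoint to FLAGS.checkpoint_filename.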
  with tf.Session(FLAGS.tf_master, graph=graph) as sess:
    # Make sure to re-initialize all underlying state.
    sess.run(tf.global_variables_initializer())
    trainer_lib.run_training(
        sess, trainers, annotator, evaluation.segmentation_summaries,
        pretrain_steps, train_steps, char_training_set, char_dev_set, dev_set,
        FLAGS.batch_size, summary_writer, FLAGS.report_every, builder.saver,
        FLAGS.checkpoint_filename)


if __name__ == '__main__':
  tf.app.run()