Christopher Shallue, 8 years ago
Parent
Commit 7fb8b0f24d

+ 1 - 1
im2txt/im2txt/ops/image_processing.py

@@ -92,7 +92,7 @@ def process_image(encoded_image,
   # only logged in thread 0.
   def image_summary(name, image):
     if not thread_id:
-      tf.image_summary(name, tf.expand_dims(image, 0))
+      tf.summary.image(name, tf.expand_dims(image, 0))
 
   # Decode image into a float32 Tensor of shape [?, ?, 3] with values in [0, 1).
   with tf.name_scope("decode", values=[encoded_image]):
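
This hunk swaps the deprecated tf.image_summary for tf.summary.image, part of the TensorFlow r0.12 summary-API reorganization. A minimal, self-contained sketch of the new call under TF 1.x graph mode (the dummy tensor and tag name are illustrative, not from the commit):

  import tensorflow as tf

  # Dummy [height, width, channels] image with values in [0, 1).
  image = tf.random_uniform([224, 224, 3])
  # tf.summary.image expects a 4-D [batch, h, w, c] tensor, hence the
  # expand_dims in the diff above.
  tf.summary.image("example_image", tf.expand_dims(image, 0))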

+ 4 - 4
im2txt/im2txt/ops/inputs.py

@@ -116,7 +116,7 @@ def prefetch_input_data(reader,
     enqueue_ops.append(values_queue.enqueue([value]))
   tf.train.queue_runner.add_queue_runner(tf.train.queue_runner.QueueRunner(
       values_queue, enqueue_ops))
-  tf.scalar_summary(
+  tf.summary.scalar(
       "queue/%s/fraction_of_%d_full" % (values_queue.name, capacity),
       tf.cast(values_queue.size(), tf.float32) * (1. / capacity))
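
The renamed tf.summary.scalar call reports queue fullness as size / capacity, a value in [0, 1]. A hedged, standalone sketch of the same pattern (the FIFOQueue here merely stands in for the values_queue built in prefetch_input_data):

  import tensorflow as tf

  capacity = 16
  # Illustrative queue standing in for the prefetch values_queue.
  queue = tf.FIFOQueue(capacity, dtypes=[tf.string])
  tf.summary.scalar(
      "queue/%s/fraction_of_%d_full" % (queue.name, capacity),
      tf.cast(queue.size(), tf.float32) * (1. / capacity))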
 
@@ -197,8 +197,8 @@ def batch_with_dynamic_pad(images_and_captions,
 
   if add_summaries:
     lengths = tf.add(tf.reduce_sum(mask, 1), 1)
-    tf.scalar_summary("caption_length/batch_min", tf.reduce_min(lengths))
-    tf.scalar_summary("caption_length/batch_max", tf.reduce_max(lengths))
-    tf.scalar_summary("caption_length/batch_mean", tf.reduce_mean(lengths))
+    tf.summary.scalar("caption_length/batch_min", tf.reduce_min(lengths))
+    tf.summary.scalar("caption_length/batch_max", tf.reduce_max(lengths))
+    tf.summary.scalar("caption_length/batch_mean", tf.reduce_mean(lengths))
 
   return images, input_seqs, target_seqs, mask
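
The three renamed scalar summaries track per-batch caption-length statistics. A small sketch of how lengths is derived from the 0/1 padding mask (the constant mask is made up for illustration):

  import tensorflow as tf

  # Dummy [batch, padded_length] mask: 1 marks a real token, 0 marks padding.
  mask = tf.constant([[1, 1, 1, 0],
                      [1, 1, 0, 0]])
  # Per-example lengths, one more than the mask sum, as in the diff above.
  lengths = tf.add(tf.reduce_sum(mask, 1), 1)
  tf.summary.scalar("caption_length/batch_min", tf.reduce_min(lengths))
  tf.summary.scalar("caption_length/batch_max", tf.reduce_max(lengths))
  tf.summary.scalar("caption_length/batch_mean", tf.reduce_mean(lengths))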

+ 5 - 5
im2txt/im2txt/show_and_tell_model.py

@@ -311,14 +311,14 @@ class ShowAndTellModel(object):
       batch_loss = tf.div(tf.reduce_sum(tf.mul(losses, weights)),
                           tf.reduce_sum(weights),
                           name="batch_loss")
-      tf.contrib.losses.add_loss(batch_loss)
-      total_loss = tf.contrib.losses.get_total_loss()
+      tf.losses.add_loss(batch_loss)
+      total_loss = tf.losses.get_total_loss()
 
       # Add summaries.
-      tf.scalar_summary("batch_loss", batch_loss)
-      tf.scalar_summary("total_loss", total_loss)
+      tf.summary.scalar("losses/batch_loss", batch_loss)
+      tf.summary.scalar("losses/total_loss", total_loss)
       for var in tf.trainable_variables():
-        tf.histogram_summary(var.op.name, var)
+        tf.summary.histogram("parameters/" + var.op.name, var)
 
       self.total_loss = total_loss
       self.target_cross_entropy_losses = losses  # Used in evaluation.
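
Besides regrouping the summary tags under losses/ and parameters/, this hunk moves from tf.contrib.losses to the core tf.losses collection. A condensed, self-contained sketch of the pattern (constant losses and weights stand in for the model's real tensors; tf.multiply is used in place of the since-removed tf.mul seen in the context lines):

  import tensorflow as tf

  losses = tf.constant([0.5, 0.25, 0.9])
  weights = tf.constant([1., 1., 0.])
  batch_loss = tf.div(tf.reduce_sum(tf.multiply(losses, weights)),
                      tf.reduce_sum(weights), name="batch_loss")
  tf.losses.add_loss(batch_loss)
  # Sums everything in the losses collection, including regularization losses.
  total_loss = tf.losses.get_total_loss()

  tf.summary.scalar("losses/batch_loss", batch_loss)
  tf.summary.scalar("losses/total_loss", total_loss)
  for var in tf.trainable_variables():
      tf.summary.histogram("parameters/" + var.op.name, var)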

+ 2 - 2
im2txt/im2txt/show_and_tell_model_test.py

@@ -63,7 +63,7 @@ class ShowAndTellModelTest(tf.test.TestCase):
   def _countModelParameters(self):
     """Counts the number of parameters in the model at top level scope."""
     counter = {}
-    for v in tf.all_variables():
+    for v in tf.global_variables():
       name = v.op.name.split("/")[0]
       num_params = v.get_shape().num_elements()
       assert num_params
@@ -98,7 +98,7 @@ class ShowAndTellModelTest(tf.test.TestCase):
     fetches = expected_shapes.keys()
 
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       outputs = sess.run(fetches, feed_dict)
 
     for index, output in enumerate(outputs):
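
The test updates track two more r0.12 renames: tf.all_variables became tf.global_variables, and tf.initialize_all_variables became tf.global_variables_initializer. A tiny runnable sketch of both (the variable name is arbitrary):

  import tensorflow as tf

  v = tf.Variable(tf.zeros([2, 3]), name="weights")
  with tf.Session() as sess:
      sess.run(tf.global_variables_initializer())  # was tf.initialize_all_variables()
      for var in tf.global_variables():            # was tf.all_variables()
          print(var.op.name, var.get_shape().num_elements())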