@@ -93,7 +93,7 @@ def tower_loss(scope):
     # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
     # session. This helps the clarity of presentation on tensorboard.
     loss_name = re.sub('%s_[0-9]*/' % cifar10.TOWER_NAME, '', l.op.name)
-    tf.contrib.deprecated.scalar_summary(loss_name, l)
+    tf.scalar_summary(loss_name, l)

   return total_loss

@@ -187,13 +187,13 @@ def train():
     grads = average_gradients(tower_grads)

     # Add a summary to track the learning rate.
-    summaries.append(tf.contrib.deprecated.scalar_summary('learning_rate', lr))
+    summaries.append(tf.scalar_summary('learning_rate', lr))

     # Add histograms for gradients.
     for grad, var in grads:
       if grad is not None:
         summaries.append(
-            tf.contrib.deprecated.histogram_summary(var.op.name + '/gradients',
+            tf.histogram_summary(var.op.name + '/gradients',
                                                     grad))

     # Apply the gradients to adjust the shared variables.
@@ -202,7 +202,7 @@ def train():
     # Add histograms for trainable variables.
     for var in tf.trainable_variables():
       summaries.append(
-          tf.contrib.deprecated.histogram_summary(var.op.name, var))
+          tf.histogram_summary(var.op.name, var))

     # Track the moving averages of all trainable variables.
     variable_averages = tf.train.ExponentialMovingAverage(
@@ -216,7 +216,7 @@ def train():
     saver = tf.train.Saver(tf.global_variables())

     # Build the summary operation from the last tower summaries.
-    summary_op = tf.contrib.deprecated.merge_summary(summaries)
+    summary_op = tf.merge_summary(summaries)

     # Build an initialization operation to run below.
     init = tf.global_variables_initializer()
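
For context, the four call sites this diff touches (scalar, histogram, and merge summaries) all have direct equivalents in the consolidated tf.summary module that shipped with TensorFlow 1.0: tf.summary.scalar, tf.summary.histogram, and tf.summary.merge. The sketch below is illustrative only and not part of the patch; it assumes a TF 1.x environment, and the tensors it defines (lr, total_loss, var, grad) are hypothetical stand-ins for the ones built inside train().

# Illustrative sketch (assumes TensorFlow 1.x; the tensors below are
# hypothetical stand-ins for the ones constructed in train()).
import tensorflow as tf

lr = tf.constant(0.1, name='learning_rate')
total_loss = tf.constant(1.0, name='total_loss')
var = tf.Variable(tf.zeros([10]), name='weights')
grad = tf.ones([10])

summaries = []
# Equivalent of tf.scalar_summary(...) as used in this patch.
summaries.append(tf.summary.scalar('learning_rate', lr))
summaries.append(tf.summary.scalar(total_loss.op.name, total_loss))
# Equivalent of tf.histogram_summary(...).
summaries.append(tf.summary.histogram(var.op.name + '/gradients', grad))
summaries.append(tf.summary.histogram(var.op.name, var))
# Equivalent of tf.merge_summary(...).
summary_op = tf.summary.merge(summaries)

One difference worth noting: the tf.summary ops register themselves in a graph collection, so a plain tf.summary.merge_all() can often replace the hand-maintained summaries list; the explicit list is kept here only to mirror the structure of the code in the diff.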