@@ -286,7 +286,7 @@ def loss(logits, labels):
   # Calculate the average cross entropy loss across the batch.
   labels = tf.cast(labels, tf.int64)
   cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, labels, name='cross_entropy_per_example')
+      logits=logits, labels=labels, name='cross_entropy_per_example')
   cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
   tf.add_to_collection('losses', cross_entropy_mean)
 
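For reference, a minimal standalone sketch of the corrected call; the example tensors here are hypothetical and only illustrate the expected shapes. In TF 1.x, after the argument reorder, `tf.nn.sparse_softmax_cross_entropy_with_logits` takes a leading `_sentinel` parameter so that `logits` and `labels` must be passed by keyword, which is what this change accounts for:

```python
import tensorflow as tf

# Hypothetical example inputs: a batch of 4 examples over 10 classes.
logits = tf.constant([[0.1] * 10] * 4)              # shape [4, 10], float
labels = tf.constant([3, 1, 0, 7], dtype=tf.int64)  # shape [4], class indices

# Keyword arguments are mandatory here; passing logits and labels
# positionally would hit the _sentinel parameter and raise an error.
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
    logits=logits, labels=labels, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
```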