Browse Source

Logits and labels were out of order when computing loss

Neal Wu 8 years ago
parent
commit
bf321a6f16
1 changed files with 3 additions and 2 deletions
  1. 3 additions and 2 deletions
      slim/train_image_classifier.py

+ 3 - 2
slim/train_image_classifier.py

@@ -470,10 +470,11 @@ def main(_):
       #############################
       if 'AuxLogits' in end_points:
         tf.losses.softmax_cross_entropy(
-            end_points['AuxLogits'], labels,
+            logits=end_points['AuxLogits'], onehot_labels=labels,
             label_smoothing=FLAGS.label_smoothing, weights=0.4, scope='aux_loss')
       tf.losses.softmax_cross_entropy(
-          logits, labels, label_smoothing=FLAGS.label_smoothing, weights=1.0)
+          logits=logits, onehot_labels=labels,
+          label_smoothing=FLAGS.label_smoothing, weights=1.0)
       return end_points
 
     # Gather initial summaries.