Merge pull request #783 from dweekly/master

Update sum_of_squares (TF 0.10) to mean_squared_error (TF 0.12)
Neal Wu, 8 years ago
commit 37d31ec131
1 changed file with 4 additions and 4 deletions
  1. slim/slim_walkthough.ipynb (+4 -4)
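
For reference, the diff below migrates the notebook from the TF 0.10 slim loss API to the TF 0.12 core loss API. A minimal sketch of the call migration, with placeholder tensors standing in for the notebook's regression_model outputs and targets:

import tensorflow as tf

# Hypothetical placeholders standing in for the notebook's predictions and targets.
predictions = tf.placeholder(tf.float32, shape=[None, 1])
targets = tf.placeholder(tf.float32, shape=[None, 1])

# Old API (TF 0.10): loss = slim.losses.sum_of_squares(predictions, targets)
# New API (TF 0.12+): keyword arguments, with labels passed explicitly.
loss = tf.losses.mean_squared_error(labels=targets, predictions=predictions)

The notebook keeps using slim.losses.get_total_loss(); this still works because tf.losses registers the new loss in the same losses collection that slim reads from.
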

slim/slim_walkthough.ipynb +4 -4

@@ -232,7 +232,7 @@
    },
    "outputs": [],
    "source": [
-    "# The following snippet trains the regression model using a sum_of_squares loss.\n",
+    "# The following snippet trains the regression model using a mean_squared_error loss.\n",
     "ckpt_dir = '/tmp/regression_model/'\n",
     "\n",
     "with tf.Graph().as_default():\n",
@@ -244,7 +244,7 @@
     "    predictions, nodes = regression_model(inputs, is_training=True)\n",
     "\n",
     "    # Add the loss function to the graph.\n",
-    "    loss = slim.losses.sum_of_squares(predictions, targets)\n",
+    "    loss = tf.losses.mean_squared_error(labels=targets, predictions=predictions)\n",
     "    \n",
     "    # The total loss is the uers's loss plus any regularization losses.\n",
     "    total_loss = slim.losses.get_total_loss()\n",
@@ -289,12 +289,12 @@
     "    predictions, end_points = regression_model(inputs, is_training=True)\n",
     "\n",
     "    # Add multiple loss nodes.\n",
-    "    sum_of_squares_loss = slim.losses.sum_of_squares(predictions, targets)\n",
+    "    mean_squared_error_loss = tf.losses.mean_squared_error(labels=targets, predictions=predictions)\n",
     "    absolute_difference_loss = slim.losses.absolute_difference(predictions, targets)\n",
     "\n",
     "    # The following two ways to compute the total loss are equivalent\n",
     "    regularization_loss = tf.add_n(slim.losses.get_regularization_losses())\n",
-    "    total_loss1 = sum_of_squares_loss + absolute_difference_loss + regularization_loss\n",
+    "    total_loss1 = mean_squared_error_loss + absolute_difference_loss + regularization_loss\n",
     "\n",
     "    # Regularization Loss is included in the total loss by default.\n",
     "    # This is good for training, but not for testing.\n",