Davis Vigneault 8 years ago
Parent
Commit
b1b47a1d8c
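
This commit makes one mechanical substitution across every example script and notebook: tf.initialize_all_variables() was deprecated in TensorFlow in favor of tf.global_variables_initializer() (the deprecation notice scheduled removal after 2017-03-02), and the new name is functionally equivalent for these examples. The paired -} / +} hunks in the notebook diffs simply restore a missing newline at end of file. A minimal sketch of the before/after pattern, assuming TF 1.x graph mode; the variable W below is illustrative only, not from the repo:

import tensorflow as tf

W = tf.Variable(tf.zeros([1]))  # any model variable

# Old, deprecated API:
# init = tf.initialize_all_variables()

# New API (same behavior for these examples):
init = tf.global_variables_initializer()

with tf.Session() as sess:
    sess.run(init)  # variables must be initialized before any op reads them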

+ 1 - 1
examples/2_BasicModels/linear_regression.py

@@ -41,7 +41,7 @@ cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples)
 optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/2_BasicModels/logistic_regression.py

@@ -38,7 +38,7 @@ cost = tf.reduce_mean(-tf.reduce_sum(y*tf.log(pred), reduction_indices=1))
 optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/2_BasicModels/nearest_neighbor.py

@@ -33,7 +33,7 @@ pred = tf.arg_min(distance, 0)
 accuracy = 0.
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/3_NeuralNetworks/autoencoder.py

@@ -83,7 +83,7 @@ cost = tf.reduce_mean(tf.pow(y_true - y_pred, 2))
 optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(cost)
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/3_NeuralNetworks/bidirectional_rnn.py

@@ -90,7 +90,7 @@ correct_pred = tf.equal(tf.argmax(pred,1), tf.argmax(y,1))
 accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/3_NeuralNetworks/convolutional_network.py

@@ -104,7 +104,7 @@ correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
 accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/3_NeuralNetworks/dynamic_rnn.py

@@ -162,7 +162,7 @@ correct_pred = tf.equal(tf.argmax(pred,1), tf.argmax(y,1))
 accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/3_NeuralNetworks/multilayer_perceptron.py

@@ -64,7 +64,7 @@ cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
 optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/3_NeuralNetworks/recurrent_network.py

@@ -80,7 +80,7 @@ correct_pred = tf.equal(tf.argmax(pred,1), tf.argmax(y,1))
 accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Launch the graph
 with tf.Session() as sess:

+ 1 - 1
examples/4_Utils/save_restore_model.py

@@ -64,7 +64,7 @@ cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
 optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # 'Saver' op to save and restore all the variables
 saver = tf.train.Saver()

+ 1 - 1
examples/4_Utils/tensorboard_advanced.py

@@ -88,7 +88,7 @@ with tf.name_scope('Accuracy'):
     acc = tf.reduce_mean(tf.cast(acc, tf.float32))
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Create a summary to monitor cost tensor
 tf.scalar_summary("loss", loss)

+ 1 - 1
examples/4_Utils/tensorboard_basic.py

@@ -49,7 +49,7 @@ with tf.name_scope('Accuracy'):
     acc = tf.reduce_mean(tf.cast(acc, tf.float32))
 
 # Initializing the variables
-init = tf.initialize_all_variables()
+init = tf.global_variables_initializer()
 
 # Create a summary to monitor cost tensor
 tf.scalar_summary("loss", cost)

+ 1 - 1
notebooks/2_BasicModels/linear_regression.ipynb

@@ -110,7 +110,7 @@
    "outputs": [],
    "source": [
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {

+ 2 - 2
notebooks/2_BasicModels/logistic_regression.ipynb

@@ -73,7 +73,7 @@
     "optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {
@@ -169,4 +169,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
+}

+ 2 - 2
notebooks/2_BasicModels/nearest_neighbor.ipynb

@@ -68,7 +68,7 @@
     "accuracy = 0.\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {
@@ -328,4 +328,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
+}

+ 1 - 1
notebooks/3_NeuralNetworks/autoencoder.ipynb

@@ -129,7 +129,7 @@
     "optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(cost)\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {

+ 1 - 1
notebooks/3_NeuralNetworks/bidirectional_rnn.ipynb

@@ -134,7 +134,7 @@
     "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {

+ 1 - 1
notebooks/3_NeuralNetworks/convolutional_network.ipynb

@@ -158,7 +158,7 @@
     "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {

+ 2 - 2
notebooks/3_NeuralNetworks/multilayer_perceptron.ipynb

@@ -118,7 +118,7 @@
     "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {
@@ -204,4 +204,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
+}

+ 1 - 1
notebooks/3_NeuralNetworks/recurrent_network.ipynb

@@ -125,7 +125,7 @@
     "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {

+ 2 - 2
notebooks/4_Utils/save_restore_model.ipynb

@@ -101,7 +101,7 @@
     "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()"
+    "init = tf.global_variables_initializer()"
    ]
   },
   {
@@ -268,4 +268,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
+}

+ 1 - 1
notebooks/4_Utils/tensorboard_basic.ipynb

@@ -95,7 +95,7 @@
     "    acc = tf.reduce_mean(tf.cast(acc, tf.float32))\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()\n",
+    "init = tf.global_variables_initializer()\n",
     "\n",
     "# Create a summary to monitor cost tensor\n",
     "tf.scalar_summary(\"loss\", cost)\n",

+ 1 - 1
notebooks/5_MultiGPU/multigpu_basics.ipynb

@@ -175,4 +175,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
+}