
fixed shape errors in the rnn model comments (#43)

Simanta Gautam 8 years ago
parent
commit
1f58605650

+ 2 - 2
examples/3_NeuralNetworks/bidirectional_rnn.py

@@ -53,13 +53,13 @@ def BiRNN(x, weights, biases):
 
     # Prepare data shape to match `bidirectional_rnn` function requirements
     # Current data input shape: (batch_size, n_steps, n_input)
-    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)
+    # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)
 
     # Permuting batch_size and n_steps
     x = tf.transpose(x, [1, 0, 2])
     # Reshape to (n_steps*batch_size, n_input)
     x = tf.reshape(x, [-1, n_input])
-    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_hidden)
+    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)
     x = tf.split(0, n_steps, x)
 
     # Define lstm cells with tensorflow

+ 2 - 2
examples/3_NeuralNetworks/recurrent_network.py

@@ -50,13 +50,13 @@ def RNN(x, weights, biases):
 
     # Prepare data shape to match `rnn` function requirements
     # Current data input shape: (batch_size, n_steps, n_input)
-    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)
+    # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)
 
     # Permuting batch_size and n_steps
     x = tf.transpose(x, [1, 0, 2])
     # Reshaping to (n_steps*batch_size, n_input)
     x = tf.reshape(x, [-1, n_input])
-    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_hidden)
+    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)
     x = tf.split(0, n_steps, x)
 
     # Define a lstm cell with tensorflow

+ 2 - 2
notebooks/3_NeuralNetworks/bidirectional_rnn.ipynb

@@ -99,13 +99,13 @@
     "\n",
     "    # Prepare data shape to match `bidirectional_rnn` function requirements\n",
     "    # Current data input shape: (batch_size, n_steps, n_input)\n",
-    "    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)\n",
+    "    # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)\n",
     "    \n",
     "    # Permuting batch_size and n_steps\n",
     "    x = tf.transpose(x, [1, 0, 2])\n",
     "    # Reshape to (n_steps*batch_size, n_input)\n",
     "    x = tf.reshape(x, [-1, n_input])\n",
-    "    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_hidden)\n",
+    "    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)\n",
     "    x = tf.split(0, n_steps, x)\n",
     "\n",
     "    # Define lstm cells with tensorflow\n",

+ 2 - 2
notebooks/3_NeuralNetworks/recurrent_network.ipynb

@@ -96,13 +96,13 @@
     "\n",
     "    # Prepare data shape to match `rnn` function requirements\n",
     "    # Current data input shape: (batch_size, n_steps, n_input)\n",
-    "    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)\n",
+    "    # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)\n",
     "    \n",
     "    # Permuting batch_size and n_steps\n",
     "    x = tf.transpose(x, [1, 0, 2])\n",
     "    # Reshaping to (n_steps*batch_size, n_input)\n",
     "    x = tf.reshape(x, [-1, n_input])\n",
-    "    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_hidden)\n",
+    "    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)\n",
     "    x = tf.split(0, n_steps, x)\n",
     "\n",
     "    # Define a lstm cell with tensorflow\n",