Explorar o código

updated rnn examples

Aymeric Damien hai 9 anos
pai
achega
4351a03376

+ 8 - 2
examples/3_NeuralNetworks/bidirectional_rnn.py

@@ -53,6 +53,8 @@ def BiRNN(x, weights, biases):
 
     # Prepare data shape to match `bidirectional_rnn` function requirements
     # Current data input shape: (batch_size, n_steps, n_input)
+    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)
+
     # Permuting batch_size and n_steps
     x = tf.transpose(x, [1, 0, 2])
     # Reshape to (n_steps*batch_size, n_input)
@@ -67,8 +69,12 @@ def BiRNN(x, weights, biases):
     lstm_bw_cell = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
 
     # Get lstm cell output
-    outputs = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,
-                                    dtype=tf.float32)
+    try:
+        outputs, _, _ = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,
+                                              dtype=tf.float32)
+    except Exception: # Old TensorFlow version only returns outputs not states
+        outputs = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,
+                                        dtype=tf.float32)
 
     # Linear activation, using rnn inner loop last output
     return tf.matmul(outputs[-1], weights['out']) + biases['out']

+ 2 - 1
examples/3_NeuralNetworks/recurrent_network.py

@@ -52,12 +52,13 @@ def RNN(x, weights, biases):
 
     # Prepare data shape to match `rnn` function requirements
     # Current data input shape: (batch_size, n_steps, n_input)
+    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)
+
     # Permuting batch_size and n_steps
     x = tf.transpose(x, [1, 0, 2])
     # Reshaping to (n_steps*batch_size, n_input)
     x = tf.reshape(x, [-1, n_input])
     # Split to get a list of 'n_steps' tensors of shape (batch_size, n_hidden)
-    # This input shape is required by `rnn` function
     x = tf.split(0, n_steps, x)
 
     # Define a lstm cell with tensorflow

+ 14 - 20
notebooks/3_NeuralNetworks/bidirectional_rnn.ipynb

@@ -3,9 +3,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "'''\n",
@@ -21,9 +19,7 @@
   {
    "cell_type": "code",
    "execution_count": 1,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",
@@ -49,9 +45,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "'''\n",
@@ -64,9 +58,7 @@
   {
    "cell_type": "code",
    "execution_count": 2,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# Parameters\n",
@@ -100,15 +92,15 @@
   {
    "cell_type": "code",
    "execution_count": 3,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "def BiRNN(x, weights, biases):\n",
     "\n",
     "    # Prepare data shape to match `bidirectional_rnn` function requirements\n",
     "    # Current data input shape: (batch_size, n_steps, n_input)\n",
+    "    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)\n",
+    "    \n",
     "    # Permuting batch_size and n_steps\n",
     "    x = tf.transpose(x, [1, 0, 2])\n",
     "    # Reshape to (n_steps*batch_size, n_input)\n",
@@ -123,8 +115,12 @@
     "    lstm_bw_cell = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)\n",
     "\n",
     "    # Get lstm cell output\n",
-    "    outputs = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n",
-    "                                    dtype=tf.float32)\n",
+    "    try:\n",
+    "        outputs, _, _ = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n",
+    "                                              dtype=tf.float32)\n",
+    "    except Exception: # Old TensorFlow version only returns outputs not states\n",
+    "        outputs = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n",
+    "                                        dtype=tf.float32)\n",
     "\n",
     "    # Linear activation, using rnn inner loop last output\n",
     "    return tf.matmul(outputs[-1], weights['out']) + biases['out']\n",
@@ -146,9 +142,7 @@
   {
    "cell_type": "code",
    "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",

+ 8 - 19
notebooks/3_NeuralNetworks/recurrent_network.ipynb

@@ -3,9 +3,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "'''\n",
@@ -21,9 +19,7 @@
   {
    "cell_type": "code",
    "execution_count": 1,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",
@@ -49,9 +45,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "'''\n",
@@ -64,9 +58,7 @@
   {
    "cell_type": "code",
    "execution_count": 2,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# Parameters\n",
@@ -99,21 +91,20 @@
   {
    "cell_type": "code",
    "execution_count": 3,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "def RNN(x, weights, biases):\n",
     "\n",
     "    # Prepare data shape to match `rnn` function requirements\n",
     "    # Current data input shape: (batch_size, n_steps, n_input)\n",
+    "    # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)\n",
+    "    \n",
     "    # Permuting batch_size and n_steps\n",
     "    x = tf.transpose(x, [1, 0, 2])\n",
     "    # Reshaping to (n_steps*batch_size, n_input)\n",
     "    x = tf.reshape(x, [-1, n_input])\n",
     "    # Split to get a list of 'n_steps' tensors of shape (batch_size, n_hidden)\n",
-    "    # This input shape is required by `rnn` function\n",
     "    x = tf.split(0, n_steps, x)\n",
     "\n",
     "    # Define a lstm cell with tensorflow\n",
@@ -142,9 +133,7 @@
   {
    "cell_type": "code",
    "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",