@@ -3,9 +3,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "'''\n",
@@ -21,9 +19,7 @@
   {
    "cell_type": "code",
    "execution_count": 1,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",
@@ -49,9 +45,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "'''\n",
@@ -64,9 +58,7 @@
   {
    "cell_type": "code",
    "execution_count": 2,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "# Parameters\n",
@@ -100,15 +92,15 @@
   {
    "cell_type": "code",
    "execution_count": 3,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "def BiRNN(x, weights, biases):\n",
     "\n",
     "    # Prepare data shape to match `bidirectional_rnn` function requirements\n",
     "    # Current data input shape: (batch_size, n_steps, n_input)\n",
+ " # Required shape: 'n_steps' tensors list of shape (batch_size, n_hidden)\n",
+    "    \n",
     "    # Permuting batch_size and n_steps\n",
     "    x = tf.transpose(x, [1, 0, 2])\n",
     "    # Reshape to (n_steps*batch_size, n_input)\n",
@@ -123,8 +115,12 @@
     "    lstm_bw_cell = rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)\n",
     "\n",
     "    # Get lstm cell output\n",
-    "    outputs = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n",
-    "                                    dtype=tf.float32)\n",
+    "    try:\n",
+    "        outputs, _, _ = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n",
+    "                                              dtype=tf.float32)\n",
+    "    except Exception: # Old TensorFlow version only returns outputs not states\n",
+    "        outputs = rnn.bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n",
+    "                                        dtype=tf.float32)\n",
     "\n",
     "    # Linear activation, using rnn inner loop last output\n",
     "    return tf.matmul(outputs[-1], weights['out']) + biases['out']\n",
@@ -146,9 +142,7 @@
   {
    "cell_type": "code",
    "execution_count": 4,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",