{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## MNIST数据集双向LSTM网络"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "导入MNIST数据集"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/shoremei/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
      "  from ._conv import register_converters as _register_converters\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-1-64753445a297>:6: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n",
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please write your own downloading logic.\n",
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use tf.data to implement this functionality.\n",
      "Extracting data/train-images-idx3-ubyte.gz\n",
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use tf.data to implement this functionality.\n",
      "Extracting data/train-labels-idx1-ubyte.gz\n",
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use tf.one_hot on tensors.\n",
      "Extracting data/t10k-images-idx3-ubyte.gz\n",
      "Extracting data/t10k-labels-idx1-ubyte.gz\n",
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n",
      "(55000, 784)\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "from tensorflow.contrib import rnn\n",
    "from tensorflow.examples.tutorials.mnist import input_data\n",
    "\n",
    "mnist = input_data.read_data_sets('data', one_hot=True)\n",
    "print (mnist.train.images.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "设置参数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n"
     ]
    }
   ],
   "source": [
    "lr = 1e-2            # 因为优化器选择adam,所以学习率较低\n",
    "max_samples = 400000\n",
    "batch_size = 128\n",
    "display_step = 10    # 每训练十次就展示一次训练情况\n",
    "input_size = 28      # 每行输入28个特征点\n",
    "timestep_size = 28   # 持续输入28行\n",
    "hidden_size = 256    # 隐含层的数量\n",
    "class_num = 10       # 10分类问题\n",
    "\n",
    "x = tf.placeholder(tf.float32, [None, timestep_size, input_size])\n",
    "y = tf.placeholder(tf.float32, [None, class_num])\n",
    "\n",
    "weights = tf.Variable(tf.random_normal([2*hidden_size, class_num]))\n",
    "bias = tf.Variable(tf.random_normal([class_num]))\n",
    "keep_prob = tf.placeholder(tf.float32, [])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "定义网络结构"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<img src=\"./img/10.png\" style=\"width:600px;height:300px\">"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def BiRNN(x, weights, biases):\n",
    "    x = tf.transpose(x, [1,0,2])\n",
    "    x = tf.reshape(x, [-1, input_size])\n",
    "    x = tf.split(x, timestep_size)\n",
    "     \n",
    "    # 为何不这样写?\n",
    "    # x = [[:, i, :] for i in range(x.shape[1])]\n",
    "    \n",
    "    lstm_fw_cell = tf.contrib.rnn.BasicLSTMCell(hidden_size, forget_bias=1.0)\n",
    "    lstm_bw_cell = tf.contrib.rnn.BasicLSTMCell(hidden_size, forget_bias=1.0)\n",
    "    \n",
    "    # 三个返回值.第一个是输出,是每个时间步的输出,是两个lstm输出的堆加;后两个待查,应该是状态输出\n",
    "    outputs, _, _ = tf.contrib.rnn.static_bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x, dtype=tf.float32)\n",
    "    return tf.matmul(outputs[-1], weights)+biases"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "定义损失,优化器以及准确率"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-3-1f7c566965c1>:9: BasicLSTMCell.__init__ (from tensorflow.python.ops.rnn_cell_impl) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "This class is equivalent as tf.keras.layers.LSTMCell, and will be replaced by that in Tensorflow 2.0.\n",
      "WARNING:tensorflow:From <ipython-input-3-1f7c566965c1>:13: static_bidirectional_rnn (from tensorflow.python.ops.rnn) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use `keras.layers.Bidirectional(keras.layers.RNN(cell, unroll=True))`, which is equivalent to this API\n",
      "WARNING:tensorflow:From /home/shoremei/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/rnn.py:1565: static_rnn (from tensorflow.python.ops.rnn) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use `keras.layers.RNN(cell, unroll=True)`, which is equivalent to this API\n",
      "WARNING:tensorflow:From <ipython-input-4-5d0095d9b3cc>:3: softmax_cross_entropy_with_logits (from tensorflow.python.ops.nn_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "\n",
      "Future major versions of TensorFlow will allow gradients to flow\n",
      "into the labels input on backprop by default.\n",
      "\n",
      "See `tf.nn.softmax_cross_entropy_with_logits_v2`.\n",
      "\n"
     ]
    }
   ],
   "source": [
    "y_pred = BiRNN(x, weights, bias)\n",
    "\n",
    "cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y))\n",
    "\n",
    "optimizer = tf.train.AdamOptimizer(learning_rate=lr).minimize(cross_entropy)\n",
    "\n",
    "correct_pred = tf.equal(tf.argmax(y, 1), tf.argmax(y_pred, 1))\n",
    "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n",
    "\n",
    "init_op = tf.global_variables_initializer() "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter 10, minibatch loss:  1.9955654 , trainning accuracy: 0.31250\n",
      "Iter 20, minibatch loss:  1.0454437 , trainning accuracy: 0.61719\n",
      "Iter 30, minibatch loss:  0.9570806 , trainning accuracy: 0.64844\n",
      "Iter 40, minibatch loss:  0.9599416 , trainning accuracy: 0.68750\n",
      "Iter 50, minibatch loss:  0.60566455 , trainning accuracy: 0.78125\n",
      "Iter 60, minibatch loss:  0.44297868 , trainning accuracy: 0.85156\n",
      "Iter 70, minibatch loss:  0.4483085 , trainning accuracy: 0.84375\n",
      "Iter 80, minibatch loss:  0.36864156 , trainning accuracy: 0.85938\n",
      "Iter 90, minibatch loss:  0.25367388 , trainning accuracy: 0.90625\n",
      "Iter 100, minibatch loss:  0.21088904 , trainning accuracy: 0.93750\n",
      "Iter 110, minibatch loss:  0.20803724 , trainning accuracy: 0.92188\n",
      "Iter 120, minibatch loss:  0.23835392 , trainning accuracy: 0.92969\n",
      "Iter 130, minibatch loss:  0.18693823 , trainning accuracy: 0.93750\n",
      "Iter 140, minibatch loss:  0.16500756 , trainning accuracy: 0.94531\n",
      "Iter 150, minibatch loss:  0.27873772 , trainning accuracy: 0.91406\n",
      "Iter 160, minibatch loss:  0.15643573 , trainning accuracy: 0.92969\n",
      "Iter 170, minibatch loss:  0.12371496 , trainning accuracy: 0.96875\n",
      "Iter 180, minibatch loss:  0.22585058 , trainning accuracy: 0.92188\n",
      "Iter 190, minibatch loss:  0.12844637 , trainning accuracy: 0.96094\n",
      "Iter 200, minibatch loss:  0.16510507 , trainning accuracy: 0.96094\n",
      "Iter 210, minibatch loss:  0.077610925 , trainning accuracy: 0.96875\n",
      "Iter 220, minibatch loss:  0.09180078 , trainning accuracy: 0.97656\n",
      "Iter 230, minibatch loss:  0.09748549 , trainning accuracy: 0.97656\n",
      "Iter 240, minibatch loss:  0.12580892 , trainning accuracy: 0.96094\n",
      "Iter 250, minibatch loss:  0.24402209 , trainning accuracy: 0.95312\n",
      "Iter 260, minibatch loss:  0.07503781 , trainning accuracy: 0.97656\n",
      "Iter 270, minibatch loss:  0.10681371 , trainning accuracy: 0.96094\n",
      "Iter 280, minibatch loss:  0.10739208 , trainning accuracy: 0.96094\n",
      "Iter 290, minibatch loss:  0.07220815 , trainning accuracy: 0.96875\n",
      "Iter 300, minibatch loss:  0.10831946 , trainning accuracy: 0.96094\n",
      "Iter 310, minibatch loss:  0.19399573 , trainning accuracy: 0.96875\n",
      "Iter 320, minibatch loss:  0.09477521 , trainning accuracy: 0.96094\n",
      "Iter 330, minibatch loss:  0.15699488 , trainning accuracy: 0.95312\n",
      "Iter 340, minibatch loss:  0.05867505 , trainning accuracy: 0.97656\n",
      "Iter 350, minibatch loss:  0.027608726 , trainning accuracy: 0.99219\n",
      "Iter 360, minibatch loss:  0.11471917 , trainning accuracy: 0.96094\n",
      "Iter 370, minibatch loss:  0.09440671 , trainning accuracy: 0.96875\n",
      "Iter 380, minibatch loss:  0.052482475 , trainning accuracy: 0.98438\n",
      "Iter 390, minibatch loss:  0.06460087 , trainning accuracy: 0.99219\n",
      "Iter 400, minibatch loss:  0.06576688 , trainning accuracy: 0.98438\n",
      "Iter 410, minibatch loss:  0.077899285 , trainning accuracy: 0.97656\n",
      "Iter 420, minibatch loss:  0.08724066 , trainning accuracy: 0.97656\n",
      "Iter 430, minibatch loss:  0.029776057 , trainning accuracy: 0.99219\n",
      "Iter 440, minibatch loss:  0.08672947 , trainning accuracy: 0.97656\n",
      "Iter 450, minibatch loss:  0.13157244 , trainning accuracy: 0.95312\n",
      "Iter 460, minibatch loss:  0.049253814 , trainning accuracy: 0.99219\n",
      "Iter 470, minibatch loss:  0.09450177 , trainning accuracy: 0.98438\n",
      "Iter 480, minibatch loss:  0.07096259 , trainning accuracy: 0.98438\n",
      "Iter 490, minibatch loss:  0.01625933 , trainning accuracy: 0.99219\n",
      "Iter 500, minibatch loss:  0.05429323 , trainning accuracy: 0.98438\n",
      "Iter 510, minibatch loss:  0.060318295 , trainning accuracy: 0.96875\n",
      "Iter 520, minibatch loss:  0.041802205 , trainning accuracy: 0.99219\n",
      "Iter 530, minibatch loss:  0.0148855485 , trainning accuracy: 1.00000\n",
      "Iter 540, minibatch loss:  0.03787308 , trainning accuracy: 0.98438\n",
      "Iter 550, minibatch loss:  0.029043688 , trainning accuracy: 1.00000\n",
      "Iter 560, minibatch loss:  0.043866187 , trainning accuracy: 0.99219\n",
      "Iter 570, minibatch loss:  0.059666336 , trainning accuracy: 0.98438\n",
      "Iter 580, minibatch loss:  0.046195943 , trainning accuracy: 0.98438\n",
      "Iter 590, minibatch loss:  0.13112882 , trainning accuracy: 0.96094\n",
      "Iter 600, minibatch loss:  0.088061385 , trainning accuracy: 0.97656\n",
      "Iter 610, minibatch loss:  0.025297437 , trainning accuracy: 1.00000\n",
      "Iter 620, minibatch loss:  0.058508005 , trainning accuracy: 0.97656\n",
      "Iter 630, minibatch loss:  0.062240567 , trainning accuracy: 0.98438\n",
      "Iter 640, minibatch loss:  0.0882464 , trainning accuracy: 0.96875\n",
      "Iter 650, minibatch loss:  0.04988518 , trainning accuracy: 0.98438\n",
      "Iter 660, minibatch loss:  0.057799213 , trainning accuracy: 0.97656\n",
      "Iter 670, minibatch loss:  0.031463303 , trainning accuracy: 0.98438\n",
      "Iter 680, minibatch loss:  0.033442147 , trainning accuracy: 0.99219\n",
      "Iter 690, minibatch loss:  0.03466548 , trainning accuracy: 0.99219\n",
      "Iter 700, minibatch loss:  0.032738224 , trainning accuracy: 0.99219\n",
      "Iter 710, minibatch loss:  0.05177807 , trainning accuracy: 0.98438\n",
      "Iter 720, minibatch loss:  0.03319422 , trainning accuracy: 0.99219\n",
      "Iter 730, minibatch loss:  0.041291397 , trainning accuracy: 0.98438\n",
      "Iter 740, minibatch loss:  0.17138708 , trainning accuracy: 0.93750\n",
      "Iter 750, minibatch loss:  0.04806689 , trainning accuracy: 0.98438\n",
      "Iter 760, minibatch loss:  0.07955009 , trainning accuracy: 0.98438\n",
      "Iter 770, minibatch loss:  0.090387195 , trainning accuracy: 0.98438\n",
      "Iter 780, minibatch loss:  0.02245921 , trainning accuracy: 0.99219\n",
      "Iter 790, minibatch loss:  0.03713124 , trainning accuracy: 0.98438\n",
      "Iter 800, minibatch loss:  0.022918643 , trainning accuracy: 0.99219\n",
      "Iter 810, minibatch loss:  0.049233172 , trainning accuracy: 0.99219\n",
      "Iter 820, minibatch loss:  0.05228795 , trainning accuracy: 0.98438\n",
      "Iter 830, minibatch loss:  0.0095314365 , trainning accuracy: 1.00000\n",
      "Iter 840, minibatch loss:  0.029036531 , trainning accuracy: 1.00000\n",
      "Iter 850, minibatch loss:  0.0152891595 , trainning accuracy: 0.99219\n",
      "Iter 860, minibatch loss:  0.01485925 , trainning accuracy: 1.00000\n",
      "Iter 870, minibatch loss:  0.024103057 , trainning accuracy: 0.99219\n",
      "Iter 880, minibatch loss:  0.03588181 , trainning accuracy: 0.97656\n",
      "Iter 890, minibatch loss:  0.08977943 , trainning accuracy: 0.96875\n",
      "Iter 900, minibatch loss:  0.033269204 , trainning accuracy: 0.98438\n",
      "Iter 910, minibatch loss:  0.07497755 , trainning accuracy: 0.98438\n",
      "Iter 920, minibatch loss:  0.08026034 , trainning accuracy: 0.97656\n",
      "Iter 930, minibatch loss:  0.06792696 , trainning accuracy: 0.97656\n",
      "Iter 940, minibatch loss:  0.04065689 , trainning accuracy: 0.98438\n",
      "Iter 950, minibatch loss:  0.0458678 , trainning accuracy: 0.99219\n",
      "Iter 960, minibatch loss:  0.08634587 , trainning accuracy: 0.97656\n",
      "Iter 970, minibatch loss:  0.019278673 , trainning accuracy: 0.99219\n",
      "Iter 980, minibatch loss:  0.05176819 , trainning accuracy: 0.98438\n",
      "Iter 990, minibatch loss:  0.017022565 , trainning accuracy: 1.00000\n",
      "Iter 1000, minibatch loss:  0.029363979 , trainning accuracy: 1.00000\n",
      "Iter 1010, minibatch loss:  0.0478934 , trainning accuracy: 0.99219\n",
      "Iter 1020, minibatch loss:  0.011650508 , trainning accuracy: 1.00000\n",
      "Iter 1030, minibatch loss:  0.097466536 , trainning accuracy: 0.97656\n",
      "Iter 1040, minibatch loss:  0.027120303 , trainning accuracy: 0.99219\n",
      "Iter 1050, minibatch loss:  0.084713235 , trainning accuracy: 0.97656\n",
      "Iter 1060, minibatch loss:  0.04133387 , trainning accuracy: 0.98438\n",
      "Iter 1070, minibatch loss:  0.027329078 , trainning accuracy: 0.99219\n",
      "Iter 1080, minibatch loss:  0.05956095 , trainning accuracy: 0.98438\n",
      "Iter 1090, minibatch loss:  0.025748204 , trainning accuracy: 1.00000\n",
      "Iter 1100, minibatch loss:  0.035578463 , trainning accuracy: 0.99219\n",
      "Iter 1110, minibatch loss:  0.048387762 , trainning accuracy: 0.99219\n",
      "Iter 1120, minibatch loss:  0.039846066 , trainning accuracy: 0.98438\n",
      "Iter 1130, minibatch loss:  0.047318265 , trainning accuracy: 0.98438\n",
      "Iter 1140, minibatch loss:  0.01927436 , trainning accuracy: 1.00000\n",
      "Iter 1150, minibatch loss:  0.0039395723 , trainning accuracy: 1.00000\n",
      "Iter 1160, minibatch loss:  0.047212746 , trainning accuracy: 0.98438\n",
      "Iter 1170, minibatch loss:  0.031895343 , trainning accuracy: 0.99219\n",
      "Iter 1180, minibatch loss:  0.03374349 , trainning accuracy: 1.00000\n",
      "Iter 1190, minibatch loss:  0.0037651334 , trainning accuracy: 1.00000\n",
      "Iter 1200, minibatch loss:  0.007670359 , trainning accuracy: 1.00000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter 1210, minibatch loss:  0.007287067 , trainning accuracy: 1.00000\n",
      "Iter 1220, minibatch loss:  0.064273626 , trainning accuracy: 0.98438\n",
      "Iter 1230, minibatch loss:  0.026194025 , trainning accuracy: 1.00000\n",
      "Iter 1240, minibatch loss:  0.01410237 , trainning accuracy: 1.00000\n",
      "Iter 1250, minibatch loss:  0.0126136765 , trainning accuracy: 1.00000\n",
      "Iter 1260, minibatch loss:  0.09143659 , trainning accuracy: 0.99219\n",
      "Iter 1270, minibatch loss:  0.05294005 , trainning accuracy: 0.98438\n",
      "Iter 1280, minibatch loss:  0.0716417 , trainning accuracy: 0.97656\n",
      "Iter 1290, minibatch loss:  0.099167824 , trainning accuracy: 0.96875\n",
      "Iter 1300, minibatch loss:  0.007986182 , trainning accuracy: 1.00000\n",
      "Iter 1310, minibatch loss:  0.0114070065 , trainning accuracy: 1.00000\n",
      "Iter 1320, minibatch loss:  0.033090737 , trainning accuracy: 0.99219\n",
      "Iter 1330, minibatch loss:  0.022524111 , trainning accuracy: 0.99219\n",
      "Iter 1340, minibatch loss:  0.010580829 , trainning accuracy: 1.00000\n",
      "Iter 1350, minibatch loss:  0.032258943 , trainning accuracy: 0.99219\n",
      "Iter 1360, minibatch loss:  0.0070071425 , trainning accuracy: 1.00000\n",
      "Iter 1370, minibatch loss:  0.008413711 , trainning accuracy: 1.00000\n",
      "Iter 1380, minibatch loss:  0.05871112 , trainning accuracy: 0.98438\n",
      "Iter 1390, minibatch loss:  0.016084157 , trainning accuracy: 1.00000\n",
      "Iter 1400, minibatch loss:  0.041967057 , trainning accuracy: 0.98438\n",
      "Iter 1410, minibatch loss:  0.024068244 , trainning accuracy: 0.99219\n",
      "Iter 1420, minibatch loss:  0.028585916 , trainning accuracy: 0.98438\n",
      "Iter 1430, minibatch loss:  0.007960992 , trainning accuracy: 1.00000\n",
      "Iter 1440, minibatch loss:  0.031964608 , trainning accuracy: 0.99219\n",
      "Iter 1450, minibatch loss:  0.035246987 , trainning accuracy: 0.99219\n",
      "Iter 1460, minibatch loss:  0.072574496 , trainning accuracy: 0.98438\n",
      "Iter 1470, minibatch loss:  0.025784522 , trainning accuracy: 0.99219\n",
      "Iter 1480, minibatch loss:  0.026330832 , trainning accuracy: 1.00000\n",
      "Iter 1490, minibatch loss:  0.016492382 , trainning accuracy: 0.99219\n",
      "Iter 1500, minibatch loss:  0.015839819 , trainning accuracy: 1.00000\n",
      "Iter 1510, minibatch loss:  0.087833256 , trainning accuracy: 0.99219\n",
      "Iter 1520, minibatch loss:  0.027081642 , trainning accuracy: 1.00000\n",
      "Iter 1530, minibatch loss:  0.11598194 , trainning accuracy: 0.98438\n",
      "Iter 1540, minibatch loss:  0.022121005 , trainning accuracy: 0.99219\n",
      "Iter 1550, minibatch loss:  0.065616705 , trainning accuracy: 0.97656\n",
      "Iter 1560, minibatch loss:  0.054643273 , trainning accuracy: 0.97656\n",
      "Iter 1570, minibatch loss:  0.06907408 , trainning accuracy: 0.99219\n",
      "Iter 1580, minibatch loss:  0.022265887 , trainning accuracy: 0.99219\n",
      "Iter 1590, minibatch loss:  0.009204915 , trainning accuracy: 1.00000\n",
      "Iter 1600, minibatch loss:  0.012503823 , trainning accuracy: 1.00000\n",
      "Iter 1610, minibatch loss:  0.017957918 , trainning accuracy: 0.99219\n",
      "Iter 1620, minibatch loss:  0.011850491 , trainning accuracy: 1.00000\n",
      "Iter 1630, minibatch loss:  0.040981837 , trainning accuracy: 0.98438\n",
      "Iter 1640, minibatch loss:  0.023593776 , trainning accuracy: 0.99219\n",
      "Iter 1650, minibatch loss:  0.017047208 , trainning accuracy: 1.00000\n",
      "Iter 1660, minibatch loss:  0.0055412347 , trainning accuracy: 1.00000\n",
      "Iter 1670, minibatch loss:  0.011645379 , trainning accuracy: 1.00000\n",
      "Iter 1680, minibatch loss:  0.003471855 , trainning accuracy: 1.00000\n",
      "Iter 1690, minibatch loss:  0.010728704 , trainning accuracy: 1.00000\n",
      "Iter 1700, minibatch loss:  0.030559197 , trainning accuracy: 0.98438\n",
      "Iter 1710, minibatch loss:  0.021830153 , trainning accuracy: 0.99219\n",
      "Iter 1720, minibatch loss:  0.0059996685 , trainning accuracy: 1.00000\n",
      "Iter 1730, minibatch loss:  0.007257711 , trainning accuracy: 1.00000\n",
      "Iter 1740, minibatch loss:  0.013613933 , trainning accuracy: 1.00000\n",
      "Iter 1750, minibatch loss:  0.018438764 , trainning accuracy: 0.99219\n",
      "Iter 1760, minibatch loss:  0.021672115 , trainning accuracy: 0.99219\n",
      "Iter 1770, minibatch loss:  0.029019943 , trainning accuracy: 0.99219\n",
      "Iter 1780, minibatch loss:  0.037855633 , trainning accuracy: 0.99219\n",
      "Iter 1790, minibatch loss:  0.012681406 , trainning accuracy: 1.00000\n",
      "Iter 1800, minibatch loss:  0.047227927 , trainning accuracy: 0.99219\n",
      "Iter 1810, minibatch loss:  0.020509798 , trainning accuracy: 0.99219\n",
      "Iter 1820, minibatch loss:  0.015664337 , trainning accuracy: 1.00000\n",
      "Iter 1830, minibatch loss:  0.029090721 , trainning accuracy: 0.99219\n",
      "Iter 1840, minibatch loss:  0.011134328 , trainning accuracy: 1.00000\n",
      "Iter 1850, minibatch loss:  0.010021149 , trainning accuracy: 1.00000\n",
      "Iter 1860, minibatch loss:  0.008367843 , trainning accuracy: 1.00000\n",
      "Iter 1870, minibatch loss:  0.010572016 , trainning accuracy: 1.00000\n",
      "Iter 1880, minibatch loss:  0.007848521 , trainning accuracy: 1.00000\n",
      "Iter 1890, minibatch loss:  0.015135319 , trainning accuracy: 1.00000\n",
      "Iter 1900, minibatch loss:  0.018122606 , trainning accuracy: 1.00000\n",
      "Iter 1910, minibatch loss:  0.037582252 , trainning accuracy: 0.99219\n",
      "Iter 1920, minibatch loss:  0.042521104 , trainning accuracy: 0.97656\n",
      "Iter 1930, minibatch loss:  0.010829467 , trainning accuracy: 1.00000\n",
      "Iter 1940, minibatch loss:  0.05931381 , trainning accuracy: 0.99219\n",
      "Iter 1950, minibatch loss:  0.032674223 , trainning accuracy: 0.99219\n",
      "Iter 1960, minibatch loss:  0.019131985 , trainning accuracy: 0.99219\n",
      "Iter 1970, minibatch loss:  0.018945742 , trainning accuracy: 0.99219\n",
      "Iter 1980, minibatch loss:  0.008392636 , trainning accuracy: 1.00000\n",
      "Iter 1990, minibatch loss:  0.004489944 , trainning accuracy: 1.00000\n",
      "Iter 2000, minibatch loss:  0.110850096 , trainning accuracy: 0.97656\n",
      "Iter 2010, minibatch loss:  0.0035502964 , trainning accuracy: 1.00000\n",
      "Iter 2020, minibatch loss:  0.02141634 , trainning accuracy: 0.99219\n",
      "Iter 2030, minibatch loss:  0.08768392 , trainning accuracy: 0.96875\n",
      "Iter 2040, minibatch loss:  0.038701165 , trainning accuracy: 0.99219\n",
      "Iter 2050, minibatch loss:  0.015625779 , trainning accuracy: 1.00000\n",
      "Iter 2060, minibatch loss:  0.028445149 , trainning accuracy: 0.98438\n",
      "Iter 2070, minibatch loss:  0.011097714 , trainning accuracy: 0.99219\n",
      "Iter 2080, minibatch loss:  0.01526973 , trainning accuracy: 1.00000\n",
      "Iter 2090, minibatch loss:  0.031199373 , trainning accuracy: 0.98438\n",
      "Iter 2100, minibatch loss:  0.03609712 , trainning accuracy: 0.99219\n",
      "Iter 2110, minibatch loss:  0.06389341 , trainning accuracy: 0.97656\n",
      "Iter 2120, minibatch loss:  0.027322806 , trainning accuracy: 1.00000\n",
      "Iter 2130, minibatch loss:  0.04315408 , trainning accuracy: 0.98438\n",
      "Iter 2140, minibatch loss:  0.006957216 , trainning accuracy: 1.00000\n",
      "Iter 2150, minibatch loss:  0.009058554 , trainning accuracy: 1.00000\n",
      "Iter 2160, minibatch loss:  0.010870254 , trainning accuracy: 1.00000\n",
      "Iter 2170, minibatch loss:  0.008187124 , trainning accuracy: 1.00000\n",
      "Iter 2180, minibatch loss:  0.006277224 , trainning accuracy: 1.00000\n",
      "Iter 2190, minibatch loss:  0.00465328 , trainning accuracy: 1.00000\n",
      "Iter 2200, minibatch loss:  0.08581854 , trainning accuracy: 0.98438\n",
      "Iter 2210, minibatch loss:  0.018569432 , trainning accuracy: 0.99219\n",
      "Iter 2220, minibatch loss:  0.017661832 , trainning accuracy: 0.99219\n",
      "Iter 2230, minibatch loss:  0.006212919 , trainning accuracy: 1.00000\n",
      "Iter 2240, minibatch loss:  0.019436609 , trainning accuracy: 1.00000\n",
      "Iter 2250, minibatch loss:  0.011262611 , trainning accuracy: 1.00000\n",
      "Iter 2260, minibatch loss:  0.0034683528 , trainning accuracy: 1.00000\n",
      "Iter 2270, minibatch loss:  0.0122014675 , trainning accuracy: 0.99219\n",
      "Iter 2280, minibatch loss:  0.010909394 , trainning accuracy: 1.00000\n",
      "Iter 2290, minibatch loss:  0.017466249 , trainning accuracy: 0.99219\n",
      "Iter 2300, minibatch loss:  0.0169201 , trainning accuracy: 0.99219\n",
      "Iter 2310, minibatch loss:  0.022607405 , trainning accuracy: 0.98438\n",
      "Iter 2320, minibatch loss:  0.0069957813 , trainning accuracy: 1.00000\n",
      "Iter 2330, minibatch loss:  0.0351442 , trainning accuracy: 0.99219\n",
      "Iter 2340, minibatch loss:  0.04700403 , trainning accuracy: 0.98438\n",
      "Iter 2350, minibatch loss:  0.021624519 , trainning accuracy: 0.99219\n",
      "Iter 2360, minibatch loss:  0.012574766 , trainning accuracy: 1.00000\n",
      "Iter 2370, minibatch loss:  0.009052971 , trainning accuracy: 0.99219\n",
      "Iter 2380, minibatch loss:  0.017191064 , trainning accuracy: 0.99219\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter 2390, minibatch loss:  0.006955209 , trainning accuracy: 1.00000\n",
      "Iter 2400, minibatch loss:  0.04977234 , trainning accuracy: 0.99219\n",
      "Iter 2410, minibatch loss:  0.0070856325 , trainning accuracy: 1.00000\n",
      "Iter 2420, minibatch loss:  0.003011074 , trainning accuracy: 1.00000\n",
      "Iter 2430, minibatch loss:  0.0068282345 , trainning accuracy: 1.00000\n",
      "Iter 2440, minibatch loss:  0.017458495 , trainning accuracy: 1.00000\n",
      "Iter 2450, minibatch loss:  0.04974691 , trainning accuracy: 0.98438\n",
      "Iter 2460, minibatch loss:  0.020353751 , trainning accuracy: 0.99219\n",
      "Iter 2470, minibatch loss:  0.020097304 , trainning accuracy: 1.00000\n",
      "Iter 2480, minibatch loss:  0.04212044 , trainning accuracy: 0.98438\n",
      "Iter 2490, minibatch loss:  0.02140142 , trainning accuracy: 0.99219\n",
      "Iter 2500, minibatch loss:  0.010417877 , trainning accuracy: 1.00000\n",
      "Iter 2510, minibatch loss:  0.013074763 , trainning accuracy: 1.00000\n",
      "Iter 2520, minibatch loss:  0.059563033 , trainning accuracy: 0.98438\n",
      "Iter 2530, minibatch loss:  0.0331704 , trainning accuracy: 0.98438\n",
      "Iter 2540, minibatch loss:  0.009777021 , trainning accuracy: 1.00000\n",
      "Iter 2550, minibatch loss:  0.03438524 , trainning accuracy: 0.99219\n",
      "Iter 2560, minibatch loss:  0.021695653 , trainning accuracy: 0.99219\n",
      "Iter 2570, minibatch loss:  0.008520996 , trainning accuracy: 1.00000\n",
      "Iter 2580, minibatch loss:  0.011270035 , trainning accuracy: 1.00000\n",
      "Iter 2590, minibatch loss:  0.03685184 , trainning accuracy: 0.99219\n",
      "Iter 2600, minibatch loss:  0.01035554 , trainning accuracy: 1.00000\n",
      "Iter 2610, minibatch loss:  0.0047564963 , trainning accuracy: 1.00000\n",
      "Iter 2620, minibatch loss:  0.0035578352 , trainning accuracy: 1.00000\n",
      "Iter 2630, minibatch loss:  0.0030304193 , trainning accuracy: 1.00000\n",
      "Iter 2640, minibatch loss:  0.0091027245 , trainning accuracy: 1.00000\n",
      "Iter 2650, minibatch loss:  0.042249095 , trainning accuracy: 0.98438\n",
      "Iter 2660, minibatch loss:  0.022614086 , trainning accuracy: 0.99219\n",
      "Iter 2670, minibatch loss:  0.0032277482 , trainning accuracy: 1.00000\n",
      "Iter 2680, minibatch loss:  0.0061440663 , trainning accuracy: 1.00000\n",
      "Iter 2690, minibatch loss:  0.010325233 , trainning accuracy: 1.00000\n",
      "Iter 2700, minibatch loss:  0.0033711987 , trainning accuracy: 1.00000\n",
      "Iter 2710, minibatch loss:  0.022048276 , trainning accuracy: 0.99219\n",
      "Iter 2720, minibatch loss:  0.016340872 , trainning accuracy: 0.99219\n",
      "Iter 2730, minibatch loss:  0.033596165 , trainning accuracy: 0.99219\n",
      "Iter 2740, minibatch loss:  0.02474067 , trainning accuracy: 0.99219\n",
      "Iter 2750, minibatch loss:  0.045358382 , trainning accuracy: 0.98438\n",
      "Iter 2760, minibatch loss:  0.010208858 , trainning accuracy: 1.00000\n",
      "Iter 2770, minibatch loss:  0.006065234 , trainning accuracy: 1.00000\n",
      "Iter 2780, minibatch loss:  0.011359379 , trainning accuracy: 1.00000\n",
      "Iter 2790, minibatch loss:  0.00477941 , trainning accuracy: 1.00000\n",
      "Iter 2800, minibatch loss:  0.00238265 , trainning accuracy: 1.00000\n",
      "Iter 2810, minibatch loss:  0.03159908 , trainning accuracy: 0.98438\n",
      "Iter 2820, minibatch loss:  0.017425265 , trainning accuracy: 0.99219\n",
      "Iter 2830, minibatch loss:  0.015694208 , trainning accuracy: 1.00000\n",
      "Iter 2840, minibatch loss:  0.0119898915 , trainning accuracy: 1.00000\n",
      "Iter 2850, minibatch loss:  0.010589762 , trainning accuracy: 1.00000\n",
      "Iter 2860, minibatch loss:  0.009566497 , trainning accuracy: 0.99219\n",
      "Iter 2870, minibatch loss:  0.007391126 , trainning accuracy: 1.00000\n",
      "Iter 2880, minibatch loss:  0.010393932 , trainning accuracy: 1.00000\n",
      "Iter 2890, minibatch loss:  0.039154217 , trainning accuracy: 0.98438\n",
      "Iter 2900, minibatch loss:  0.029351722 , trainning accuracy: 0.99219\n",
      "Iter 2910, minibatch loss:  0.0074087903 , trainning accuracy: 1.00000\n",
      "Iter 2920, minibatch loss:  0.0050501674 , trainning accuracy: 1.00000\n",
      "Iter 2930, minibatch loss:  0.008428144 , trainning accuracy: 1.00000\n",
      "Iter 2940, minibatch loss:  0.011012394 , trainning accuracy: 1.00000\n",
      "Iter 2950, minibatch loss:  0.017328255 , trainning accuracy: 0.99219\n",
      "Iter 2960, minibatch loss:  0.008171138 , trainning accuracy: 1.00000\n",
      "Iter 2970, minibatch loss:  0.005301489 , trainning accuracy: 1.00000\n",
      "Iter 2980, minibatch loss:  0.008661659 , trainning accuracy: 1.00000\n",
      "Iter 2990, minibatch loss:  0.022755919 , trainning accuracy: 0.99219\n",
      "Iter 3000, minibatch loss:  0.016197596 , trainning accuracy: 1.00000\n",
      "Iter 3010, minibatch loss:  0.0176556 , trainning accuracy: 0.99219\n",
      "Iter 3020, minibatch loss:  0.0016254646 , trainning accuracy: 1.00000\n",
      "Iter 3030, minibatch loss:  0.007251393 , trainning accuracy: 1.00000\n",
      "Iter 3040, minibatch loss:  0.013488911 , trainning accuracy: 0.99219\n",
      "Iter 3050, minibatch loss:  0.0015842073 , trainning accuracy: 1.00000\n",
      "Iter 3060, minibatch loss:  0.006572103 , trainning accuracy: 1.00000\n",
      "Iter 3070, minibatch loss:  0.010750104 , trainning accuracy: 1.00000\n",
      "Iter 3080, minibatch loss:  0.0041762963 , trainning accuracy: 1.00000\n",
      "Iter 3090, minibatch loss:  0.029696513 , trainning accuracy: 0.99219\n",
      "Iter 3100, minibatch loss:  0.02408305 , trainning accuracy: 0.99219\n",
      "Iter 3110, minibatch loss:  0.0051533445 , trainning accuracy: 1.00000\n",
      "Iter 3120, minibatch loss:  0.06987572 , trainning accuracy: 0.98438\n",
      "Optimization finished!\n"
     ]
    }
   ],
   "source": [
    "# 开始训练,并输出当前cost以及训练准确率\n",
    "with tf.Session() as sess:\n",
    "    sess.run(init_op)\n",
    "    step = 1\n",
    "    while step * batch_size < max_samples:\n",
    "        batch_x, batch_y = mnist.train.next_batch(batch_size)\n",
    "        batch_x = batch_x.reshape((batch_size, timestep_size, input_size))\n",
    "        sess.run(optimizer, feed_dict={x: batch_x, y: batch_y})\n",
    "        if step % display_step == 0:\n",
    "            acc = sess.run(accuracy, feed_dict={x: batch_x, y: batch_y })\n",
    "            loss = sess.run(cross_entropy, feed_dict={x: batch_x, y: batch_y})\n",
    "            print(\"Iter \" + str(step) + \", minibatch loss: \", loss, \", trainning accuracy: %.5f\" % acc)\n",
    "        step += 1\n",
    "    print(\"Optimization finished!\")\n",
    "        "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "在测试集上测试"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
