{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"A very simple MNIST classifier.\n",
    "See extensive documentation at https://www.tensorflow.org/get_started/mnist/beginners\n",
    "\"\"\"\n",
    "\n",
    "import tensorflow as tf\n",
    "from tensorflow.examples.tutorials.mnist import input_data\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.metrics import confusion_matrix\n",
    "import time\n",
    "from datetime import timedelta\n",
    "import math"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n",
      "Extracting /tmp/tensorflow/mnist/input_data/train-images-idx3-ubyte.gz\n",
      "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n",
      "Extracting /tmp/tensorflow/mnist/input_data/train-labels-idx1-ubyte.gz\n",
      "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n",
      "Extracting /tmp/tensorflow/mnist/input_data/t10k-images-idx3-ubyte.gz\n",
      "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n",
      "Extracting /tmp/tensorflow/mnist/input_data/t10k-labels-idx1-ubyte.gz\n"
     ]
    }
   ],
   "source": [
    "# Import data\n",
    "# Downloads the four MNIST archives into data_dir (if not already\n",
    "# present) and loads train/validation/test splits with one-hot labels.\n",
    "data_dir = '/tmp/tensorflow/mnist/input_data'\n",
    "mnist = input_data.read_data_sets(data_dir, one_hot=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Size of:\n",
      "- Training-set:\t\t55000\n",
      "- Test-set:\t\t10000\n",
      "- Validation-set:\t5000\n",
      "10000\n",
      "10\n"
     ]
    }
   ],
   "source": [
    "# Report split sizes and derive the class labels used later for evaluation.\n",
    "n_train = len(mnist.train.labels)\n",
    "n_test = len(mnist.test.labels)\n",
    "n_valid = len(mnist.validation.labels)\n",
    "print(\"Size of:\")\n",
    "print(\"- Training-set:\\t\\t{}\".format(n_train))\n",
    "print(\"- Test-set:\\t\\t{}\".format(n_test))\n",
    "print(\"- Validation-set:\\t{}\".format(n_valid))\n",
    "\n",
    "# Integer class label per test example (consumed by feed_dict_test below).\n",
    "mnist.test.cls = np.argmax(mnist.test.labels, axis=1)\n",
    "print(len(mnist.test.cls))\n",
    "\n",
    "# Number of distinct classes (the 10 digits).\n",
    "cls_num = len(np.unique(mnist.test.cls))\n",
    "print(cls_num)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Image geometry: MNIST digits are 28x28 single-channel images.\n",
    "img_size = 28\n",
    "img_size_total = img_size * img_size  # flattened pixel count (784)\n",
    "img_shape = (img_size, img_size)\n",
    "num_channels = 1\n",
    "num_classes = cls_num\n",
    "\n",
    "# Hidden-layer widths for the fully connected network built below.\n",
    "# Fixed: the former n_hidden_5 = 100 constant was removed -- the network\n",
    "# has only four hidden layers plus the output layer, so it was never used.\n",
    "n_hidden_1 = 200\n",
    "n_hidden_2 = 100\n",
    "n_hidden_3 = 60\n",
    "n_hidden_4 = 30\n",
    "\n",
    "# Default number of training epochs (the training cell overrides this).\n",
    "training_epochs = 200"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Graph inputs: x holds flattened images, y_true one-hot labels, and\n",
    "# y_true_cls integer class labels (fed only during evaluation, not training).\n",
    "x = tf.placeholder(tf.float32, [None, img_size_total])\n",
    "y_true = tf.placeholder(tf.float32, [None, num_classes])\n",
    "y_true_cls = tf.placeholder(tf.int64, [None])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "layer_1: (?, 200) W1: (784, 200) b1: (200,)\n",
      "layer_2: (?, 100) W2: (200, 100) b2: (100,)\n",
      "layer_3: (?, 10) W3: (100, 60) b3: (60,)\n"
     ]
    }
   ],
   "source": [
    "# Fully connected classifier: 784 -> 200 -> 100 -> 60 -> 30 -> 10.\n",
    "# Hidden-layer biases start at 0.1 so the ReLU units begin active.\n",
    "# NOTE(review): layer 1 uses truncated_normal while the later layers use\n",
    "# random_normal -- this looks unintentional, but it is left unchanged here\n",
    "# to preserve the original initialisation behaviour.\n",
    "w1 = tf.Variable(tf.truncated_normal([img_size_total, n_hidden_1],stddev=0.1))\n",
    "b1 = tf.Variable(tf.ones([n_hidden_1])/10)\n",
    "y1 = tf.matmul(x, w1) + b1\n",
    "y1 = tf.nn.relu(y1)\n",
    "print( 'layer_1:', y1.get_shape(), 'W1:', w1.get_shape(), 'b1:', b1.get_shape())\n",
    "\n",
    "w2 = tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2], stddev=0.1))\n",
    "b2 = tf.Variable(tf.ones([n_hidden_2])/10)\n",
    "y2 = tf.matmul(y1, w2) + b2\n",
    "y2 = tf.nn.relu(y2)\n",
    "print( 'layer_2:', y2.get_shape(), 'W2:', w2.get_shape(), 'b2:', b2.get_shape())\n",
    "\n",
    "w3 = tf.Variable(tf.random_normal([n_hidden_2, n_hidden_3], stddev=0.1))\n",
    "b3 = tf.Variable(tf.ones([n_hidden_3])/10)\n",
    "y3 = tf.matmul(y2, w3) + b3\n",
    "y3 = tf.nn.relu(y3)\n",
    "print( 'layer_3:', y3.get_shape(), 'W3:', w3.get_shape(), 'b3:', b3.get_shape())\n",
    "\n",
    "w4 = tf.Variable(tf.random_normal([n_hidden_3, n_hidden_4], stddev=0.1))\n",
    "b4 = tf.Variable(tf.ones([n_hidden_4])/10)\n",
    "y4 = tf.matmul(y3, w4) + b4\n",
    "y4 = tf.nn.relu(y4)\n",
    "print( 'layer_4:', y4.get_shape(), 'W4:', w4.get_shape(), 'b4:', b4.get_shape())\n",
    "\n",
    "# Output layer: raw logits (softmax is applied in the loss / prediction ops).\n",
    "w5 = tf.Variable(tf.random_normal([n_hidden_4, num_classes], stddev=0.1))\n",
    "b5 = tf.Variable(tf.zeros([num_classes]))\n",
    "logits = tf.matmul(y4, w5) + b5\n",
    "# Fixed: this line previously printed the label 'layer_3' together with the\n",
    "# shapes of w3/b3 next to the logits shape, which was misleading.\n",
    "print( 'output:', logits.get_shape(), 'W5:', w5.get_shape(), 'b5:', b5.get_shape())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Softmax turns the logits into class probabilities; argmax over the\n",
    "# class axis gives the predicted class index per example.\n",
    "y_pred = tf.nn.softmax(logits)\n",
    "y_pred_cls = tf.argmax(y_pred, axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Cross-entropy is computed from the raw logits (this op applies softmax\n",
    "# internally for numerical stability); y_pred above is for inference only.\n",
    "cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=logits,\n",
    "                                                        labels=y_true)\n",
    "# Scalar training objective: mean cross-entropy over the batch.\n",
    "cost = tf.reduce_mean(cross_entropy)\n",
    "optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Accuracy: fraction of examples whose predicted class matches the\n",
    "# true class label (booleans cast to 0.0/1.0, then averaged).\n",
    "correct_prediction = tf.equal(y_pred_cls, y_true_cls)\n",
    "accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Create the TensorFlow session, initialise all graph variables, and\n",
    "# set the mini-batch size used by the training loop below.\n",
    "session = tf.Session()\n",
    "session.run(tf.global_variables_initializer())\n",
    "batch_size = 100\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "display_step = 1  # print the average cost every display_step epochs\n",
    "\n",
    "def optimize(training_epochs):\n",
    "    \"\"\"Train the network for `training_epochs` epochs of mini-batch SGD.\n",
    "\n",
    "    Each epoch iterates over the whole training set in batches of\n",
    "    `batch_size`, running the Adam optimizer and accumulating the average\n",
    "    cross-entropy cost, which is printed every `display_step` epochs.\n",
    "\n",
    "    Uses the module-level `mnist`, `session`, `optimizer`, `cost`, `x`,\n",
    "    `y_true`, `batch_size` and `display_step` names defined above.\n",
    "    \"\"\"\n",
    "    # Cleaned up: removed the unused total_iteration counter and the\n",
    "    # commented-out remains of an earlier single-batch training loop.\n",
    "    for epoch in range(training_epochs):\n",
    "        avg_cost = 0.\n",
    "        total_batch = int(mnist.train.num_examples/batch_size)\n",
    "        print(\"Total batch is: \" + str(total_batch))\n",
    "        # Loop over all batches\n",
    "        for i in range(total_batch):\n",
    "            batch_x, batch_y = mnist.train.next_batch(batch_size)\n",
    "            # Run optimization op (backprop) and cost op (to get loss value)\n",
    "            _, c = session.run([optimizer, cost], feed_dict={x: batch_x,\n",
    "                                                            y_true: batch_y})\n",
    "            # Accumulate the running average of the per-batch losses\n",
    "            avg_cost += c / total_batch\n",
    "        # Display logs per epoch step\n",
    "        if epoch % display_step == 0:\n",
    "            print(\"Epoch:\", '%04d' % (epoch+1), \"cost={:.9f}\".format(avg_cost))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Feed dict covering the entire test set, used for evaluation only.\n",
    "feed_dict_test = {x: mnist.test.images,\n",
    "                  y_true: mnist.test.labels,\n",
    "                  y_true_cls: mnist.test.cls}\n",
    "\n",
    "def print_accuracy():\n",
    "    \"\"\"Compute and print classification accuracy on the full test set.\"\"\"\n",
    "    # Use TensorFlow to compute the accuracy.\n",
    "    acc = session.run(accuracy, feed_dict=feed_dict_test)\n",
    "    \n",
    "    # Print the accuracy.\n",
    "    print(\"Accuracy on test-set: {0:.1%}\".format(acc))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Total batch is: 550\n",
      "Epoch: 0001 cost=0.370948775\n",
      "Total batch is: 550\n",
      "Epoch: 0002 cost=0.133807702\n",
      "Total batch is: 550\n",
      "Epoch: 0003 cost=0.090600319\n",
      "Total batch is: 550\n",
      "Epoch: 0004 cost=0.065163070\n",
      "Total batch is: 550\n",
      "Epoch: 0005 cost=0.048994677\n",
      "Total batch is: 550\n",
      "Epoch: 0006 cost=0.038989920\n",
      "Total batch is: 550\n",
      "Epoch: 0007 cost=0.030687504\n",
      "Total batch is: 550\n",
      "Epoch: 0008 cost=0.026167726\n",
      "Total batch is: 550\n",
      "Epoch: 0009 cost=0.022932683\n",
      "Total batch is: 550\n",
      "Epoch: 0010 cost=0.019422765\n",
      "Total batch is: 550\n",
      "Epoch: 0011 cost=0.015949600\n",
      "Total batch is: 550\n",
      "Epoch: 0012 cost=0.016669953\n",
      "Total batch is: 550\n",
      "Epoch: 0013 cost=0.015192673\n",
      "Total batch is: 550\n",
      "Epoch: 0014 cost=0.014384372\n",
      "Total batch is: 550\n",
      "Epoch: 0015 cost=0.014077299\n",
      "Total batch is: 550\n",
      "Epoch: 0016 cost=0.011142751\n",
      "Total batch is: 550\n",
      "Epoch: 0017 cost=0.009048214\n",
      "Total batch is: 550\n",
      "Epoch: 0018 cost=0.013938020\n",
      "Total batch is: 550\n",
      "Epoch: 0019 cost=0.006639174\n",
      "Total batch is: 550\n",
      "Epoch: 0020 cost=0.011478548\n",
      "Total batch is: 550\n",
      "Epoch: 0021 cost=0.009692585\n",
      "Total batch is: 550\n",
      "Epoch: 0022 cost=0.008309373\n",
      "Total batch is: 550\n",
      "Epoch: 0023 cost=0.009629598\n",
      "Total batch is: 550\n",
      "Epoch: 0024 cost=0.008216755\n",
      "Total batch is: 550\n",
      "Epoch: 0025 cost=0.010295248\n",
      "Total batch is: 550\n",
      "Epoch: 0026 cost=0.009365636\n",
      "Total batch is: 550\n",
      "Epoch: 0027 cost=0.005944306\n",
      "Total batch is: 550\n",
      "Epoch: 0028 cost=0.005755701\n",
      "Total batch is: 550\n",
      "Epoch: 0029 cost=0.009462505\n",
      "Total batch is: 550\n",
      "Epoch: 0030 cost=0.005475545\n",
      "Total batch is: 550\n",
      "Epoch: 0031 cost=0.007169202\n",
      "Total batch is: 550\n",
      "Epoch: 0032 cost=0.005773426\n",
      "Total batch is: 550\n",
      "Epoch: 0033 cost=0.006998165\n",
      "Total batch is: 550\n",
      "Epoch: 0034 cost=0.008690100\n",
      "Total batch is: 550\n",
      "Epoch: 0035 cost=0.004547695\n",
      "Total batch is: 550\n",
      "Epoch: 0036 cost=0.010469199\n",
      "Total batch is: 550\n",
      "Epoch: 0037 cost=0.005997569\n",
      "Total batch is: 550\n",
      "Epoch: 0038 cost=0.005417275\n",
      "Total batch is: 550\n",
      "Epoch: 0039 cost=0.005499939\n",
      "Total batch is: 550\n",
      "Epoch: 0040 cost=0.004874430\n",
      "Total batch is: 550\n",
      "Epoch: 0041 cost=0.006592002\n",
      "Total batch is: 550\n",
      "Epoch: 0042 cost=0.003928856\n",
      "Total batch is: 550\n",
      "Epoch: 0043 cost=0.005350043\n",
      "Total batch is: 550\n",
      "Epoch: 0044 cost=0.008753769\n",
      "Total batch is: 550\n",
      "Epoch: 0045 cost=0.004420875\n",
      "Total batch is: 550\n",
      "Epoch: 0046 cost=0.003127419\n",
      "Total batch is: 550\n",
      "Epoch: 0047 cost=0.007242044\n",
      "Total batch is: 550\n",
      "Epoch: 0048 cost=0.002260700\n",
      "Total batch is: 550\n",
      "Epoch: 0049 cost=0.006213566\n",
      "Total batch is: 550\n",
      "Epoch: 0050 cost=0.002923544\n",
      "Total batch is: 550\n",
      "Epoch: 0051 cost=0.005227040\n",
      "Total batch is: 550\n",
      "Epoch: 0052 cost=0.006851313\n",
      "Total batch is: 550\n",
      "Epoch: 0053 cost=0.005615885\n",
      "Total batch is: 550\n",
      "Epoch: 0054 cost=0.003422904\n",
      "Total batch is: 550\n",
      "Epoch: 0055 cost=0.004502958\n",
      "Total batch is: 550\n",
      "Epoch: 0056 cost=0.001411655\n",
      "Total batch is: 550\n",
      "Epoch: 0057 cost=0.002533294\n",
      "Total batch is: 550\n",
      "Epoch: 0058 cost=0.008286537\n",
      "Total batch is: 550\n",
      "Epoch: 0059 cost=0.004974223\n",
      "Total batch is: 550\n",
      "Epoch: 0060 cost=0.004890428\n",
      "Total batch is: 550\n",
      "Epoch: 0061 cost=0.004483028\n",
      "Total batch is: 550\n",
      "Epoch: 0062 cost=0.001764240\n",
      "Total batch is: 550\n",
      "Epoch: 0063 cost=0.005630498\n",
      "Total batch is: 550\n",
      "Epoch: 0064 cost=0.006237102\n",
      "Total batch is: 550\n",
      "Epoch: 0065 cost=0.002265483\n",
      "Total batch is: 550\n",
      "Epoch: 0066 cost=0.002973885\n",
      "Total batch is: 550\n",
      "Epoch: 0067 cost=0.002190333\n",
      "Total batch is: 550\n",
      "Epoch: 0068 cost=0.007219070\n",
      "Total batch is: 550\n",
      "Epoch: 0069 cost=0.004298306\n",
      "Total batch is: 550\n",
      "Epoch: 0070 cost=0.003319167\n",
      "Total batch is: 550\n",
      "Epoch: 0071 cost=0.002816796\n",
      "Total batch is: 550\n",
      "Epoch: 0072 cost=0.005455134\n",
      "Total batch is: 550\n",
      "Epoch: 0073 cost=0.004350879\n",
      "Total batch is: 550\n",
      "Epoch: 0074 cost=0.001383332\n",
      "Total batch is: 550\n",
      "Epoch: 0075 cost=0.001320353\n",
      "Total batch is: 550\n",
      "Epoch: 0076 cost=0.008012715\n",
      "Total batch is: 550\n",
      "Epoch: 0077 cost=0.002167472\n",
      "Total batch is: 550\n",
      "Epoch: 0078 cost=0.004963508\n",
      "Total batch is: 550\n",
      "Epoch: 0079 cost=0.001261945\n",
      "Total batch is: 550\n",
      "Epoch: 0080 cost=0.002498551\n",
      "Total batch is: 550\n",
      "Epoch: 0081 cost=0.003543946\n",
      "Total batch is: 550\n",
      "Epoch: 0082 cost=0.004763678\n",
      "Total batch is: 550\n",
      "Epoch: 0083 cost=0.001695636\n",
      "Total batch is: 550\n",
      "Epoch: 0084 cost=0.003679410\n",
      "Total batch is: 550\n",
      "Epoch: 0085 cost=0.005059035\n",
      "Total batch is: 550\n",
      "Epoch: 0086 cost=0.001564448\n",
      "Total batch is: 550\n",
      "Epoch: 0087 cost=0.000073221\n",
      "Total batch is: 550\n",
      "Epoch: 0088 cost=0.000022759\n",
      "Total batch is: 550\n",
      "Epoch: 0089 cost=0.000011394\n",
      "Total batch is: 550\n",
      "Epoch: 0090 cost=0.000007976\n",
      "Total batch is: 550\n",
      "Epoch: 0091 cost=0.000005736\n",
      "Total batch is: 550\n",
      "Epoch: 0092 cost=0.000003176\n",
      "Total batch is: 550\n",
      "Epoch: 0093 cost=0.000000591\n",
      "Total batch is: 550\n",
      "Epoch: 0094 cost=0.000000215\n",
      "Total batch is: 550\n",
      "Epoch: 0095 cost=0.000000126\n",
      "Total batch is: 550\n",
      "Epoch: 0096 cost=0.000000081\n",
      "Total batch is: 550\n",
      "Epoch: 0097 cost=0.000000055\n",
      "Total batch is: 550\n",
      "Epoch: 0098 cost=0.000000037\n",
      "Total batch is: 550\n",
      "Epoch: 0099 cost=0.000000026\n",
      "Total batch is: 550\n",
      "Epoch: 0100 cost=0.000000019\n",
      "Total batch is: 550\n",
      "Epoch: 0101 cost=0.000000013\n",
      "Total batch is: 550\n",
      "Epoch: 0102 cost=0.000000009\n",
      "Total batch is: 550\n",
      "Epoch: 0103 cost=0.000000007\n",
      "Total batch is: 550\n",
      "Epoch: 0104 cost=0.000000005\n",
      "Total batch is: 550\n",
      "Epoch: 0105 cost=0.000000004\n",
      "Total batch is: 550\n",
      "Epoch: 0106 cost=0.000000004\n",
      "Total batch is: 550\n",
      "Epoch: 0107 cost=0.000000003\n",
      "Total batch is: 550\n",
      "Epoch: 0108 cost=0.000000002\n",
      "Total batch is: 550\n",
      "Epoch: 0109 cost=0.000000002\n",
      "Total batch is: 550\n",
      "Epoch: 0110 cost=0.000000002\n",
      "Total batch is: 550\n",
      "Epoch: 0111 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0112 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0113 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0114 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0115 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0116 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0117 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0118 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0119 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0120 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0121 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0122 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0123 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0124 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0125 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0126 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0127 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0128 cost=0.025141127\n",
      "Total batch is: 550\n",
      "Epoch: 0129 cost=0.006071468\n",
      "Total batch is: 550\n",
      "Epoch: 0130 cost=0.000809164\n",
      "Total batch is: 550\n",
      "Epoch: 0131 cost=0.000101736\n",
      "Total batch is: 550\n",
      "Epoch: 0132 cost=0.000024125\n",
      "Total batch is: 550\n",
      "Epoch: 0133 cost=0.000014834\n",
      "Total batch is: 550\n",
      "Epoch: 0134 cost=0.000010306\n",
      "Total batch is: 550\n",
      "Epoch: 0135 cost=0.000007218\n",
      "Total batch is: 550\n",
      "Epoch: 0136 cost=0.000005167\n",
      "Total batch is: 550\n",
      "Epoch: 0137 cost=0.000003746\n",
      "Total batch is: 550\n",
      "Epoch: 0138 cost=0.000002714\n",
      "Total batch is: 550\n",
      "Epoch: 0139 cost=0.000001977\n",
      "Total batch is: 550\n",
      "Epoch: 0140 cost=0.000001449\n",
      "Total batch is: 550\n",
      "Epoch: 0141 cost=0.000001063\n",
      "Total batch is: 550\n",
      "Epoch: 0142 cost=0.000000776\n",
      "Total batch is: 550\n",
      "Epoch: 0143 cost=0.000000568\n",
      "Total batch is: 550\n",
      "Epoch: 0144 cost=0.000000416\n",
      "Total batch is: 550\n",
      "Epoch: 0145 cost=0.000000305\n",
      "Total batch is: 550\n",
      "Epoch: 0146 cost=0.000000225\n",
      "Total batch is: 550\n",
      "Epoch: 0147 cost=0.000000166\n",
      "Total batch is: 550\n",
      "Epoch: 0148 cost=0.000000122\n",
      "Total batch is: 550\n",
      "Epoch: 0149 cost=0.000000090\n",
      "Total batch is: 550\n",
      "Epoch: 0150 cost=0.000000066\n",
      "Total batch is: 550\n",
      "Epoch: 0151 cost=0.000000049\n",
      "Total batch is: 550\n",
      "Epoch: 0152 cost=0.000000037\n",
      "Total batch is: 550\n",
      "Epoch: 0153 cost=0.000000027\n",
      "Total batch is: 550\n",
      "Epoch: 0154 cost=0.000000020\n",
      "Total batch is: 550\n",
      "Epoch: 0155 cost=0.000000015\n",
      "Total batch is: 550\n",
      "Epoch: 0156 cost=0.000000011\n",
      "Total batch is: 550\n",
      "Epoch: 0157 cost=0.000000008\n",
      "Total batch is: 550\n",
      "Epoch: 0158 cost=0.000000006\n",
      "Total batch is: 550\n",
      "Epoch: 0159 cost=0.000000004\n",
      "Total batch is: 550\n",
      "Epoch: 0160 cost=0.000000003\n",
      "Total batch is: 550\n",
      "Epoch: 0161 cost=0.000000002\n",
      "Total batch is: 550\n",
      "Epoch: 0162 cost=0.000000002\n",
      "Total batch is: 550\n",
      "Epoch: 0163 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0164 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0165 cost=0.000000001\n",
      "Total batch is: 550\n",
      "Epoch: 0166 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0167 cost=0.000000000\n",
      "Total batch is: 550\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 0168 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0169 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0170 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0171 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0172 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0173 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0174 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0175 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0176 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0177 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0178 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0179 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0180 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0181 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0182 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0183 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0184 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0185 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0186 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0187 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0188 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0189 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0190 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0191 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0192 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0193 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0194 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0195 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0196 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0197 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0198 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0199 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0200 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0201 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0202 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0203 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0204 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0205 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0206 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0207 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0208 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0209 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0210 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0211 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0212 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0213 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0214 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0215 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0216 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0217 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0218 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0219 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0220 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0221 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0222 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0223 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0224 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0225 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0226 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0227 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0228 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0229 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0230 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0231 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0232 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0233 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0234 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0235 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0236 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0237 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0238 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0239 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0240 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0241 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0242 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0243 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0244 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0245 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0246 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0247 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0248 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0249 cost=0.000000000\n",
      "Total batch is: 550\n",
      "Epoch: 0250 cost=0.000000000\n"
     ]
    }
   ],
   "source": [
    "# Train for 250 epochs (overrides the default training_epochs constant).\n",
    "optimize(training_epochs=250)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Accuracy on test-set: 98.4%\n"
     ]
    }
   ],
   "source": [
    "# Evaluate the trained model on the held-out test set.\n",
    "print_accuracy()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
