{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Restore a saved TensorFlow CNN checkpoint and evaluate it on CIFAR-10. Based on: https://www.easy-tensorflow.com/tf-tutorials/basics/save-and-restore"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from __future__ import division, print_function, absolute_import\n",
    "\n",
    "import os\n",
    "import pickle\n",
    "\n",
    "import numpy as np\n",
    "import tensorflow as tf"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Load data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "HEIGHT = 32\n",
    "WIDTH = 32\n",
    "DEPTH = 3\n",
    "\n",
    "NUM_CLASSES = 10\n",
    "\n",
    "dataset_dir = '../../data/cifar-10-batches-py'\n",
    "\n",
    "train_filenames = [\n",
    "    'data_batch_1',\n",
    "    'data_batch_2',\n",
    "    'data_batch_3',\n",
    "    'data_batch_4',\n",
    "    'data_batch_5'\n",
    "]\n",
    "\n",
    "test_filenames = [\n",
    "    'test_batch'\n",
    "]\n",
    "\n",
    "def unpickle(filename):\n",
    "    \"\"\"Decode one CIFAR-10 batch file into a dict (keys include 'data', 'labels').\"\"\"\n",
    "    # NOTE(review): pickle.load can execute arbitrary code; only use on the\n",
    "    # trusted, well-known CIFAR-10 distribution files.\n",
    "    with open(filename, 'rb') as f:\n",
    "        return pickle.load(f, encoding='latin1')\n",
    "\n",
    "def load_data(dataset_dir, train_filenames, test_filenames):\n",
    "    \"\"\"Load all training batches plus the test batch.\n",
    "\n",
    "    Returns (train_images, train_labels, test_images, test_labels); images\n",
    "    are arrays of flattened pixel rows exactly as stored in the batch files.\n",
    "    \"\"\"\n",
    "    # Read each batch exactly once and concatenate a single time: the previous\n",
    "    # version unpickled the first batch twice and grew the arrays with\n",
    "    # np.concatenate inside the loop (re-copies everything per iteration).\n",
    "    batches = [unpickle(os.path.join(dataset_dir, name)) for name in train_filenames]\n",
    "    train_images = np.concatenate([b['data'] for b in batches], axis=0)\n",
    "    train_labels = np.concatenate([b['labels'] for b in batches], axis=0)\n",
    "    test_batch = unpickle(os.path.join(dataset_dir, test_filenames[0]))\n",
    "    return train_images, train_labels, test_batch['data'], test_batch['labels']\n",
    "\n",
    "\n",
    "def dense_to_one_hot(labels_dense, num_classes):\n",
    "    \"\"\"Convert class labels from scalars to one-hot vectors.\"\"\"\n",
    "    labels_dense = np.asarray(labels_dense)  # also accept plain Python lists\n",
    "    num_labels = labels_dense.shape[0]\n",
    "    index_offset = np.arange(num_labels) * num_classes\n",
    "    labels_one_hot = np.zeros((num_labels, num_classes))\n",
    "    labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1\n",
    "    return labels_one_hot\n",
    "\n",
    "\n",
    "def get_data():\n",
    "    \"\"\"Return CIFAR-10 as NHWC image arrays and one-hot label arrays.\"\"\"\n",
    "    train_images, train_labels, test_images, test_labels = load_data(\n",
    "        dataset_dir, train_filenames, test_filenames)\n",
    "\n",
    "    # Rows are flat DEPTH*HEIGHT*WIDTH vectors in CHW order; reshape, then\n",
    "    # transpose to NHWC (what the saved graph's 'X' placeholder expects).\n",
    "    # Use the module constants instead of repeating magic numbers.\n",
    "    train_images = train_images.reshape((-1, DEPTH, HEIGHT, WIDTH))\n",
    "    train_images = train_images.transpose(0, 2, 3, 1)\n",
    "    train_labels = dense_to_one_hot(np.array(train_labels), NUM_CLASSES)\n",
    "\n",
    "    test_images = test_images.reshape((-1, DEPTH, HEIGHT, WIDTH))\n",
    "    test_images = test_images.transpose(0, 2, 3, 1)\n",
    "    test_labels = dense_to_one_hot(np.array(test_labels), NUM_CLASSES)\n",
    "\n",
    "    return train_images, train_labels, test_images, test_labels"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "((50000, 32, 32, 3), (50000, 10), (10000, 32, 32, 3), (10000, 10))"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Load the full dataset once; the bare tuple on the last line is the\n",
    "# cell's rich-display output (no print needed).\n",
    "train_images, train_labels, test_images, test_labels = get_data()\n",
    "train_images.shape, train_labels.shape, test_images.shape, test_labels.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Display operations"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "name: \"X\"\n",
      "op: \"Placeholder\"\n",
      "attr {\n",
      "  key: \"dtype\"\n",
      "  value {\n",
      "    type: DT_FLOAT\n",
      "  }\n",
      "}\n",
      "attr {\n",
      "  key: \"shape\"\n",
      "  value {\n",
      "    shape {\n",
      "      dim {\n",
      "        size: -1\n",
      "      }\n",
      "      dim {\n",
      "        size: 32\n",
      "      }\n",
      "      dim {\n",
      "        size: 32\n",
      "      }\n",
      "      dim {\n",
      "        size: 3\n",
      "      }\n",
      "    }\n",
      "  }\n",
      "}\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Checkpoint prefix written by the training notebook (saved at step 50).\n",
    "model_prefix = '../cnn_raw/cifar10-50'\n",
    "# delete the current graph so restored ops don't collide with leftovers\n",
    "tf.reset_default_graph()\n",
    "# import the graph structure from the .meta file; the returned Saver is\n",
    "# used below to restore the variable values from the checkpoint\n",
    "imported_graph = tf.train.import_meta_graph(model_prefix + '.meta')\n",
    "operations = tf.get_default_graph().get_operations()\n",
    "# show the first op (the 'X' input placeholder) to confirm the import worked\n",
    "print(operations[0])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Accuracy"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From D:\\Programs\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\training\\saver.py:1276: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use standard file APIs to check for files with this prefix.\n",
      "INFO:tensorflow:Restoring parameters from ../cnn_raw/cifar10-50\n",
      "0.69\n",
      "0.61\n",
      "0.65\n",
      "0.53\n",
      "0.66\n",
      "0.7\n",
      "0.65\n",
      "0.61\n",
      "0.57\n",
      "0.7\n"
     ]
    }
   ],
   "source": [
    "# Look up the restored graph's input tensors by their saved names.\n",
    "g = tf.get_default_graph()\n",
    "X = g.get_tensor_by_name('X:0')\n",
    "Y = g.get_tensor_by_name('Y:0')\n",
    "keep_prob = g.get_tensor_by_name('keep_prob:0')\n",
    "\n",
    "with tf.Session() as sess:\n",
    "    # restore the saved variable values into the imported graph\n",
    "    imported_graph.restore(sess, model_prefix)\n",
    "    # evaluate the 'accuracy:0' tensor on ten consecutive 100-image test\n",
    "    # slices; keep_prob=1.0 disables dropout for inference\n",
    "    for i in range(10):\n",
    "        batch_start = i * 100\n",
    "        batch_end = batch_start + 100\n",
    "        acc = sess.run('accuracy:0', feed_dict={X: test_images[batch_start:batch_end], \n",
    "                                                Y: test_labels[batch_start:batch_end], \n",
    "                                                keep_prob: 1.0})\n",
    "        print(acc)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Logits"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from ../cnn_raw/cifar10-50\n",
      "[0] [0]\n",
      "[9] [9]\n",
      "[5] [6]\n",
      "[7] [7]\n",
      "[9] [9]\n",
      "[8] [2]\n",
      "[5] [5]\n",
      "[7] [7]\n",
      "[8] [8]\n",
      "[6] [6]\n"
     ]
    }
   ],
   "source": [
    "with tf.Session() as sess:\n",
    "    # restore the trained weights before running inference\n",
    "    imported_graph.restore(sess, model_prefix)\n",
    "    # compare the true class against the argmax of the logits for test\n",
    "    # images 10..19, one image per run\n",
    "    for i in range(10, 20):\n",
    "        sample_x = test_images[i:i+1]\n",
    "        sample_y = test_labels[i:i+1]\n",
    "        # 'Predictions/Add:0' is the logits tensor of the saved graph\n",
    "        logits = sess.run('Predictions/Add:0', feed_dict={X: sample_x,  Y: sample_y,  keep_prob: 1.0})\n",
    "        print(np.argmax(sample_y, 1), np.argmax(logits, 1))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
