{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import tensorflow as tf\n",
    "import os\n",
    "from PIL import Image\n",
    "from PIL import ImageFile\n",
    "from DogBreedIdentification_vgg16 import IMAGE_SIZE,inference_op\n",
    "\n",
    "def _testdata_parse_function(filenames):\n",
    "    \"\"\"Deserialize one TFRecord example into a flat uint8 image tensor.\n",
    "\n",
    "    Args:\n",
    "        filenames: scalar string tensor holding one serialized tf.train.Example\n",
    "            (the element yielded by TFRecordDataset).\n",
    "\n",
    "    Returns:\n",
    "        A 1-D uint8 tensor of raw pixel bytes; the caller is responsible for\n",
    "        reshaping it to (IMAGE_SIZE, IMAGE_SIZE, 3).\n",
    "    \"\"\"\n",
    "    features = {\"img_raw\": tf.FixedLenFeature([], tf.string)}\n",
    "    parsed_features = tf.parse_single_example(filenames, features)\n",
    "    image = tf.decode_raw(parsed_features['img_raw'], tf.uint8)\n",
    "    return image\n",
    "\n",
    "def predict_read_tfrecord(filename):\n",
    "    \"\"\"Build a dataset yielding single-image batches from TFRecord file(s).\n",
    "\n",
    "    Args:\n",
    "        filename: a filename string (or list of filenames) of .tfrecords files.\n",
    "\n",
    "    Returns:\n",
    "        A tf.data.Dataset of batches of one flat uint8 image tensor each.\n",
    "    \"\"\"\n",
    "    # tf.data.TFRecordDataset is the supported API; the tf.contrib.data\n",
    "    # variant is deprecated since TF 1.4 and removed in TF 2.x.\n",
    "    dataset = tf.data.TFRecordDataset(filename)\n",
    "    dataset = dataset.map(_testdata_parse_function)\n",
    "    dataset = dataset.batch(1)\n",
    "    return dataset\n",
    "\n",
    "def testdata_convert_to_tfrecord(rootpath,  target_record_dir):\n",
    "    \"\"\"Convert every RGB image under rootpath into its own TFRecord file.\n",
    "\n",
    "    Args:\n",
    "        rootpath: directory containing the raw test images.\n",
    "        target_record_dir: directory to create and fill with one\n",
    "            <imagename>.tfrecords file per RGB image.\n",
    "\n",
    "    Skips the whole conversion when target_record_dir already exists.\n",
    "    \"\"\"\n",
    "    if os.path.exists(target_record_dir):\n",
    "        print(\"the \"+target_record_dir+\" exist, no need to run the testdata_convert_to_tfrecord\")\n",
    "        return\n",
    "    os.makedirs(target_record_dir, mode=0o775, exist_ok=True)\n",
    "    for name in os.listdir(rootpath):\n",
    "        print(\"Processing image:\" + name)\n",
    "        img = Image.open(rootpath+\"/\"+name)\n",
    "        # Bug fix: check the mode *before* opening a TFRecordWriter, so that\n",
    "        # non-RGB images no longer leave behind empty .tfrecords files (an\n",
    "        # empty record file would make the prediction loop fail with an\n",
    "        # OutOfRangeError when its one-element dataset is consumed).\n",
    "        if img.mode != \"RGB\":\n",
    "            continue\n",
    "        img = img.resize((IMAGE_SIZE, IMAGE_SIZE), Image.LANCZOS)\n",
    "        img_raw = img.tobytes()\n",
    "        example = tf.train.Example(\n",
    "            features=tf.train.Features(feature={\n",
    "                \"img_raw\": tf.train.Feature(bytes_list=tf.train.BytesList(value=[img_raw]))}))\n",
    "        writer = tf.python_io.TFRecordWriter(target_record_dir+\"/\"+name+\".tfrecords\")\n",
    "        writer.write(example.SerializeToString())\n",
    "        writer.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "the ./process_workspace/TestSet/ exist, no need to run the testdata_convert_to_tfrecord\n",
      "Found checkpoint files , start to training from restored data\n",
      "INFO:tensorflow:Restoring parameters from ./process_workspace/DogBreedIdentificationVgg16_model.ckpt\n",
      "<tf.Variable 'conv1_1/w:0' shape=(3, 3, 3, 64) dtype=float32_ref> [-0.00129731 -0.00098393 -0.0023774  -0.00264734 -0.00202067 -0.00149379\n",
      " -0.00149654 -0.00160056 -0.00215215 -0.00270871 -0.00350231 -0.00260735\n",
      " -0.00203999 -0.00213429 -0.00302842 -0.00145519 -0.0009393  -0.00346301\n",
      " -0.00182204 -0.00191001 -0.0023635  -0.00241915 -0.00147808 -0.00208659\n",
      " -0.00265882 -0.00148558 -0.00227904 -0.00215934 -0.00277669 -0.00235836\n",
      " -0.00232352 -0.00165333 -0.00364198 -0.00150417 -0.00333861 -0.00311953\n",
      " -0.00207614 -0.00095401 -0.00306276 -0.00239172 -0.00197167 -0.00232535\n",
      " -0.00295241 -0.00143772 -0.00176017 -0.00232014 -0.00243433 -0.0018488\n",
      " -0.00208309 -0.00178257 -0.00244262 -0.00161635 -0.00268423 -0.00205626\n",
      " -0.00191509 -0.00313179 -0.00133206 -0.00166168 -0.00100487 -0.00232916\n",
      " -0.00120794 -0.00140989 -0.00198615 -0.0014167  -0.00237336 -0.00201477\n",
      " -0.00150012 -0.00189723 -0.00190257 -0.00292223 -0.001721   -0.00248512\n",
      " -0.00150015 -0.00173758 -0.00390797 -0.00211734 -0.00179984 -0.00293281\n",
      " -0.00234475 -0.002927   -0.00285588 -0.00187913 -0.00044493 -0.00293396\n",
      " -0.00260813 -0.00160806 -0.00142007 -0.00229879 -0.00161543 -0.00146883\n",
      " -0.00345377 -0.00225383 -0.00251106 -0.00228217 -0.00230254 -0.00246863\n",
      " -0.00247423 -0.00110211 -0.00252352 -0.00219416 -0.00175332 -0.00183255\n",
      " -0.00271218 -0.00214254 -0.00168506 -0.00324827 -0.002586   -0.00134736\n",
      " -0.00238847 -0.00184101 -0.00139464 -0.00339323 -0.00198483 -0.00080632\n",
      " -0.00233935 -0.00266342 -0.00196951 -0.00153254 -0.00132469 -0.00188887]\n",
      "Model restored.\n",
      "\n",
      "test img name:254c20f3a15c2b40faf6d3ea7177f301.jpg.tfrecords\n",
      "[array([[ 0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n",
      "         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333]], dtype=float32)]\n"
     ]
    },
    {
     "ename": "TypeError",
     "evalue": "Fetch argument array([[ 0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333]], dtype=float32) has invalid type <class 'numpy.ndarray'>, must be a string or Tensor. (Can not convert a ndarray into a Tensor or Operation.)",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches, contraction_fn)\u001b[0m\n\u001b[1;32m    269\u001b[0m         self._unique_fetches.append(ops.get_default_graph().as_graph_element(\n\u001b[0;32m--> 270\u001b[0;31m             fetch, allow_tensor=True, allow_operation=True))\n\u001b[0m\u001b[1;32m    271\u001b[0m       \u001b[0;32mexcept\u001b[0m \u001b[0mTypeError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/framework/ops.py\u001b[0m in \u001b[0;36mas_graph_element\u001b[0;34m(self, obj, allow_tensor, allow_operation)\u001b[0m\n\u001b[1;32m   2707\u001b[0m     \u001b[0;32mwith\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2708\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_as_graph_element_locked\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mallow_tensor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mallow_operation\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   2709\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/framework/ops.py\u001b[0m in \u001b[0;36m_as_graph_element_locked\u001b[0;34m(self, obj, allow_tensor, allow_operation)\u001b[0m\n\u001b[1;32m   2796\u001b[0m       raise TypeError(\"Can not convert a %s into a %s.\"\n\u001b[0;32m-> 2797\u001b[0;31m                       % (type(obj).__name__, types_str))\n\u001b[0m\u001b[1;32m   2798\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mTypeError\u001b[0m: Can not convert a ndarray into a Tensor or Operation.",
      "\nDuring handling of the above exception, another exception occurred:\n",
      "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-2-1f24092b896a>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     23\u001b[0m             \u001b[0mpreprocess_testimg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtestnext_element\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     24\u001b[0m             softmax = sess.run([softmax], feed_dict={\n\u001b[0;32m---> 25\u001b[0;31m                     \u001b[0mpre_image_holder\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mpreprocess_testimg\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mIMAGE_SIZE\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mIMAGE_SIZE\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0;36m255\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m0.5\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     26\u001b[0m                 })\n\u001b[1;32m     27\u001b[0m             \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"test img name:\"\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mfile\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    893\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    894\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 895\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    896\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    897\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1107\u001b[0m     \u001b[0;31m# Create a fetch handler to take care of the structure of fetches.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1108\u001b[0m     fetch_handler = _FetchHandler(\n\u001b[0;32m-> 1109\u001b[0;31m         self._graph, fetches, feed_dict_tensor, feed_handles=feed_handles)\n\u001b[0m\u001b[1;32m   1110\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1111\u001b[0m     \u001b[0;31m# Run request and get response.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, graph, fetches, feeds, feed_handles)\u001b[0m\n\u001b[1;32m    411\u001b[0m     \"\"\"\n\u001b[1;32m    412\u001b[0m     \u001b[0;32mwith\u001b[0m \u001b[0mgraph\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_default\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 413\u001b[0;31m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_mapper\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    414\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetches\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    415\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_targets\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    231\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    232\u001b[0m       \u001b[0;31m# NOTE(touts): This is also the code path for namedtuples.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 233\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0m_ListFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    234\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    235\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0m_DictFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches)\u001b[0m\n\u001b[1;32m    338\u001b[0m     \"\"\"\n\u001b[1;32m    339\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 340\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    341\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m    338\u001b[0m     \"\"\"\n\u001b[1;32m    339\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 340\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    341\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    231\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    232\u001b[0m       \u001b[0;31m# NOTE(touts): This is also the code path for namedtuples.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 233\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0m_ListFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    234\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    235\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0m_DictFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches)\u001b[0m\n\u001b[1;32m    338\u001b[0m     \"\"\"\n\u001b[1;32m    339\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 340\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    341\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m    338\u001b[0m     \"\"\"\n\u001b[1;32m    339\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 340\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    341\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    239\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor_type\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    240\u001b[0m           \u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfetch_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 241\u001b[0;31m           \u001b[0;32mreturn\u001b[0m \u001b[0m_ElementFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    242\u001b[0m     \u001b[0;31m# Did not find anything.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    243\u001b[0m     raise TypeError('Fetch argument %r has invalid type %r' %\n",
      "\u001b[0;32m/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches, contraction_fn)\u001b[0m\n\u001b[1;32m    272\u001b[0m         raise TypeError('Fetch argument %r has invalid type %r, '\n\u001b[1;32m    273\u001b[0m                         \u001b[0;34m'must be a string or Tensor. (%s)'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 274\u001b[0;31m                         % (fetch, type(fetch), str(e)))\n\u001b[0m\u001b[1;32m    275\u001b[0m       \u001b[0;32mexcept\u001b[0m \u001b[0mValueError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    276\u001b[0m         raise ValueError('Fetch argument %r cannot be interpreted as a '\n",
      "\u001b[0;31mTypeError\u001b[0m: Fetch argument array([[ 0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333,\n         0.00833333,  0.00833333,  0.00833333,  0.00833333,  0.00833333]], dtype=float32) has invalid type <class 'numpy.ndarray'>, must be a string or Tensor. (Can not convert a ndarray into a Tensor or Operation.)"
     ]
    }
   ],
   "source": [
    "if __name__ == '__main__':\n",
    "    testdata_convert_to_tfrecord(\"./data/test\", \"./process_workspace/TestSet/\")\n",
    "    # One-image placeholder; inference_op builds the VGG16 graph and returns\n",
    "    # (logits?, softmax, ?, parameter list) -- we only need softmax and p here.\n",
    "    pre_image_holder = tf.placeholder(tf.float32, [1, IMAGE_SIZE, IMAGE_SIZE, 3])\n",
    "    _, softmax, _, p = inference_op(pre_image_holder, 1)\n",
    "    predict_result=[]\n",
    "    with tf.Session() as sess:\n",
    "        sess.run(tf.global_variables_initializer())\n",
    "        # Restore only the parameters returned by inference_op, keyed by their\n",
    "        # graph names without the trailing ':0'.\n",
    "        saverdict = {}\n",
    "        for item in p:\n",
    "            saverdict.update({item.name[:-2]: item})\n",
    "        saver = tf.train.Saver(saverdict)\n",
    "        if os.path.isfile(\"./process_workspace/DogBreedIdentificationVgg16_model.ckpt.meta\"):\n",
    "            print(\"Found checkpoint files , start to training from restored data\")\n",
    "            saver.restore(sess, \"./process_workspace/DogBreedIdentificationVgg16_model.ckpt\")\n",
    "            # Sanity check: show one restored variable's values.\n",
    "            print(p[0], sess.run(p[-1]))\n",
    "            print(\"Model restored.\\n\")\n",
    "        filenames = os.listdir(\"./process_workspace/TestSet\")\n",
    "        for file in filenames:\n",
    "            testdataset = predict_read_tfrecord([\"./process_workspace/TestSet/\"+file])\n",
    "            iterator = testdataset.make_initializable_iterator()\n",
    "            testnext_element = iterator.get_next()\n",
    "            sess.run(iterator.initializer)\n",
    "            preprocess_testimg = sess.run(testnext_element)\n",
    "            # Scale raw uint8 pixels to [-0.5, 0.5) -- presumably matching the\n",
    "            # training-time preprocessing; confirm against the training script.\n",
    "            predict_softmax = sess.run([softmax], feed_dict={\n",
    "                    pre_image_holder: preprocess_testimg[0].reshape(-1, IMAGE_SIZE, IMAGE_SIZE, 3)/255-0.5\n",
    "                })\n",
    "            print(\"test img name:\"+file)\n",
    "            # Bug fix: print the fetched probabilities, not the softmax Tensor\n",
    "            # object (printing `softmax` only shows the graph node's repr).\n",
    "            print(predict_softmax)\n",
    "            # Bug fix: str.rstrip strips a *character set*, so it could eat\n",
    "            # trailing id characters (any of '.jpgtfrecords'); slice off the\n",
    "            # fixed \".jpg.tfrecords\" suffix instead.\n",
    "            predict_result.append([file[:-len(\".jpg.tfrecords\")], predict_softmax[0].tolist()])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
