{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "#!/usr/bin/env python3\n",
    "# -*- coding: utf-8 -*-\n",
    "# Setup: parse flags, configure logging, load the word<->index vocabularies,\n",
    "# and define the poem titles to generate.\n",
    "import json\n",
    "import logging\n",
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "import utils\n",
    "from model import Model\n",
    "from flags import parse_args\n",
    "FLAGS, unparsed = parse_args()\n",
    "logging.basicConfig(\n",
    "    format='%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s', level=logging.DEBUG)\n",
    "# NOTE: json.load()'s 'encoding' kwarg was deprecated and removed in Python 3.9;\n",
    "# open(..., encoding='utf-8') already handles decoding, so it is dropped here.\n",
    "with open(FLAGS.dictionary, encoding='utf-8') as inf:\n",
    "    dictionary = json.load(inf)\n",
    "with open(FLAGS.reverse_dictionary, encoding='utf-8') as inf:\n",
    "    reverse_dictionary = json.load(inf)\n",
    "# Build an index -> word list ordered by the integer (string) keys of\n",
    "# reverse_dictionary, so reverse_list[i] is the word for model output index i.\n",
    "reverse_list = [reverse_dictionary[str(i)]\n",
    "                for i in range(len(reverse_dictionary))]\n",
    "#titles = ['江神子', '蝶恋花', '渔家傲']\n",
    "titles = [ '浣溪沙', '水调歌头']\n",
    "#'御街行',"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<tf.Variable 'embedding/embedding:0' shape=(5000, 128) dtype=float32_ref>\n",
      "WARNING:tensorflow:From /home/vincentliu/Documents/my_home_work/11th_week_base/quiz-w10-code/quiz-w10-code/model.py:99: softmax_cross_entropy_with_logits (from tensorflow.python.ops.nn_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "\n",
      "Future major versions of TensorFlow will allow gradients to flow\n",
      "into the labels input on backprop by default.\n",
      "\n",
      "See @{tf.nn.softmax_cross_entropy_with_logits_v2}.\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2019-02-11 11:09:43,321 - WARNING - tf_logging.py:125 - From /home/vincentliu/Documents/my_home_work/11th_week_base/quiz-w10-code/quiz-w10-code/model.py:99: softmax_cross_entropy_with_logits (from tensorflow.python.ops.nn_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "\n",
      "Future major versions of TensorFlow will allow gradients to flow\n",
      "into the labels input on backprop by default.\n",
      "\n",
      "See @{tf.nn.softmax_cross_entropy_with_logits_v2}.\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "!!!!!!!!!self.loss!!!!!!!!!\n",
      "Tensor(\"Mean:0\", shape=(), dtype=float32)\n",
      "!!!!!!!!self.loss!!!!!!!!!!\n"
     ]
    }
   ],
   "source": [
    "# Build the RNN graph with batch_size=1 and num_steps=1 so that during\n",
    "# generation we can feed one character at a time and carry the state forward.\n",
    "model = Model(learning_rate=FLAGS.learning_rate, batch_size=1, num_steps=1)\n",
    "model.build()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2019-02-11 11:09:45,196 - DEBUG - <ipython-input-3-44db384e2baf>:7 - Initialized\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from ./rnn_log/model.ckpt-26470\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2019-02-11 11:09:45,205 - INFO - tf_logging.py:115 - Restoring parameters from ./rnn_log/model.ckpt-26470\n",
      "2019-02-11 11:09:45,258 - DEBUG - <ipython-input-3-44db384e2baf>:12 - restore from [./rnn_log/model.ckpt-26470]\n",
      "2019-02-11 11:09:45,638 - DEBUG - <ipython-input-3-44db384e2baf>:46 - ==============[浣溪沙]==============\n",
      "2019-02-11 11:09:45,639 - DEBUG - <ipython-input-3-44db384e2baf>:47 - 浣溪沙寿）\n",
      "\n",
      "一年一里，一年春，一人一处。一年一处，一年风，一人一处。\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "2019-02-11 11:09:46,006 - DEBUG - <ipython-input-3-44db384e2baf>:46 - ==============[水调歌头]==============\n",
      "2019-02-11 11:09:46,007 - DEBUG - <ipython-input-3-44db384e2baf>:47 - 水调歌头一年一里，一年一里，一年风。\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Generate one poem per title: restore the latest checkpoint, feed the title\n",
    "# characters to warm up the RNN state, then sample 64 characters greedily.\n",
    "with tf.Session() as sess:\n",
    "    summary_string_writer = tf.summary.FileWriter(FLAGS.output_dir, sess.graph)\n",
    "\n",
    "    saver = tf.train.Saver(max_to_keep=5)\n",
    "    sess.run(tf.global_variables_initializer())\n",
    "    sess.run(tf.local_variables_initializer())\n",
    "    logging.debug('Initialized')\n",
    "\n",
    "    # latest_checkpoint() returns None when no checkpoint exists; test that\n",
    "    # explicitly instead of the previous 'except Exception: exit(0)', which\n",
    "    # both hid real restore errors and reported success on failure.\n",
    "    checkpoint_path = tf.train.latest_checkpoint(FLAGS.output_dir)\n",
    "    if checkpoint_path is None:\n",
    "        logging.debug('no check point found....')\n",
    "        raise SystemExit(1)\n",
    "    saver.restore(sess, checkpoint_path)\n",
    "    logging.debug('restore from [{0}]'.format(checkpoint_path))\n",
    "\n",
    "    for title in titles:\n",
    "        state = sess.run(model.state_tensor)\n",
    "        # Feed the title one character at a time to prime the RNN state;\n",
    "        # only the final prediction/state are used afterwards.\n",
    "        for head in title:\n",
    "            # renamed from 'input': it shadowed the Python builtin\n",
    "            input_ids = utils.index_data(np.array([[head]]), dictionary)\n",
    "            feed_dict = {model.X: input_ids,\n",
    "                         model.state_tensor: state,\n",
    "                         model.keep_prob: 1.0}\n",
    "\n",
    "            pred, state = sess.run(\n",
    "                [model.predictions, model.outputs_state_tensor], feed_dict=feed_dict)\n",
    "\n",
    "        sentence = title\n",
    "        # Greedy decode: index of the highest-probability next word.\n",
    "        word_index = pred[0].argsort()[-1]\n",
    "\n",
    "        # Generate 64 characters, feeding each prediction back as the input.\n",
    "        for i in range(64):\n",
    "            feed_dict = {model.X: [[word_index]],\n",
    "                         model.state_tensor: state,\n",
    "                         model.keep_prob: 1.0}\n",
    "\n",
    "            pred, state = sess.run(\n",
    "                [model.predictions, model.outputs_state_tensor], feed_dict=feed_dict)\n",
    "\n",
    "            word_index = pred[0].argsort()[-1]\n",
    "            word = reverse_list[word_index]\n",
    "            sentence = sentence + word\n",
    "\n",
    "        logging.debug('==============[{0}]=============='.format(title))\n",
    "        logging.debug(sentence)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
