{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Stacking LSTM Layers\n",
    "-----------------\n",
     "Here we implement an LSTM model on a data set of Shakespeare's works. We will stack multiple LSTM layers for a more accurate representation of Shakespearean language. We will also use characters instead of words."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "import os\n",
     "import re\n",
     "import string\n",
     "import requests\n",
     "import numpy as np\n",
     "import collections\n",
     "import random\n",
     "import pickle\n",
     "import matplotlib.pyplot as plt\n",
     "import tensorflow as tf\n",
     "from tensorflow.python.framework import ops\n",
     "# Clear any graph left over from a previous run so re-executing the\n",
     "# notebook from the top always starts from a fresh default graph.\n",
     "ops.reset_default_graph()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Start a computational graph session."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# TF1 session shared by all later cells (training and sampling).\n",
     "sess = tf.Session()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Set RNN Parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "num_layers = 3 # Number of stacked LSTM layers\n",
     "min_word_freq = 5 # Trim words below this frequency (NOTE(review): appears unused in this character-level version)\n",
     "rnn_size = 128 # RNN model size; must equal the embedding size\n",
     "epochs = 10 # Number of full passes through the data\n",
     "batch_size = 100 # Number of sequences trained on at once\n",
     "learning_rate = 0.0005 # Adam optimizer learning rate\n",
     "training_seq_len = 50 # Length (in characters) of each training sequence\n",
     "save_every = 500 # Save a model checkpoint every this many iterations\n",
     "eval_every = 50 # Generate from the test sentences every this many iterations\n",
     "prime_texts = ['thou art more', 'to be or not to', 'wherefore art thou']"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Download/store Shakespeare data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Relative paths: raw text file lives under data_dir, checkpoints under full_model_dir.\n",
     "data_dir = 'temp'\n",
     "data_file = 'shakespeare.txt'\n",
     "model_path = 'shakespeare_model'\n",
     "full_model_dir = os.path.join(data_dir, model_path)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Declare the punctuation and then create the model and data directories"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Declare punctuation to remove, everything except hyphens and apostrophes\n",
    "punctuation = string.punctuation\n",
    "punctuation = ''.join([x for x in punctuation if x not in ['-', \"'\"]])\n",
    "\n",
    "# Make Model Directory\n",
    "if not os.path.exists(full_model_dir):\n",
    "    os.makedirs(full_model_dir)\n",
    "\n",
    "# Make data directory\n",
    "if not os.path.exists(data_dir):\n",
    "    os.makedirs(data_dir)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Load the Shakespeare Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Loading Shakespeare Data\n",
      "Not found, downloading Shakespeare texts from www.gutenberg.org\n",
      "Done Loading Data.\n"
     ]
    }
   ],
   "source": [
     "print('Loading Shakespeare Data')\n",
     "# Check if file is downloaded; only hit the network on the first run.\n",
     "if not os.path.isfile(os.path.join(data_dir, data_file)):\n",
     "    print('Not found, downloading Shakespeare texts from www.gutenberg.org')\n",
     "    shakespeare_url = 'http://www.gutenberg.org/cache/epub/100/pg100.txt'\n",
     "    # Get Shakespeare text\n",
     "    response = requests.get(shakespeare_url)\n",
     "    shakespeare_file = response.content\n",
     "    # Decode binary into string\n",
     "    s_text = shakespeare_file.decode('utf-8')\n",
     "    # Drop the Project Gutenberg header paragraphs.\n",
     "    # NOTE(review): the 7675 offset is specific to this exact file revision --\n",
     "    # verify it still skips only the header if the URL content changes.\n",
     "    s_text = s_text[7675:]\n",
     "    # Remove newlines so the text is one continuous character stream\n",
     "    s_text = s_text.replace('\\r\\n', '')\n",
     "    s_text = s_text.replace('\\n', '')\n",
     "    \n",
     "    # Cache to file so later runs skip the download\n",
     "    with open(os.path.join(data_dir, data_file), 'w') as out_conn:\n",
     "        out_conn.write(s_text)\n",
     "else:\n",
     "    # If file has been saved, load from that file\n",
     "    with open(os.path.join(data_dir, data_file), 'r') as file_conn:\n",
     "        s_text = file_conn.read().replace('\\n', '')\n",
     "\n",
     "print('Done Loading Data.')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Clean and split the text data."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Cleaning Text\n"
     ]
    }
   ],
   "source": [
    "# Clean text\n",
    "print('Cleaning Text')\n",
    "s_text = re.sub(r'[{}]'.format(punctuation), ' ', s_text)\n",
    "s_text = re.sub('\\s+', ' ', s_text ).strip().lower()\n",
    "\n",
    "# Split up by characters\n",
    "char_list = list(s_text)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "Build the character vocabulary function and transform the text."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Building Shakespeare Vocab by Characters\n",
      "Vocabulary Length = 40\n"
     ]
    }
   ],
   "source": [
    "def build_vocab(characters):\n",
    "    character_counts = collections.Counter(characters)\n",
    "    # Create vocab --> index mapping\n",
    "    chars = character_counts.keys()\n",
    "    vocab_to_ix_dict = {key:(ix+1) for ix, key in enumerate(chars)}\n",
    "    # Add unknown key --> 0 index\n",
    "    vocab_to_ix_dict['unknown']=0\n",
    "    # Create index --> vocab mapping\n",
    "    ix_to_vocab_dict = {val:key for key,val in vocab_to_ix_dict.items()}\n",
    "    return(ix_to_vocab_dict, vocab_to_ix_dict)\n",
    "\n",
     "# Build Shakespeare vocabulary (character level)\n",
     "print('Building Shakespeare Vocab by Characters')\n",
     "ix2vocab, vocab2ix = build_vocab(char_list)\n",
     "vocab_size = len(ix2vocab)\n",
     "print('Vocabulary Length = {}'.format(vocab_size))\n",
     "# Sanity Check: the two mappings must be exact inverses of each other\n",
     "assert(len(ix2vocab) == len(vocab2ix))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "Convert the text into arrays of character indices"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "s_text_ix = []\n",
    "for x in char_list:\n",
    "    try:\n",
    "        s_text_ix.append(vocab2ix[x])\n",
    "    except:\n",
    "        s_text_ix.append(0)\n",
    "s_text_ix = np.array(s_text_ix)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Define LSTM RNN Model Class"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class LSTM_Model():\n",
    "    def __init__(self, rnn_size, num_layers, batch_size, learning_rate,\n",
    "                 training_seq_len, vocab_size, infer_sample=False):\n",
    "        self.rnn_size = rnn_size\n",
    "        self.num_layers = num_layers\n",
    "        self.vocab_size = vocab_size\n",
    "        self.infer_sample = infer_sample\n",
    "        self.learning_rate = learning_rate\n",
    "        \n",
    "        if infer_sample:\n",
    "            self.batch_size = 1\n",
    "            self.training_seq_len = 1\n",
    "        else:\n",
    "            self.batch_size = batch_size\n",
    "            self.training_seq_len = training_seq_len\n",
    "        \n",
    "        self.lstm_cell = tf.contrib.rnn.BasicLSTMCell(rnn_size)\n",
    "        self.lstm_cell = tf.contrib.rnn.MultiRNNCell([self.lstm_cell for _ in range(self.num_layers)])\n",
    "        self.initial_state = self.lstm_cell.zero_state(self.batch_size, tf.float32)\n",
    "        \n",
    "        self.x_data = tf.placeholder(tf.int32, [self.batch_size, self.training_seq_len])\n",
    "        self.y_output = tf.placeholder(tf.int32, [self.batch_size, self.training_seq_len])\n",
    "        \n",
    "        with tf.variable_scope('lstm_vars'):\n",
    "            # Softmax Output Weights\n",
    "            W = tf.get_variable('W', [self.rnn_size, self.vocab_size], tf.float32, tf.random_normal_initializer())\n",
    "            b = tf.get_variable('b', [self.vocab_size], tf.float32, tf.constant_initializer(0.0))\n",
    "        \n",
    "            # Define Embedding\n",
    "            embedding_mat = tf.get_variable('embedding_mat', [self.vocab_size, self.rnn_size],\n",
    "                                            tf.float32, tf.random_normal_initializer())\n",
    "                                            \n",
    "            embedding_output = tf.nn.embedding_lookup(embedding_mat, self.x_data)\n",
    "            rnn_inputs = tf.split(axis=1, num_or_size_splits=self.training_seq_len, value=embedding_output)\n",
    "            rnn_inputs_trimmed = [tf.squeeze(x, [1]) for x in rnn_inputs]\n",
    "        \n",
    "        decoder = tf.contrib.legacy_seq2seq.rnn_decoder\n",
    "        outputs, last_state = decoder(rnn_inputs_trimmed,\n",
    "                                      self.initial_state,\n",
    "                                      self.lstm_cell)\n",
    "        \n",
    "        # RNN outputs\n",
    "        output = tf.reshape(tf.concat(axis=1, values=outputs), [-1, rnn_size])\n",
    "        # Logits and output\n",
    "        self.logit_output = tf.matmul(output, W) + b\n",
    "        self.model_output = tf.nn.softmax(self.logit_output)\n",
    "        \n",
    "        loss_fun = tf.contrib.legacy_seq2seq.sequence_loss_by_example\n",
    "        loss = loss_fun([self.logit_output],[tf.reshape(self.y_output, [-1])],\n",
    "                [tf.ones([self.batch_size * self.training_seq_len])],\n",
    "                self.vocab_size)\n",
    "        self.cost = tf.reduce_sum(loss) / (self.batch_size * self.training_seq_len)\n",
    "        self.final_state = last_state\n",
    "        gradients, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tf.trainable_variables()), 4.5)\n",
    "        optimizer = tf.train.AdamOptimizer(self.learning_rate)\n",
    "        self.train_op = optimizer.apply_gradients(zip(gradients, tf.trainable_variables()))\n",
    "        \n",
    "    def sample(self, sess, words=ix2vocab, vocab=vocab2ix, num=20, prime_text='thou art'):\n",
    "        state = sess.run(self.lstm_cell.zero_state(1, tf.float32))\n",
    "        char_list = list(prime_text)\n",
    "        for char in char_list[:-1]:\n",
    "            x = np.zeros((1, 1))\n",
    "            x[0, 0] = vocab[char]\n",
    "            feed_dict = {self.x_data: x, self.initial_state:state}\n",
    "            [state] = sess.run([self.final_state], feed_dict=feed_dict)\n",
    "\n",
    "        out_sentence = prime_text\n",
    "        char = char_list[-1]\n",
    "        for n in range(num):\n",
    "            x = np.zeros((1, 1))\n",
    "            x[0, 0] = vocab[char]\n",
    "            feed_dict = {self.x_data: x, self.initial_state:state}\n",
    "            [model_output, state] = sess.run([self.model_output, self.final_state], feed_dict=feed_dict)\n",
    "            sample = np.argmax(model_output[0])\n",
    "            if sample == 0:\n",
    "                break\n",
    "            char = words[sample]\n",
    "            out_sentence = out_sentence + char\n",
    "        return(out_sentence)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Initialize the LSTM Model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Build the training graph with the full batch/sequence dimensions.\n",
     "lstm_model = LSTM_Model(rnn_size, num_layers, batch_size, learning_rate,\n",
     "                        training_seq_len, vocab_size)\n",
     "\n",
     "# Tell TensorFlow we are reusing the scope for the testing, so the sampling\n",
     "# model shares the weights learned by the training model.\n",
     "with tf.variable_scope(tf.get_variable_scope(), reuse=True):\n",
     "    test_lstm_model = LSTM_Model(rnn_size,num_layers, batch_size, learning_rate,\n",
     "                                 training_seq_len, vocab_size, infer_sample=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Create model saver"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Saver used by the training loop to write periodic checkpoints of all variables.\n",
     "saver = tf.train.Saver(tf.global_variables())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Create batches for each epoch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Number of batches needed to cover the whole index sequence (+1 so the\n",
     "# trailing remainder still forms a final batch).\n",
     "num_batches = int(len(s_text_ix)/(batch_size * training_seq_len)) + 1\n",
     "# Split up text indices into subarrays, of equal size\n",
     "batches = np.array_split(s_text_ix, num_batches)\n",
     "# Reshape each split into [batch_size, training_seq_len].\n",
     "# NOTE(review): np.resize repeats (or truncates) the data to fit the target\n",
     "# shape, so the last batch may contain repeated characters.\n",
     "batches = [np.resize(x, [batch_size, training_seq_len]) for x in batches]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Initialize all variables and train the model!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Starting Epoch #1 of 10.\n",
      "Iteration: 10, Epoch: 1, Batch: 10 out of 950, Loss: 3.06\n",
      "Iteration: 20, Epoch: 1, Batch: 20 out of 950, Loss: 2.95\n",
      "Iteration: 30, Epoch: 1, Batch: 30 out of 950, Loss: 2.88\n",
      "Iteration: 40, Epoch: 1, Batch: 40 out of 950, Loss: 2.87\n",
      "Iteration: 50, Epoch: 1, Batch: 50 out of 950, Loss: 2.85\n",
      "thou art more    ee o o\n",
      "to be or not to   ee  e o\n",
      "wherefore art thou   ee  oo \n",
      "Iteration: 60, Epoch: 1, Batch: 60 out of 950, Loss: 2.84\n",
      "Iteration: 70, Epoch: 1, Batch: 70 out of 950, Loss: 2.84\n",
      "Iteration: 80, Epoch: 1, Batch: 80 out of 950, Loss: 2.83\n",
      "Iteration: 90, Epoch: 1, Batch: 90 out of 950, Loss: 2.79\n",
      "Iteration: 100, Epoch: 1, Batch: 100 out of 950, Loss: 2.79\n",
      "thou art more  teo o oo\n",
      "to be or not to e hee o o\n",
      "wherefore art thou  eoo o oo\n",
      "Iteration: 110, Epoch: 1, Batch: 110 out of 950, Loss: 2.77\n",
      "Iteration: 120, Epoch: 1, Batch: 120 out of 950, Loss: 2.77\n",
      "Iteration: 130, Epoch: 1, Batch: 130 out of 950, Loss: 2.73\n",
      "Iteration: 140, Epoch: 1, Batch: 140 out of 950, Loss: 2.76\n",
      "Iteration: 150, Epoch: 1, Batch: 150 out of 950, Loss: 2.70\n",
      "thou art more te oo o o\n",
      "to be or not to oe or oe \n",
      "wherefore art thou e oo or o\n",
      "Iteration: 160, Epoch: 1, Batch: 160 out of 950, Loss: 2.69\n",
      "Iteration: 170, Epoch: 1, Batch: 170 out of 950, Loss: 2.68\n",
      "Iteration: 180, Epoch: 1, Batch: 180 out of 950, Loss: 2.63\n",
      "Iteration: 190, Epoch: 1, Batch: 190 out of 950, Loss: 2.62\n",
      "Iteration: 200, Epoch: 1, Batch: 200 out of 950, Loss: 2.63\n",
      "thou art more the ae th\n",
      "to be or not to he al the\n",
      "wherefore art thou the ao th\n",
      "Iteration: 210, Epoch: 1, Batch: 210 out of 950, Loss: 2.53\n",
      "Iteration: 220, Epoch: 1, Batch: 220 out of 950, Loss: 2.59\n",
      "Iteration: 230, Epoch: 1, Batch: 230 out of 950, Loss: 2.56\n",
      "Iteration: 240, Epoch: 1, Batch: 240 out of 950, Loss: 2.39\n",
      "Iteration: 250, Epoch: 1, Batch: 250 out of 950, Loss: 2.43\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 260, Epoch: 1, Batch: 260 out of 950, Loss: 2.44\n",
      "Iteration: 270, Epoch: 1, Batch: 270 out of 950, Loss: 2.31\n",
      "Iteration: 280, Epoch: 1, Batch: 280 out of 950, Loss: 2.35\n",
      "Iteration: 290, Epoch: 1, Batch: 290 out of 950, Loss: 2.39\n",
      "Iteration: 300, Epoch: 1, Batch: 300 out of 950, Loss: 2.38\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 310, Epoch: 1, Batch: 310 out of 950, Loss: 2.31\n",
      "Iteration: 320, Epoch: 1, Batch: 320 out of 950, Loss: 2.23\n",
      "Iteration: 330, Epoch: 1, Batch: 330 out of 950, Loss: 2.27\n",
      "Iteration: 340, Epoch: 1, Batch: 340 out of 950, Loss: 2.26\n",
      "Iteration: 350, Epoch: 1, Batch: 350 out of 950, Loss: 2.27\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 360, Epoch: 1, Batch: 360 out of 950, Loss: 2.25\n",
      "Iteration: 370, Epoch: 1, Batch: 370 out of 950, Loss: 2.32\n",
      "Iteration: 380, Epoch: 1, Batch: 380 out of 950, Loss: 2.26\n",
      "Iteration: 390, Epoch: 1, Batch: 390 out of 950, Loss: 2.26\n",
      "Iteration: 400, Epoch: 1, Batch: 400 out of 950, Loss: 2.28\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 410, Epoch: 1, Batch: 410 out of 950, Loss: 2.23\n",
      "Iteration: 420, Epoch: 1, Batch: 420 out of 950, Loss: 2.26\n",
      "Iteration: 430, Epoch: 1, Batch: 430 out of 950, Loss: 2.24\n",
      "Iteration: 440, Epoch: 1, Batch: 440 out of 950, Loss: 2.39\n",
      "Iteration: 450, Epoch: 1, Batch: 450 out of 950, Loss: 2.21\n",
      "thou art more the the t\n",
      "to be or not to me the th\n",
      "wherefore art thou he the th\n",
      "Iteration: 460, Epoch: 1, Batch: 460 out of 950, Loss: 2.25\n",
      "Iteration: 470, Epoch: 1, Batch: 470 out of 950, Loss: 2.22\n",
      "Iteration: 480, Epoch: 1, Batch: 480 out of 950, Loss: 2.15\n",
      "Iteration: 490, Epoch: 1, Batch: 490 out of 950, Loss: 2.14\n",
      "Iteration: 500, Epoch: 1, Batch: 500 out of 950, Loss: 2.16\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou hat the t\n",
      "Iteration: 510, Epoch: 1, Batch: 510 out of 950, Loss: 2.24\n",
      "Iteration: 520, Epoch: 1, Batch: 520 out of 950, Loss: 2.16\n",
      "Iteration: 530, Epoch: 1, Batch: 530 out of 950, Loss: 2.15\n",
      "Iteration: 540, Epoch: 1, Batch: 540 out of 950, Loss: 2.19\n",
      "Iteration: 550, Epoch: 1, Batch: 550 out of 950, Loss: 2.14\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 560, Epoch: 1, Batch: 560 out of 950, Loss: 2.12\n",
      "Iteration: 570, Epoch: 1, Batch: 570 out of 950, Loss: 2.12\n",
      "Iteration: 580, Epoch: 1, Batch: 580 out of 950, Loss: 2.17\n",
      "Iteration: 590, Epoch: 1, Batch: 590 out of 950, Loss: 2.11\n",
      "Iteration: 600, Epoch: 1, Batch: 600 out of 950, Loss: 2.25\n",
      "thou art more the the t\n",
      "to be or not to me the th\n",
      "wherefore art thou the the t\n",
      "Iteration: 610, Epoch: 1, Batch: 610 out of 950, Loss: 2.07\n",
      "Iteration: 620, Epoch: 1, Batch: 620 out of 950, Loss: 2.09\n",
      "Iteration: 630, Epoch: 1, Batch: 630 out of 950, Loss: 2.18\n",
      "Iteration: 640, Epoch: 1, Batch: 640 out of 950, Loss: 2.13\n",
      "Iteration: 650, Epoch: 1, Batch: 650 out of 950, Loss: 2.10\n",
      "thou art more the the t\n",
      "to be or not to my the th\n",
      "wherefore art thou the the t\n",
      "Iteration: 660, Epoch: 1, Batch: 660 out of 950, Loss: 2.11\n",
      "Iteration: 670, Epoch: 1, Batch: 670 out of 950, Loss: 2.09\n",
      "Iteration: 680, Epoch: 1, Batch: 680 out of 950, Loss: 2.08\n",
      "Iteration: 690, Epoch: 1, Batch: 690 out of 950, Loss: 2.09\n",
      "Iteration: 700, Epoch: 1, Batch: 700 out of 950, Loss: 2.20\n",
      "thou art more the the t\n",
      "to be or not to my the th\n",
      "wherefore art thou the the t\n",
      "Iteration: 710, Epoch: 1, Batch: 710 out of 950, Loss: 2.08\n",
      "Iteration: 720, Epoch: 1, Batch: 720 out of 950, Loss: 2.16\n",
      "Iteration: 730, Epoch: 1, Batch: 730 out of 950, Loss: 2.06\n",
      "Iteration: 740, Epoch: 1, Batch: 740 out of 950, Loss: 2.07\n",
      "Iteration: 750, Epoch: 1, Batch: 750 out of 950, Loss: 2.05\n",
      "thou art more the the s\n",
      "to be or not to man the t\n",
      "wherefore art thou shat shat\n",
      "Iteration: 760, Epoch: 1, Batch: 760 out of 950, Loss: 2.09\n",
      "Iteration: 770, Epoch: 1, Batch: 770 out of 950, Loss: 2.04\n",
      "Iteration: 780, Epoch: 1, Batch: 780 out of 950, Loss: 1.99\n",
      "Iteration: 790, Epoch: 1, Batch: 790 out of 950, Loss: 2.15\n",
      "Iteration: 800, Epoch: 1, Batch: 800 out of 950, Loss: 2.06\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 810, Epoch: 1, Batch: 810 out of 950, Loss: 2.04\n",
      "Iteration: 820, Epoch: 1, Batch: 820 out of 950, Loss: 2.01\n",
      "Iteration: 830, Epoch: 1, Batch: 830 out of 950, Loss: 2.04\n",
      "Iteration: 840, Epoch: 1, Batch: 840 out of 950, Loss: 2.04\n",
      "Iteration: 850, Epoch: 1, Batch: 850 out of 950, Loss: 2.04\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 860, Epoch: 1, Batch: 860 out of 950, Loss: 2.04\n",
      "Iteration: 870, Epoch: 1, Batch: 870 out of 950, Loss: 1.98\n",
      "Iteration: 880, Epoch: 1, Batch: 880 out of 950, Loss: 2.04\n",
      "Iteration: 890, Epoch: 1, Batch: 890 out of 950, Loss: 2.04\n",
      "Iteration: 900, Epoch: 1, Batch: 900 out of 950, Loss: 1.98\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 910, Epoch: 1, Batch: 910 out of 950, Loss: 2.04\n",
      "Iteration: 920, Epoch: 1, Batch: 920 out of 950, Loss: 2.00\n",
      "Iteration: 930, Epoch: 1, Batch: 930 out of 950, Loss: 2.03\n",
      "Iteration: 940, Epoch: 1, Batch: 940 out of 950, Loss: 2.03\n",
      "Starting Epoch #2 of 10.\n",
      "Iteration: 950, Epoch: 2, Batch: 1 out of 950, Loss: 2.08\n",
      "thou art more the so ma\n",
      "to be or not to man and t\n",
      "wherefore art thou hat shall\n",
      "Iteration: 960, Epoch: 2, Batch: 11 out of 950, Loss: 1.94\n",
      "Iteration: 970, Epoch: 2, Batch: 21 out of 950, Loss: 2.03\n",
      "Iteration: 980, Epoch: 2, Batch: 31 out of 950, Loss: 1.96\n",
      "Iteration: 990, Epoch: 2, Batch: 41 out of 950, Loss: 1.98\n",
      "Iteration: 1000, Epoch: 2, Batch: 51 out of 950, Loss: 2.06\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the man t\n",
      "to be or not to man the m\n",
      "wherefore art thou have the \n",
      "Iteration: 1010, Epoch: 2, Batch: 61 out of 950, Loss: 2.05\n",
      "Iteration: 1020, Epoch: 2, Batch: 71 out of 950, Loss: 2.01\n",
      "Iteration: 1030, Epoch: 2, Batch: 81 out of 950, Loss: 1.98\n",
      "Iteration: 1040, Epoch: 2, Batch: 91 out of 950, Loss: 1.98\n",
      "Iteration: 1050, Epoch: 2, Batch: 101 out of 950, Loss: 1.94\n",
      "thou art more the the t\n",
      "to be or not to the the s\n",
      "wherefore art thou her the t\n",
      "Iteration: 1060, Epoch: 2, Batch: 111 out of 950, Loss: 2.02\n",
      "Iteration: 1070, Epoch: 2, Batch: 121 out of 950, Loss: 2.15\n",
      "Iteration: 1080, Epoch: 2, Batch: 131 out of 950, Loss: 2.02\n",
      "Iteration: 1090, Epoch: 2, Batch: 141 out of 950, Loss: 1.95\n",
      "Iteration: 1100, Epoch: 2, Batch: 151 out of 950, Loss: 1.92\n",
      "thou art more the the t\n",
      "to be or not to the the t\n",
      "wherefore art thou the the t\n",
      "Iteration: 1110, Epoch: 2, Batch: 161 out of 950, Loss: 1.99\n",
      "Iteration: 1120, Epoch: 2, Batch: 171 out of 950, Loss: 2.07\n",
      "Iteration: 1130, Epoch: 2, Batch: 181 out of 950, Loss: 1.98\n",
      "Iteration: 1140, Epoch: 2, Batch: 191 out of 950, Loss: 2.00\n",
      "Iteration: 1150, Epoch: 2, Batch: 201 out of 950, Loss: 1.96\n",
      "thou art more the seent\n",
      "to be or not to the seent\n",
      "wherefore art thou so the se\n",
      "Iteration: 1160, Epoch: 2, Batch: 211 out of 950, Loss: 1.95\n",
      "Iteration: 1170, Epoch: 2, Batch: 221 out of 950, Loss: 1.97\n",
      "Iteration: 1180, Epoch: 2, Batch: 231 out of 950, Loss: 2.04\n",
      "Iteration: 1190, Epoch: 2, Batch: 241 out of 950, Loss: 1.99\n",
      "Iteration: 1200, Epoch: 2, Batch: 251 out of 950, Loss: 1.95\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou the see t\n",
      "Iteration: 1210, Epoch: 2, Batch: 261 out of 950, Loss: 1.95\n",
      "Iteration: 1220, Epoch: 2, Batch: 271 out of 950, Loss: 1.98\n",
      "Iteration: 1230, Epoch: 2, Batch: 281 out of 950, Loss: 2.00\n",
      "Iteration: 1240, Epoch: 2, Batch: 291 out of 950, Loss: 1.96\n",
      "Iteration: 1250, Epoch: 2, Batch: 301 out of 950, Loss: 1.93\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou hath the \n",
      "Iteration: 1260, Epoch: 2, Batch: 311 out of 950, Loss: 2.06\n",
      "Iteration: 1270, Epoch: 2, Batch: 321 out of 950, Loss: 1.91\n",
      "Iteration: 1280, Epoch: 2, Batch: 331 out of 950, Loss: 1.93\n",
      "Iteration: 1290, Epoch: 2, Batch: 341 out of 950, Loss: 2.03\n",
      "Iteration: 1300, Epoch: 2, Batch: 351 out of 950, Loss: 1.92\n",
      "thou art more the sall \n",
      "to be or not to the sall \n",
      "wherefore art thou have and \n",
      "Iteration: 1310, Epoch: 2, Batch: 361 out of 950, Loss: 1.94\n",
      "Iteration: 1320, Epoch: 2, Batch: 371 out of 950, Loss: 1.90\n",
      "Iteration: 1330, Epoch: 2, Batch: 381 out of 950, Loss: 1.90\n",
      "Iteration: 1340, Epoch: 2, Batch: 391 out of 950, Loss: 1.97\n",
      "Iteration: 1350, Epoch: 2, Batch: 401 out of 950, Loss: 1.93\n",
      "thou art more the so th\n",
      "to be or not to the so th\n",
      "wherefore art thou have the \n",
      "Iteration: 1360, Epoch: 2, Batch: 411 out of 950, Loss: 1.96\n",
      "Iteration: 1370, Epoch: 2, Batch: 421 out of 950, Loss: 1.90\n",
      "Iteration: 1380, Epoch: 2, Batch: 431 out of 950, Loss: 1.96\n",
      "Iteration: 1390, Epoch: 2, Batch: 441 out of 950, Loss: 1.97\n",
      "Iteration: 1400, Epoch: 2, Batch: 451 out of 950, Loss: 2.03\n",
      "thou art more the see h\n",
      "to be or not to the see h\n",
      "wherefore art thou have the \n",
      "Iteration: 1410, Epoch: 2, Batch: 461 out of 950, Loss: 1.97\n",
      "Iteration: 1420, Epoch: 2, Batch: 471 out of 950, Loss: 1.91\n",
      "Iteration: 1430, Epoch: 2, Batch: 481 out of 950, Loss: 1.96\n",
      "Iteration: 1440, Epoch: 2, Batch: 491 out of 950, Loss: 1.94\n",
      "Iteration: 1450, Epoch: 2, Batch: 501 out of 950, Loss: 1.90\n",
      "thou art more the see a\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 1460, Epoch: 2, Batch: 511 out of 950, Loss: 1.91\n",
      "Iteration: 1470, Epoch: 2, Batch: 521 out of 950, Loss: 1.90\n",
      "Iteration: 1480, Epoch: 2, Batch: 531 out of 950, Loss: 1.90\n",
      "Iteration: 1490, Epoch: 2, Batch: 541 out of 950, Loss: 1.91\n",
      "Iteration: 1500, Epoch: 2, Batch: 551 out of 950, Loss: 1.93\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 1510, Epoch: 2, Batch: 561 out of 950, Loss: 1.93\n",
      "Iteration: 1520, Epoch: 2, Batch: 571 out of 950, Loss: 1.90\n",
      "Iteration: 1530, Epoch: 2, Batch: 581 out of 950, Loss: 1.91\n",
      "Iteration: 1540, Epoch: 2, Batch: 591 out of 950, Loss: 1.97\n",
      "Iteration: 1550, Epoch: 2, Batch: 601 out of 950, Loss: 1.97\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou shall the\n",
      "Iteration: 1560, Epoch: 2, Batch: 611 out of 950, Loss: 1.93\n",
      "Iteration: 1570, Epoch: 2, Batch: 621 out of 950, Loss: 1.87\n",
      "Iteration: 1580, Epoch: 2, Batch: 631 out of 950, Loss: 1.89\n",
      "Iteration: 1590, Epoch: 2, Batch: 641 out of 950, Loss: 1.84\n",
      "Iteration: 1600, Epoch: 2, Batch: 651 out of 950, Loss: 1.96\n",
      "thou art more the see t\n",
      "to be or not to the shall\n",
      "wherefore art thou have and \n",
      "Iteration: 1610, Epoch: 2, Batch: 661 out of 950, Loss: 1.88\n",
      "Iteration: 1620, Epoch: 2, Batch: 671 out of 950, Loss: 2.08\n",
      "Iteration: 1630, Epoch: 2, Batch: 681 out of 950, Loss: 1.89\n",
      "Iteration: 1640, Epoch: 2, Batch: 691 out of 950, Loss: 1.88\n",
      "Iteration: 1650, Epoch: 2, Batch: 701 out of 950, Loss: 1.89\n",
      "thou art more the so th\n",
      "to be or not to the so th\n",
      "wherefore art thou have the \n",
      "Iteration: 1660, Epoch: 2, Batch: 711 out of 950, Loss: 1.88\n",
      "Iteration: 1670, Epoch: 2, Batch: 721 out of 950, Loss: 1.90\n",
      "Iteration: 1680, Epoch: 2, Batch: 731 out of 950, Loss: 1.90\n",
      "Iteration: 1690, Epoch: 2, Batch: 741 out of 950, Loss: 1.86\n",
      "Iteration: 1700, Epoch: 2, Batch: 751 out of 950, Loss: 1.83\n",
      "thou art more the see t\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 1710, Epoch: 2, Batch: 761 out of 950, Loss: 1.90\n",
      "Iteration: 1720, Epoch: 2, Batch: 771 out of 950, Loss: 1.92\n",
      "Iteration: 1730, Epoch: 2, Batch: 781 out of 950, Loss: 1.88\n",
      "Iteration: 1740, Epoch: 2, Batch: 791 out of 950, Loss: 1.93\n",
      "Iteration: 1750, Epoch: 2, Batch: 801 out of 950, Loss: 1.94\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have and \n",
      "Iteration: 1760, Epoch: 2, Batch: 811 out of 950, Loss: 1.92\n",
      "Iteration: 1770, Epoch: 2, Batch: 821 out of 950, Loss: 1.87\n",
      "Iteration: 1780, Epoch: 2, Batch: 831 out of 950, Loss: 1.88\n",
      "Iteration: 1790, Epoch: 2, Batch: 841 out of 950, Loss: 1.93\n",
      "Iteration: 1800, Epoch: 2, Batch: 851 out of 950, Loss: 1.83\n",
      "thou art more the so th\n",
      "to be or not to the so th\n",
      "wherefore art thou the so th\n",
      "Iteration: 1810, Epoch: 2, Batch: 861 out of 950, Loss: 1.88\n",
      "Iteration: 1820, Epoch: 2, Batch: 871 out of 950, Loss: 1.86\n",
      "Iteration: 1830, Epoch: 2, Batch: 881 out of 950, Loss: 1.97\n",
      "Iteration: 1840, Epoch: 2, Batch: 891 out of 950, Loss: 1.87\n",
      "Iteration: 1850, Epoch: 2, Batch: 901 out of 950, Loss: 1.87\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 1860, Epoch: 2, Batch: 911 out of 950, Loss: 1.84\n",
      "Iteration: 1870, Epoch: 2, Batch: 921 out of 950, Loss: 1.88\n",
      "Iteration: 1880, Epoch: 2, Batch: 931 out of 950, Loss: 2.05\n",
      "Iteration: 1890, Epoch: 2, Batch: 941 out of 950, Loss: 1.88\n",
      "Starting Epoch #3 of 10.\n",
      "Iteration: 1900, Epoch: 3, Batch: 2 out of 950, Loss: 1.88\n",
      "thou art more the see h\n",
      "to be or not to the see h\n",
      "wherefore art thou have the \n",
      "Iteration: 1910, Epoch: 3, Batch: 12 out of 950, Loss: 1.88\n",
      "Iteration: 1920, Epoch: 3, Batch: 22 out of 950, Loss: 1.89\n",
      "Iteration: 1930, Epoch: 3, Batch: 32 out of 950, Loss: 1.86\n",
      "Iteration: 1940, Epoch: 3, Batch: 42 out of 950, Loss: 1.82\n",
      "Iteration: 1950, Epoch: 3, Batch: 52 out of 950, Loss: 1.87\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 1960, Epoch: 3, Batch: 62 out of 950, Loss: 1.81\n",
      "Iteration: 1970, Epoch: 3, Batch: 72 out of 950, Loss: 1.90\n",
      "Iteration: 1980, Epoch: 3, Batch: 82 out of 950, Loss: 1.87\n",
      "Iteration: 1990, Epoch: 3, Batch: 92 out of 950, Loss: 1.89\n",
      "Iteration: 2000, Epoch: 3, Batch: 102 out of 950, Loss: 1.96\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou heart the\n",
      "Iteration: 2010, Epoch: 3, Batch: 112 out of 950, Loss: 1.86\n",
      "Iteration: 2020, Epoch: 3, Batch: 122 out of 950, Loss: 1.84\n",
      "Iteration: 2030, Epoch: 3, Batch: 132 out of 950, Loss: 1.83\n",
      "Iteration: 2040, Epoch: 3, Batch: 142 out of 950, Loss: 1.88\n",
      "Iteration: 2050, Epoch: 3, Batch: 152 out of 950, Loss: 1.86\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 2060, Epoch: 3, Batch: 162 out of 950, Loss: 1.85\n",
      "Iteration: 2070, Epoch: 3, Batch: 172 out of 950, Loss: 1.89\n",
      "Iteration: 2080, Epoch: 3, Batch: 182 out of 950, Loss: 1.84\n",
      "Iteration: 2090, Epoch: 3, Batch: 192 out of 950, Loss: 1.81\n",
      "Iteration: 2100, Epoch: 3, Batch: 202 out of 950, Loss: 1.89\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2110, Epoch: 3, Batch: 212 out of 950, Loss: 1.89\n",
      "Iteration: 2120, Epoch: 3, Batch: 222 out of 950, Loss: 1.88\n",
      "Iteration: 2130, Epoch: 3, Batch: 232 out of 950, Loss: 1.82\n",
      "Iteration: 2140, Epoch: 3, Batch: 242 out of 950, Loss: 1.87\n",
      "Iteration: 2150, Epoch: 3, Batch: 252 out of 950, Loss: 1.83\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 2160, Epoch: 3, Batch: 262 out of 950, Loss: 1.94\n",
      "Iteration: 2170, Epoch: 3, Batch: 272 out of 950, Loss: 1.85\n",
      "Iteration: 2180, Epoch: 3, Batch: 282 out of 950, Loss: 1.90\n",
      "Iteration: 2190, Epoch: 3, Batch: 292 out of 950, Loss: 1.85\n",
      "Iteration: 2200, Epoch: 3, Batch: 302 out of 950, Loss: 1.82\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou have the \n",
      "Iteration: 2210, Epoch: 3, Batch: 312 out of 950, Loss: 1.83\n",
      "Iteration: 2220, Epoch: 3, Batch: 322 out of 950, Loss: 1.81\n",
      "Iteration: 2230, Epoch: 3, Batch: 332 out of 950, Loss: 1.83\n",
      "Iteration: 2240, Epoch: 3, Batch: 342 out of 950, Loss: 1.93\n",
      "Iteration: 2250, Epoch: 3, Batch: 352 out of 950, Loss: 1.83\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou the stand\n",
      "Iteration: 2260, Epoch: 3, Batch: 362 out of 950, Loss: 1.82\n",
      "Iteration: 2270, Epoch: 3, Batch: 372 out of 950, Loss: 1.87\n",
      "Iteration: 2280, Epoch: 3, Batch: 382 out of 950, Loss: 1.84\n",
      "Iteration: 2290, Epoch: 3, Batch: 392 out of 950, Loss: 1.81\n",
      "Iteration: 2300, Epoch: 3, Batch: 402 out of 950, Loss: 1.83\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou heart the\n",
      "Iteration: 2310, Epoch: 3, Batch: 412 out of 950, Loss: 1.82\n",
      "Iteration: 2320, Epoch: 3, Batch: 422 out of 950, Loss: 1.86\n",
      "Iteration: 2330, Epoch: 3, Batch: 432 out of 950, Loss: 1.81\n",
      "Iteration: 2340, Epoch: 3, Batch: 442 out of 950, Loss: 1.80\n",
      "Iteration: 2350, Epoch: 3, Batch: 452 out of 950, Loss: 1.83\n",
      "thou art more the so th\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2360, Epoch: 3, Batch: 462 out of 950, Loss: 1.75\n",
      "Iteration: 2370, Epoch: 3, Batch: 472 out of 950, Loss: 1.86\n",
      "Iteration: 2380, Epoch: 3, Batch: 482 out of 950, Loss: 1.81\n",
      "Iteration: 2390, Epoch: 3, Batch: 492 out of 950, Loss: 1.77\n",
      "Iteration: 2400, Epoch: 3, Batch: 502 out of 950, Loss: 1.85\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou see the s\n",
      "Iteration: 2410, Epoch: 3, Batch: 512 out of 950, Loss: 1.76\n",
      "Iteration: 2420, Epoch: 3, Batch: 522 out of 950, Loss: 1.80\n",
      "Iteration: 2430, Epoch: 3, Batch: 532 out of 950, Loss: 1.87\n",
      "Iteration: 2440, Epoch: 3, Batch: 542 out of 950, Loss: 1.82\n",
      "Iteration: 2450, Epoch: 3, Batch: 552 out of 950, Loss: 1.82\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou have the \n",
      "Iteration: 2460, Epoch: 3, Batch: 562 out of 950, Loss: 1.80\n",
      "Iteration: 2470, Epoch: 3, Batch: 572 out of 950, Loss: 1.83\n",
      "Iteration: 2480, Epoch: 3, Batch: 582 out of 950, Loss: 1.77\n",
      "Iteration: 2490, Epoch: 3, Batch: 592 out of 950, Loss: 1.86\n",
      "Iteration: 2500, Epoch: 3, Batch: 602 out of 950, Loss: 1.77\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou the stand\n",
      "Iteration: 2510, Epoch: 3, Batch: 612 out of 950, Loss: 1.76\n",
      "Iteration: 2520, Epoch: 3, Batch: 622 out of 950, Loss: 1.84\n",
      "Iteration: 2530, Epoch: 3, Batch: 632 out of 950, Loss: 1.82\n",
      "Iteration: 2540, Epoch: 3, Batch: 642 out of 950, Loss: 1.82\n",
      "Iteration: 2550, Epoch: 3, Batch: 652 out of 950, Loss: 1.74\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou the see t\n",
      "Iteration: 2560, Epoch: 3, Batch: 662 out of 950, Loss: 1.80\n",
      "Iteration: 2570, Epoch: 3, Batch: 672 out of 950, Loss: 1.79\n",
      "Iteration: 2580, Epoch: 3, Batch: 682 out of 950, Loss: 1.85\n",
      "Iteration: 2590, Epoch: 3, Batch: 692 out of 950, Loss: 1.87\n",
      "Iteration: 2600, Epoch: 3, Batch: 702 out of 950, Loss: 1.82\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2610, Epoch: 3, Batch: 712 out of 950, Loss: 1.79\n",
      "Iteration: 2620, Epoch: 3, Batch: 722 out of 950, Loss: 1.85\n",
      "Iteration: 2630, Epoch: 3, Batch: 732 out of 950, Loss: 1.79\n",
      "Iteration: 2640, Epoch: 3, Batch: 742 out of 950, Loss: 1.80\n",
      "Iteration: 2650, Epoch: 3, Batch: 752 out of 950, Loss: 1.81\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2660, Epoch: 3, Batch: 762 out of 950, Loss: 1.77\n",
      "Iteration: 2670, Epoch: 3, Batch: 772 out of 950, Loss: 1.84\n",
      "Iteration: 2680, Epoch: 3, Batch: 782 out of 950, Loss: 1.74\n",
      "Iteration: 2690, Epoch: 3, Batch: 792 out of 950, Loss: 1.79\n",
      "Iteration: 2700, Epoch: 3, Batch: 802 out of 950, Loss: 2.11\n",
      "thou art more the shall\n",
      "to be or not to the strin\n",
      "wherefore art thou have the \n",
      "Iteration: 2710, Epoch: 3, Batch: 812 out of 950, Loss: 1.82\n",
      "Iteration: 2720, Epoch: 3, Batch: 822 out of 950, Loss: 1.75\n",
      "Iteration: 2730, Epoch: 3, Batch: 832 out of 950, Loss: 1.83\n",
      "Iteration: 2740, Epoch: 3, Batch: 842 out of 950, Loss: 1.77\n",
      "Iteration: 2750, Epoch: 3, Batch: 852 out of 950, Loss: 1.78\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2760, Epoch: 3, Batch: 862 out of 950, Loss: 1.83\n",
      "Iteration: 2770, Epoch: 3, Batch: 872 out of 950, Loss: 1.75\n",
      "Iteration: 2780, Epoch: 3, Batch: 882 out of 950, Loss: 1.78\n",
      "Iteration: 2790, Epoch: 3, Batch: 892 out of 950, Loss: 1.79\n",
      "Iteration: 2800, Epoch: 3, Batch: 902 out of 950, Loss: 1.87\n",
      "thou art more the the c\n",
      "to be or not to the the c\n",
      "wherefore art thou have the \n",
      "Iteration: 2810, Epoch: 3, Batch: 912 out of 950, Loss: 1.82\n",
      "Iteration: 2820, Epoch: 3, Batch: 922 out of 950, Loss: 1.89\n",
      "Iteration: 2830, Epoch: 3, Batch: 932 out of 950, Loss: 1.87\n",
      "Iteration: 2840, Epoch: 3, Batch: 942 out of 950, Loss: 1.83\n",
      "Starting Epoch #4 of 10.\n",
      "Iteration: 2850, Epoch: 4, Batch: 3 out of 950, Loss: 1.75\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2860, Epoch: 4, Batch: 13 out of 950, Loss: 1.81\n",
      "Iteration: 2870, Epoch: 4, Batch: 23 out of 950, Loss: 1.76\n",
      "Iteration: 2880, Epoch: 4, Batch: 33 out of 950, Loss: 1.75\n",
      "Iteration: 2890, Epoch: 4, Batch: 43 out of 950, Loss: 1.76\n",
      "Iteration: 2900, Epoch: 4, Batch: 53 out of 950, Loss: 1.81\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2910, Epoch: 4, Batch: 63 out of 950, Loss: 1.82\n",
      "Iteration: 2920, Epoch: 4, Batch: 73 out of 950, Loss: 1.87\n",
      "Iteration: 2930, Epoch: 4, Batch: 83 out of 950, Loss: 1.81\n",
      "Iteration: 2940, Epoch: 4, Batch: 93 out of 950, Loss: 1.79\n",
      "Iteration: 2950, Epoch: 4, Batch: 103 out of 950, Loss: 1.78\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 2960, Epoch: 4, Batch: 113 out of 950, Loss: 1.78\n",
      "Iteration: 2970, Epoch: 4, Batch: 123 out of 950, Loss: 1.74\n",
      "Iteration: 2980, Epoch: 4, Batch: 133 out of 950, Loss: 1.81\n",
      "Iteration: 2990, Epoch: 4, Batch: 143 out of 950, Loss: 1.71\n",
      "Iteration: 3000, Epoch: 4, Batch: 153 out of 950, Loss: 1.76\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 3010, Epoch: 4, Batch: 163 out of 950, Loss: 1.79\n",
      "Iteration: 3020, Epoch: 4, Batch: 173 out of 950, Loss: 1.78\n",
      "Iteration: 3030, Epoch: 4, Batch: 183 out of 950, Loss: 1.75\n",
      "Iteration: 3040, Epoch: 4, Batch: 193 out of 950, Loss: 1.74\n",
      "Iteration: 3050, Epoch: 4, Batch: 203 out of 950, Loss: 1.70\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3060, Epoch: 4, Batch: 213 out of 950, Loss: 1.78\n",
      "Iteration: 3070, Epoch: 4, Batch: 223 out of 950, Loss: 1.85\n",
      "Iteration: 3080, Epoch: 4, Batch: 233 out of 950, Loss: 1.72\n",
      "Iteration: 3090, Epoch: 4, Batch: 243 out of 950, Loss: 1.73\n",
      "Iteration: 3100, Epoch: 4, Batch: 253 out of 950, Loss: 1.73\n",
      "thou art more the sent \n",
      "to be or not to the sent \n",
      "wherefore art thou have the \n",
      "Iteration: 3110, Epoch: 4, Batch: 263 out of 950, Loss: 1.77\n",
      "Iteration: 3120, Epoch: 4, Batch: 273 out of 950, Loss: 1.81\n",
      "Iteration: 3130, Epoch: 4, Batch: 283 out of 950, Loss: 1.78\n",
      "Iteration: 3140, Epoch: 4, Batch: 293 out of 950, Loss: 1.74\n",
      "Iteration: 3150, Epoch: 4, Batch: 303 out of 950, Loss: 1.76\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 3160, Epoch: 4, Batch: 313 out of 950, Loss: 1.83\n",
      "Iteration: 3170, Epoch: 4, Batch: 323 out of 950, Loss: 1.79\n",
      "Iteration: 3180, Epoch: 4, Batch: 333 out of 950, Loss: 1.80\n",
      "Iteration: 3190, Epoch: 4, Batch: 343 out of 950, Loss: 1.77\n",
      "Iteration: 3200, Epoch: 4, Batch: 353 out of 950, Loss: 1.75\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou heart the\n",
      "Iteration: 3210, Epoch: 4, Batch: 363 out of 950, Loss: 1.70\n",
      "Iteration: 3220, Epoch: 4, Batch: 373 out of 950, Loss: 1.77\n",
      "Iteration: 3230, Epoch: 4, Batch: 383 out of 950, Loss: 1.74\n",
      "Iteration: 3240, Epoch: 4, Batch: 393 out of 950, Loss: 1.75\n",
      "Iteration: 3250, Epoch: 4, Batch: 403 out of 950, Loss: 1.81\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou the stand\n",
      "Iteration: 3260, Epoch: 4, Batch: 413 out of 950, Loss: 1.71\n",
      "Iteration: 3270, Epoch: 4, Batch: 423 out of 950, Loss: 1.66\n",
      "Iteration: 3280, Epoch: 4, Batch: 433 out of 950, Loss: 1.76\n",
      "Iteration: 3290, Epoch: 4, Batch: 443 out of 950, Loss: 1.74\n",
      "Iteration: 3300, Epoch: 4, Batch: 453 out of 950, Loss: 1.80\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou so the se\n",
      "Iteration: 3310, Epoch: 4, Batch: 463 out of 950, Loss: 1.83\n",
      "Iteration: 3320, Epoch: 4, Batch: 473 out of 950, Loss: 1.72\n",
      "Iteration: 3330, Epoch: 4, Batch: 483 out of 950, Loss: 1.73\n",
      "Iteration: 3340, Epoch: 4, Batch: 493 out of 950, Loss: 1.75\n",
      "Iteration: 3350, Epoch: 4, Batch: 503 out of 950, Loss: 1.77\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3360, Epoch: 4, Batch: 513 out of 950, Loss: 1.75\n",
      "Iteration: 3370, Epoch: 4, Batch: 523 out of 950, Loss: 1.77\n",
      "Iteration: 3380, Epoch: 4, Batch: 533 out of 950, Loss: 1.80\n",
      "Iteration: 3390, Epoch: 4, Batch: 543 out of 950, Loss: 1.80\n",
      "Iteration: 3400, Epoch: 4, Batch: 553 out of 950, Loss: 1.80\n",
      "thou art more the will \n",
      "to be or not to the will \n",
      "wherefore art thou the will \n",
      "Iteration: 3410, Epoch: 4, Batch: 563 out of 950, Loss: 1.83\n",
      "Iteration: 3420, Epoch: 4, Batch: 573 out of 950, Loss: 1.75\n",
      "Iteration: 3430, Epoch: 4, Batch: 583 out of 950, Loss: 1.69\n",
      "Iteration: 3440, Epoch: 4, Batch: 593 out of 950, Loss: 1.84\n",
      "Iteration: 3450, Epoch: 4, Batch: 603 out of 950, Loss: 1.72\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3460, Epoch: 4, Batch: 613 out of 950, Loss: 1.84\n",
      "Iteration: 3470, Epoch: 4, Batch: 623 out of 950, Loss: 1.76\n",
      "Iteration: 3480, Epoch: 4, Batch: 633 out of 950, Loss: 1.79\n",
      "Iteration: 3490, Epoch: 4, Batch: 643 out of 950, Loss: 1.72\n",
      "Iteration: 3500, Epoch: 4, Batch: 653 out of 950, Loss: 1.77\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3510, Epoch: 4, Batch: 663 out of 950, Loss: 1.73\n",
      "Iteration: 3520, Epoch: 4, Batch: 673 out of 950, Loss: 1.70\n",
      "Iteration: 3530, Epoch: 4, Batch: 683 out of 950, Loss: 1.79\n",
      "Iteration: 3540, Epoch: 4, Batch: 693 out of 950, Loss: 1.76\n",
      "Iteration: 3550, Epoch: 4, Batch: 703 out of 950, Loss: 1.76\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3560, Epoch: 4, Batch: 713 out of 950, Loss: 1.80\n",
      "Iteration: 3570, Epoch: 4, Batch: 723 out of 950, Loss: 1.82\n",
      "Iteration: 3580, Epoch: 4, Batch: 733 out of 950, Loss: 1.71\n",
      "Iteration: 3590, Epoch: 4, Batch: 743 out of 950, Loss: 1.73\n",
      "Iteration: 3600, Epoch: 4, Batch: 753 out of 950, Loss: 1.77\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3610, Epoch: 4, Batch: 763 out of 950, Loss: 1.75\n",
      "Iteration: 3620, Epoch: 4, Batch: 773 out of 950, Loss: 1.69\n",
      "Iteration: 3630, Epoch: 4, Batch: 783 out of 950, Loss: 1.72\n",
      "Iteration: 3640, Epoch: 4, Batch: 793 out of 950, Loss: 1.77\n",
      "Iteration: 3650, Epoch: 4, Batch: 803 out of 950, Loss: 1.75\n",
      "thou art more the so th\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 3660, Epoch: 4, Batch: 813 out of 950, Loss: 1.77\n",
      "Iteration: 3670, Epoch: 4, Batch: 823 out of 950, Loss: 1.77\n",
      "Iteration: 3680, Epoch: 4, Batch: 833 out of 950, Loss: 1.82\n",
      "Iteration: 3690, Epoch: 4, Batch: 843 out of 950, Loss: 1.64\n",
      "Iteration: 3700, Epoch: 4, Batch: 853 out of 950, Loss: 1.94\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3710, Epoch: 4, Batch: 863 out of 950, Loss: 1.77\n",
      "Iteration: 3720, Epoch: 4, Batch: 873 out of 950, Loss: 1.78\n",
      "Iteration: 3730, Epoch: 4, Batch: 883 out of 950, Loss: 1.71\n",
      "Iteration: 3740, Epoch: 4, Batch: 893 out of 950, Loss: 1.80\n",
      "Iteration: 3750, Epoch: 4, Batch: 903 out of 950, Loss: 1.73\n",
      "thou art more the stand\n",
      "to be or not to the come \n",
      "wherefore art thou have the \n",
      "Iteration: 3760, Epoch: 4, Batch: 913 out of 950, Loss: 1.79\n",
      "Iteration: 3770, Epoch: 4, Batch: 923 out of 950, Loss: 1.72\n",
      "Iteration: 3780, Epoch: 4, Batch: 933 out of 950, Loss: 1.77\n",
      "Iteration: 3790, Epoch: 4, Batch: 943 out of 950, Loss: 1.76\n",
      "Starting Epoch #5 of 10.\n",
      "Iteration: 3800, Epoch: 5, Batch: 4 out of 950, Loss: 1.68\n",
      "thou art more the so th\n",
      "to be or not to the so th\n",
      "wherefore art thou heart the\n",
      "Iteration: 3810, Epoch: 5, Batch: 14 out of 950, Loss: 1.75\n",
      "Iteration: 3820, Epoch: 5, Batch: 24 out of 950, Loss: 1.80\n",
      "Iteration: 3830, Epoch: 5, Batch: 34 out of 950, Loss: 1.72\n",
      "Iteration: 3840, Epoch: 5, Batch: 44 out of 950, Loss: 1.71\n",
      "Iteration: 3850, Epoch: 5, Batch: 54 out of 950, Loss: 1.73\n",
      "thou art more the shall\n",
      "to be or not to the come \n",
      "wherefore art thou have the \n",
      "Iteration: 3860, Epoch: 5, Batch: 64 out of 950, Loss: 1.73\n",
      "Iteration: 3870, Epoch: 5, Batch: 74 out of 950, Loss: 1.68\n",
      "Iteration: 3880, Epoch: 5, Batch: 84 out of 950, Loss: 1.74\n",
      "Iteration: 3890, Epoch: 5, Batch: 94 out of 950, Loss: 1.70\n",
      "Iteration: 3900, Epoch: 5, Batch: 104 out of 950, Loss: 1.69\n",
      "thou art more the see i\n",
      "to be or not to the see i\n",
      "wherefore art thou have the \n",
      "Iteration: 3910, Epoch: 5, Batch: 114 out of 950, Loss: 1.76\n",
      "Iteration: 3920, Epoch: 5, Batch: 124 out of 950, Loss: 1.81\n",
      "Iteration: 3930, Epoch: 5, Batch: 134 out of 950, Loss: 1.82\n",
      "Iteration: 3940, Epoch: 5, Batch: 144 out of 950, Loss: 1.69\n",
      "Iteration: 3950, Epoch: 5, Batch: 154 out of 950, Loss: 1.78\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 3960, Epoch: 5, Batch: 164 out of 950, Loss: 1.72\n",
      "Iteration: 3970, Epoch: 5, Batch: 174 out of 950, Loss: 1.74\n",
      "Iteration: 3980, Epoch: 5, Batch: 184 out of 950, Loss: 1.75\n",
      "Iteration: 3990, Epoch: 5, Batch: 194 out of 950, Loss: 1.80\n",
      "Iteration: 4000, Epoch: 5, Batch: 204 out of 950, Loss: 1.73\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou have the \n",
      "Iteration: 4010, Epoch: 5, Batch: 214 out of 950, Loss: 1.71\n",
      "Iteration: 4020, Epoch: 5, Batch: 224 out of 950, Loss: 1.77\n",
      "Iteration: 4030, Epoch: 5, Batch: 234 out of 950, Loss: 1.75\n",
      "Iteration: 4040, Epoch: 5, Batch: 244 out of 950, Loss: 1.77\n",
      "Iteration: 4050, Epoch: 5, Batch: 254 out of 950, Loss: 1.67\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou so the se\n",
      "Iteration: 4060, Epoch: 5, Batch: 264 out of 950, Loss: 1.75\n",
      "Iteration: 4070, Epoch: 5, Batch: 274 out of 950, Loss: 1.78\n",
      "Iteration: 4080, Epoch: 5, Batch: 284 out of 950, Loss: 1.76\n",
      "Iteration: 4090, Epoch: 5, Batch: 294 out of 950, Loss: 1.83\n",
      "Iteration: 4100, Epoch: 5, Batch: 304 out of 950, Loss: 2.05\n",
      "thou art more the see i\n",
      "to be or not to the see i\n",
      "wherefore art thou have the \n",
      "Iteration: 4110, Epoch: 5, Batch: 314 out of 950, Loss: 1.83\n",
      "Iteration: 4120, Epoch: 5, Batch: 324 out of 950, Loss: 1.68\n",
      "Iteration: 4130, Epoch: 5, Batch: 334 out of 950, Loss: 1.78\n",
      "Iteration: 4140, Epoch: 5, Batch: 344 out of 950, Loss: 1.69\n",
      "Iteration: 4150, Epoch: 5, Batch: 354 out of 950, Loss: 1.72\n",
      "thou art more the so th\n",
      "to be or not to the so th\n",
      "wherefore art thou shall be \n",
      "Iteration: 4160, Epoch: 5, Batch: 364 out of 950, Loss: 1.70\n",
      "Iteration: 4170, Epoch: 5, Batch: 374 out of 950, Loss: 1.71\n",
      "Iteration: 4180, Epoch: 5, Batch: 384 out of 950, Loss: 1.71\n",
      "Iteration: 4190, Epoch: 5, Batch: 394 out of 950, Loss: 1.79\n",
      "Iteration: 4200, Epoch: 5, Batch: 404 out of 950, Loss: 1.70\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou the come \n",
      "Iteration: 4210, Epoch: 5, Batch: 414 out of 950, Loss: 1.71\n",
      "Iteration: 4220, Epoch: 5, Batch: 424 out of 950, Loss: 1.75\n",
      "Iteration: 4230, Epoch: 5, Batch: 434 out of 950, Loss: 1.67\n",
      "Iteration: 4240, Epoch: 5, Batch: 444 out of 950, Loss: 1.81\n",
      "Iteration: 4250, Epoch: 5, Batch: 454 out of 950, Loss: 1.75\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 4260, Epoch: 5, Batch: 464 out of 950, Loss: 1.80\n",
      "Iteration: 4270, Epoch: 5, Batch: 474 out of 950, Loss: 1.74\n",
      "Iteration: 4280, Epoch: 5, Batch: 484 out of 950, Loss: 1.72\n",
      "Iteration: 4290, Epoch: 5, Batch: 494 out of 950, Loss: 1.72\n",
      "Iteration: 4300, Epoch: 5, Batch: 504 out of 950, Loss: 1.69\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou the shall\n",
      "Iteration: 4310, Epoch: 5, Batch: 514 out of 950, Loss: 1.82\n",
      "Iteration: 4320, Epoch: 5, Batch: 524 out of 950, Loss: 1.69\n",
      "Iteration: 4330, Epoch: 5, Batch: 534 out of 950, Loss: 1.73\n",
      "Iteration: 4340, Epoch: 5, Batch: 544 out of 950, Loss: 1.78\n",
      "Iteration: 4350, Epoch: 5, Batch: 554 out of 950, Loss: 1.84\n",
      "thou art more the see i\n",
      "to be or not to the see i\n",
      "wherefore art thou have the \n",
      "Iteration: 4360, Epoch: 5, Batch: 564 out of 950, Loss: 1.73\n",
      "Iteration: 4370, Epoch: 5, Batch: 574 out of 950, Loss: 1.68\n",
      "Iteration: 4380, Epoch: 5, Batch: 584 out of 950, Loss: 1.67\n",
      "Iteration: 4390, Epoch: 5, Batch: 594 out of 950, Loss: 1.71\n",
      "Iteration: 4400, Epoch: 5, Batch: 604 out of 950, Loss: 1.64\n",
      "thou art more the shoul\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 4410, Epoch: 5, Batch: 614 out of 950, Loss: 1.76\n",
      "Iteration: 4420, Epoch: 5, Batch: 624 out of 950, Loss: 1.67\n",
      "Iteration: 4430, Epoch: 5, Batch: 634 out of 950, Loss: 1.72\n",
      "Iteration: 4440, Epoch: 5, Batch: 644 out of 950, Loss: 1.68\n",
      "Iteration: 4450, Epoch: 5, Batch: 654 out of 950, Loss: 1.66\n",
      "thou art more the will \n",
      "to be or not to the will \n",
      "wherefore art thou the will \n",
      "Iteration: 4460, Epoch: 5, Batch: 664 out of 950, Loss: 1.84\n",
      "Iteration: 4470, Epoch: 5, Batch: 674 out of 950, Loss: 1.72\n",
      "Iteration: 4480, Epoch: 5, Batch: 684 out of 950, Loss: 1.69\n",
      "Iteration: 4490, Epoch: 5, Batch: 694 out of 950, Loss: 1.74\n",
      "Iteration: 4500, Epoch: 5, Batch: 704 out of 950, Loss: 1.75\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the stand\n",
      "to be or not to the come \n",
      "wherefore art thou art the c\n",
      "Iteration: 4510, Epoch: 5, Batch: 714 out of 950, Loss: 1.74\n",
      "Iteration: 4520, Epoch: 5, Batch: 724 out of 950, Loss: 1.72\n",
      "Iteration: 4530, Epoch: 5, Batch: 734 out of 950, Loss: 1.75\n",
      "Iteration: 4540, Epoch: 5, Batch: 744 out of 950, Loss: 1.68\n",
      "Iteration: 4550, Epoch: 5, Batch: 754 out of 950, Loss: 1.74\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou shall the\n",
      "Iteration: 4560, Epoch: 5, Batch: 764 out of 950, Loss: 1.66\n",
      "Iteration: 4570, Epoch: 5, Batch: 774 out of 950, Loss: 1.65\n",
      "Iteration: 4580, Epoch: 5, Batch: 784 out of 950, Loss: 1.64\n",
      "Iteration: 4590, Epoch: 5, Batch: 794 out of 950, Loss: 1.72\n",
      "Iteration: 4600, Epoch: 5, Batch: 804 out of 950, Loss: 1.74\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou have the \n",
      "Iteration: 4610, Epoch: 5, Batch: 814 out of 950, Loss: 1.65\n",
      "Iteration: 4620, Epoch: 5, Batch: 824 out of 950, Loss: 1.70\n",
      "Iteration: 4630, Epoch: 5, Batch: 834 out of 950, Loss: 1.75\n",
      "Iteration: 4640, Epoch: 5, Batch: 844 out of 950, Loss: 1.72\n",
      "Iteration: 4650, Epoch: 5, Batch: 854 out of 950, Loss: 1.67\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou shall be \n",
      "Iteration: 4660, Epoch: 5, Batch: 864 out of 950, Loss: 1.66\n",
      "Iteration: 4670, Epoch: 5, Batch: 874 out of 950, Loss: 1.68\n",
      "Iteration: 4680, Epoch: 5, Batch: 884 out of 950, Loss: 1.65\n",
      "Iteration: 4690, Epoch: 5, Batch: 894 out of 950, Loss: 1.58\n",
      "Iteration: 4700, Epoch: 5, Batch: 904 out of 950, Loss: 1.65\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have the \n",
      "Iteration: 4710, Epoch: 5, Batch: 914 out of 950, Loss: 1.72\n",
      "Iteration: 4720, Epoch: 5, Batch: 924 out of 950, Loss: 1.64\n",
      "Iteration: 4730, Epoch: 5, Batch: 934 out of 950, Loss: 1.65\n",
      "Iteration: 4740, Epoch: 5, Batch: 944 out of 950, Loss: 1.79\n",
      "Starting Epoch #6 of 10.\n",
      "Iteration: 4750, Epoch: 6, Batch: 5 out of 950, Loss: 1.78\n",
      "thou art more the so so\n",
      "to be or not to the stand\n",
      "wherefore art thou shall the\n",
      "Iteration: 4760, Epoch: 6, Batch: 15 out of 950, Loss: 1.69\n",
      "Iteration: 4770, Epoch: 6, Batch: 25 out of 950, Loss: 1.70\n",
      "Iteration: 4780, Epoch: 6, Batch: 35 out of 950, Loss: 1.71\n",
      "Iteration: 4790, Epoch: 6, Batch: 45 out of 950, Loss: 1.65\n",
      "Iteration: 4800, Epoch: 6, Batch: 55 out of 950, Loss: 1.77\n",
      "thou art more the come \n",
      "to be or not to the can t\n",
      "wherefore art thou the come \n",
      "Iteration: 4810, Epoch: 6, Batch: 65 out of 950, Loss: 1.71\n",
      "Iteration: 4820, Epoch: 6, Batch: 75 out of 950, Loss: 1.70\n",
      "Iteration: 4830, Epoch: 6, Batch: 85 out of 950, Loss: 1.71\n",
      "Iteration: 4840, Epoch: 6, Batch: 95 out of 950, Loss: 1.68\n",
      "Iteration: 4850, Epoch: 6, Batch: 105 out of 950, Loss: 1.69\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou the strea\n",
      "Iteration: 4860, Epoch: 6, Batch: 115 out of 950, Loss: 1.68\n",
      "Iteration: 4870, Epoch: 6, Batch: 125 out of 950, Loss: 1.63\n",
      "Iteration: 4880, Epoch: 6, Batch: 135 out of 950, Loss: 1.63\n",
      "Iteration: 4890, Epoch: 6, Batch: 145 out of 950, Loss: 1.74\n",
      "Iteration: 4900, Epoch: 6, Batch: 155 out of 950, Loss: 1.77\n",
      "thou art more the come \n",
      "to be or not to the come \n",
      "wherefore art thou have the \n",
      "Iteration: 4910, Epoch: 6, Batch: 165 out of 950, Loss: 1.66\n",
      "Iteration: 4920, Epoch: 6, Batch: 175 out of 950, Loss: 1.76\n",
      "Iteration: 4930, Epoch: 6, Batch: 185 out of 950, Loss: 1.70\n",
      "Iteration: 4940, Epoch: 6, Batch: 195 out of 950, Loss: 1.68\n",
      "Iteration: 4950, Epoch: 6, Batch: 205 out of 950, Loss: 1.72\n",
      "thou art more the see t\n",
      "to be or not to the canno\n",
      "wherefore art thou shall be \n",
      "Iteration: 4960, Epoch: 6, Batch: 215 out of 950, Loss: 1.74\n",
      "Iteration: 4970, Epoch: 6, Batch: 225 out of 950, Loss: 1.74\n",
      "Iteration: 4980, Epoch: 6, Batch: 235 out of 950, Loss: 1.62\n",
      "Iteration: 4990, Epoch: 6, Batch: 245 out of 950, Loss: 1.67\n",
      "Iteration: 5000, Epoch: 6, Batch: 255 out of 950, Loss: 1.62\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou are the s\n",
      "Iteration: 5010, Epoch: 6, Batch: 265 out of 950, Loss: 1.63\n",
      "Iteration: 5020, Epoch: 6, Batch: 275 out of 950, Loss: 1.64\n",
      "Iteration: 5030, Epoch: 6, Batch: 285 out of 950, Loss: 1.65\n",
      "Iteration: 5040, Epoch: 6, Batch: 295 out of 950, Loss: 1.68\n",
      "Iteration: 5050, Epoch: 6, Batch: 305 out of 950, Loss: 1.65\n",
      "thou art more the sent \n",
      "to be or not to the sent \n",
      "wherefore art thou shall be \n",
      "Iteration: 5060, Epoch: 6, Batch: 315 out of 950, Loss: 1.66\n",
      "Iteration: 5070, Epoch: 6, Batch: 325 out of 950, Loss: 1.72\n",
      "Iteration: 5080, Epoch: 6, Batch: 335 out of 950, Loss: 1.69\n",
      "Iteration: 5090, Epoch: 6, Batch: 345 out of 950, Loss: 1.70\n",
      "Iteration: 5100, Epoch: 6, Batch: 355 out of 950, Loss: 1.65\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou shall be \n",
      "Iteration: 5110, Epoch: 6, Batch: 365 out of 950, Loss: 1.69\n",
      "Iteration: 5120, Epoch: 6, Batch: 375 out of 950, Loss: 1.66\n",
      "Iteration: 5130, Epoch: 6, Batch: 385 out of 950, Loss: 1.75\n",
      "Iteration: 5140, Epoch: 6, Batch: 395 out of 950, Loss: 1.70\n",
      "Iteration: 5150, Epoch: 6, Batch: 405 out of 950, Loss: 1.71\n",
      "thou art more the world\n",
      "to be or not to the see t\n",
      "wherefore art thou heart the\n",
      "Iteration: 5160, Epoch: 6, Batch: 415 out of 950, Loss: 1.74\n",
      "Iteration: 5170, Epoch: 6, Batch: 425 out of 950, Loss: 1.74\n",
      "Iteration: 5180, Epoch: 6, Batch: 435 out of 950, Loss: 1.79\n",
      "Iteration: 5190, Epoch: 6, Batch: 445 out of 950, Loss: 1.68\n",
      "Iteration: 5200, Epoch: 6, Batch: 455 out of 950, Loss: 1.76\n",
      "thou art more the shall\n",
      "to be or not to the stand\n",
      "wherefore art thou the shall\n",
      "Iteration: 5210, Epoch: 6, Batch: 465 out of 950, Loss: 1.70\n",
      "Iteration: 5220, Epoch: 6, Batch: 475 out of 950, Loss: 1.63\n",
      "Iteration: 5230, Epoch: 6, Batch: 485 out of 950, Loss: 1.74\n",
      "Iteration: 5240, Epoch: 6, Batch: 495 out of 950, Loss: 1.74\n",
      "Iteration: 5250, Epoch: 6, Batch: 505 out of 950, Loss: 1.75\n",
      "thou art more the so so\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 5260, Epoch: 6, Batch: 515 out of 950, Loss: 1.63\n",
      "Iteration: 5270, Epoch: 6, Batch: 525 out of 950, Loss: 1.65\n",
      "Iteration: 5280, Epoch: 6, Batch: 535 out of 950, Loss: 1.71\n",
      "Iteration: 5290, Epoch: 6, Batch: 545 out of 950, Loss: 1.66\n",
      "Iteration: 5300, Epoch: 6, Batch: 555 out of 950, Loss: 1.73\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou are the s\n",
      "Iteration: 5310, Epoch: 6, Batch: 565 out of 950, Loss: 1.64\n",
      "Iteration: 5320, Epoch: 6, Batch: 575 out of 950, Loss: 1.74\n",
      "Iteration: 5330, Epoch: 6, Batch: 585 out of 950, Loss: 1.70\n",
      "Iteration: 5340, Epoch: 6, Batch: 595 out of 950, Loss: 1.74\n",
      "Iteration: 5350, Epoch: 6, Batch: 605 out of 950, Loss: 1.66\n",
      "thou art more the see i\n",
      "to be or not to the see i\n",
      "wherefore art thou have the \n",
      "Iteration: 5360, Epoch: 6, Batch: 615 out of 950, Loss: 1.69\n",
      "Iteration: 5370, Epoch: 6, Batch: 625 out of 950, Loss: 1.75\n",
      "Iteration: 5380, Epoch: 6, Batch: 635 out of 950, Loss: 1.61\n",
      "Iteration: 5390, Epoch: 6, Batch: 645 out of 950, Loss: 1.66\n",
      "Iteration: 5400, Epoch: 6, Batch: 655 out of 950, Loss: 1.65\n",
      "thou art more the see t\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 5410, Epoch: 6, Batch: 665 out of 950, Loss: 1.67\n",
      "Iteration: 5420, Epoch: 6, Batch: 675 out of 950, Loss: 1.64\n",
      "Iteration: 5430, Epoch: 6, Batch: 685 out of 950, Loss: 1.76\n",
      "Iteration: 5440, Epoch: 6, Batch: 695 out of 950, Loss: 1.69\n",
      "Iteration: 5450, Epoch: 6, Batch: 705 out of 950, Loss: 1.73\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou shall be \n",
      "Iteration: 5460, Epoch: 6, Batch: 715 out of 950, Loss: 1.71\n",
      "Iteration: 5470, Epoch: 6, Batch: 725 out of 950, Loss: 1.72\n",
      "Iteration: 5480, Epoch: 6, Batch: 735 out of 950, Loss: 1.66\n",
      "Iteration: 5490, Epoch: 6, Batch: 745 out of 950, Loss: 1.74\n",
      "Iteration: 5500, Epoch: 6, Batch: 755 out of 950, Loss: 1.61\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the see i\n",
      "to be or not to the see i\n",
      "wherefore art thou shall be \n",
      "Iteration: 5510, Epoch: 6, Batch: 765 out of 950, Loss: 1.67\n",
      "Iteration: 5520, Epoch: 6, Batch: 775 out of 950, Loss: 1.67\n",
      "Iteration: 5530, Epoch: 6, Batch: 785 out of 950, Loss: 1.77\n",
      "Iteration: 5540, Epoch: 6, Batch: 795 out of 950, Loss: 1.58\n",
      "Iteration: 5550, Epoch: 6, Batch: 805 out of 950, Loss: 1.63\n",
      "thou art more the come \n",
      "to be or not to the come \n",
      "wherefore art thou shall be \n",
      "Iteration: 5560, Epoch: 6, Batch: 815 out of 950, Loss: 1.65\n",
      "Iteration: 5570, Epoch: 6, Batch: 825 out of 950, Loss: 1.70\n",
      "Iteration: 5580, Epoch: 6, Batch: 835 out of 950, Loss: 1.66\n",
      "Iteration: 5590, Epoch: 6, Batch: 845 out of 950, Loss: 1.66\n",
      "Iteration: 5600, Epoch: 6, Batch: 855 out of 950, Loss: 1.71\n",
      "thou art more the send \n",
      "to be or not to the see t\n",
      "wherefore art thou shall be \n",
      "Iteration: 5610, Epoch: 6, Batch: 865 out of 950, Loss: 1.63\n",
      "Iteration: 5620, Epoch: 6, Batch: 875 out of 950, Loss: 1.63\n",
      "Iteration: 5630, Epoch: 6, Batch: 885 out of 950, Loss: 1.62\n",
      "Iteration: 5640, Epoch: 6, Batch: 895 out of 950, Loss: 1.65\n",
      "Iteration: 5650, Epoch: 6, Batch: 905 out of 950, Loss: 1.64\n",
      "thou art more the see t\n",
      "to be or not to the come \n",
      "wherefore art thou shall be \n",
      "Iteration: 5660, Epoch: 6, Batch: 915 out of 950, Loss: 1.68\n",
      "Iteration: 5670, Epoch: 6, Batch: 925 out of 950, Loss: 1.71\n",
      "Iteration: 5680, Epoch: 6, Batch: 935 out of 950, Loss: 1.63\n",
      "Iteration: 5690, Epoch: 6, Batch: 945 out of 950, Loss: 1.61\n",
      "Starting Epoch #7 of 10.\n",
      "Iteration: 5700, Epoch: 7, Batch: 6 out of 950, Loss: 1.69\n",
      "thou art more the come \n",
      "to be or not to the come \n",
      "wherefore art thou shall be \n",
      "Iteration: 5710, Epoch: 7, Batch: 16 out of 950, Loss: 1.56\n",
      "Iteration: 5720, Epoch: 7, Batch: 26 out of 950, Loss: 1.72\n",
      "Iteration: 5730, Epoch: 7, Batch: 36 out of 950, Loss: 1.62\n",
      "Iteration: 5740, Epoch: 7, Batch: 46 out of 950, Loss: 1.67\n",
      "Iteration: 5750, Epoch: 7, Batch: 56 out of 950, Loss: 1.67\n",
      "thou art more the stand\n",
      "to be or not to the come \n",
      "wherefore art thou shall be \n",
      "Iteration: 5760, Epoch: 7, Batch: 66 out of 950, Loss: 1.66\n",
      "Iteration: 5770, Epoch: 7, Batch: 76 out of 950, Loss: 1.81\n",
      "Iteration: 5780, Epoch: 7, Batch: 86 out of 950, Loss: 1.69\n",
      "Iteration: 5790, Epoch: 7, Batch: 96 out of 950, Loss: 1.64\n",
      "Iteration: 5800, Epoch: 7, Batch: 106 out of 950, Loss: 1.70\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou shall be \n",
      "Iteration: 5810, Epoch: 7, Batch: 116 out of 950, Loss: 1.71\n",
      "Iteration: 5820, Epoch: 7, Batch: 126 out of 950, Loss: 1.73\n",
      "Iteration: 5830, Epoch: 7, Batch: 136 out of 950, Loss: 1.55\n",
      "Iteration: 5840, Epoch: 7, Batch: 146 out of 950, Loss: 1.63\n",
      "Iteration: 5850, Epoch: 7, Batch: 156 out of 950, Loss: 1.76\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou have shal\n",
      "Iteration: 5860, Epoch: 7, Batch: 166 out of 950, Loss: 1.63\n",
      "Iteration: 5870, Epoch: 7, Batch: 176 out of 950, Loss: 1.72\n",
      "Iteration: 5880, Epoch: 7, Batch: 186 out of 950, Loss: 1.65\n",
      "Iteration: 5890, Epoch: 7, Batch: 196 out of 950, Loss: 1.65\n",
      "Iteration: 5900, Epoch: 7, Batch: 206 out of 950, Loss: 1.64\n",
      "thou art more the shall\n",
      "to be or not to the see t\n",
      "wherefore art thou have the \n",
      "Iteration: 5910, Epoch: 7, Batch: 216 out of 950, Loss: 1.66\n",
      "Iteration: 5920, Epoch: 7, Batch: 226 out of 950, Loss: 1.57\n",
      "Iteration: 5930, Epoch: 7, Batch: 236 out of 950, Loss: 1.66\n",
      "Iteration: 5940, Epoch: 7, Batch: 246 out of 950, Loss: 1.75\n",
      "Iteration: 5950, Epoch: 7, Batch: 256 out of 950, Loss: 1.66\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou have so s\n",
      "Iteration: 5960, Epoch: 7, Batch: 266 out of 950, Loss: 1.65\n",
      "Iteration: 5970, Epoch: 7, Batch: 276 out of 950, Loss: 1.72\n",
      "Iteration: 5980, Epoch: 7, Batch: 286 out of 950, Loss: 1.65\n",
      "Iteration: 5990, Epoch: 7, Batch: 296 out of 950, Loss: 1.71\n",
      "Iteration: 6000, Epoch: 7, Batch: 306 out of 950, Loss: 1.64\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 6010, Epoch: 7, Batch: 316 out of 950, Loss: 1.66\n",
      "Iteration: 6020, Epoch: 7, Batch: 326 out of 950, Loss: 1.72\n",
      "Iteration: 6030, Epoch: 7, Batch: 336 out of 950, Loss: 1.61\n",
      "Iteration: 6040, Epoch: 7, Batch: 346 out of 950, Loss: 1.73\n",
      "Iteration: 6050, Epoch: 7, Batch: 356 out of 950, Loss: 1.61\n",
      "thou art more the shall\n",
      "to be or not to the shall\n",
      "wherefore art thou art the s\n",
      "Iteration: 6060, Epoch: 7, Batch: 366 out of 950, Loss: 1.75\n",
      "Iteration: 6070, Epoch: 7, Batch: 376 out of 950, Loss: 1.58\n",
      "Iteration: 6080, Epoch: 7, Batch: 386 out of 950, Loss: 1.64\n",
      "Iteration: 6090, Epoch: 7, Batch: 396 out of 950, Loss: 1.62\n",
      "Iteration: 6100, Epoch: 7, Batch: 406 out of 950, Loss: 1.67\n",
      "thou art more the dead \n",
      "to be or not to the soul \n",
      "wherefore art thou art the d\n",
      "Iteration: 6110, Epoch: 7, Batch: 416 out of 950, Loss: 1.62\n",
      "Iteration: 6120, Epoch: 7, Batch: 426 out of 950, Loss: 1.69\n",
      "Iteration: 6130, Epoch: 7, Batch: 436 out of 950, Loss: 1.67\n",
      "Iteration: 6140, Epoch: 7, Batch: 446 out of 950, Loss: 1.63\n",
      "Iteration: 6150, Epoch: 7, Batch: 456 out of 950, Loss: 1.64\n",
      "thou art more the world\n",
      "to be or not to the come \n",
      "wherefore art thou shall be \n",
      "Iteration: 6160, Epoch: 7, Batch: 466 out of 950, Loss: 1.75\n",
      "Iteration: 6170, Epoch: 7, Batch: 476 out of 950, Loss: 1.59\n",
      "Iteration: 6180, Epoch: 7, Batch: 486 out of 950, Loss: 1.69\n",
      "Iteration: 6190, Epoch: 7, Batch: 496 out of 950, Loss: 1.70\n",
      "Iteration: 6200, Epoch: 7, Batch: 506 out of 950, Loss: 1.64\n",
      "thou art more the sent \n",
      "to be or not to the sent \n",
      "wherefore art thou shall be \n",
      "Iteration: 6210, Epoch: 7, Batch: 516 out of 950, Loss: 1.69\n",
      "Iteration: 6220, Epoch: 7, Batch: 526 out of 950, Loss: 1.74\n",
      "Iteration: 6230, Epoch: 7, Batch: 536 out of 950, Loss: 1.65\n",
      "Iteration: 6240, Epoch: 7, Batch: 546 out of 950, Loss: 1.71\n",
      "Iteration: 6250, Epoch: 7, Batch: 556 out of 950, Loss: 1.63\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 6260, Epoch: 7, Batch: 566 out of 950, Loss: 1.68\n",
      "Iteration: 6270, Epoch: 7, Batch: 576 out of 950, Loss: 1.67\n",
      "Iteration: 6280, Epoch: 7, Batch: 586 out of 950, Loss: 1.61\n",
      "Iteration: 6290, Epoch: 7, Batch: 596 out of 950, Loss: 1.68\n",
      "Iteration: 6300, Epoch: 7, Batch: 606 out of 950, Loss: 1.64\n",
      "thou art more the send \n",
      "to be or not to the compl\n",
      "wherefore art thou shall be \n",
      "Iteration: 6310, Epoch: 7, Batch: 616 out of 950, Loss: 1.67\n",
      "Iteration: 6320, Epoch: 7, Batch: 626 out of 950, Loss: 1.66\n",
      "Iteration: 6330, Epoch: 7, Batch: 636 out of 950, Loss: 1.61\n",
      "Iteration: 6340, Epoch: 7, Batch: 646 out of 950, Loss: 1.77\n",
      "Iteration: 6350, Epoch: 7, Batch: 656 out of 950, Loss: 1.65\n",
      "thou art more the sent \n",
      "to be or not to the comme\n",
      "wherefore art thou shall be \n",
      "Iteration: 6360, Epoch: 7, Batch: 666 out of 950, Loss: 1.66\n",
      "Iteration: 6370, Epoch: 7, Batch: 676 out of 950, Loss: 1.72\n",
      "Iteration: 6380, Epoch: 7, Batch: 686 out of 950, Loss: 1.70\n",
      "Iteration: 6390, Epoch: 7, Batch: 696 out of 950, Loss: 1.61\n",
      "Iteration: 6400, Epoch: 7, Batch: 706 out of 950, Loss: 1.68\n",
      "thou art more the send \n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 6410, Epoch: 7, Batch: 716 out of 950, Loss: 1.68\n",
      "Iteration: 6420, Epoch: 7, Batch: 726 out of 950, Loss: 1.63\n",
      "Iteration: 6430, Epoch: 7, Batch: 736 out of 950, Loss: 1.60\n",
      "Iteration: 6440, Epoch: 7, Batch: 746 out of 950, Loss: 1.60\n",
      "Iteration: 6450, Epoch: 7, Batch: 756 out of 950, Loss: 1.64\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou art the s\n",
      "Iteration: 6460, Epoch: 7, Batch: 766 out of 950, Loss: 1.67\n",
      "Iteration: 6470, Epoch: 7, Batch: 776 out of 950, Loss: 1.70\n",
      "Iteration: 6480, Epoch: 7, Batch: 786 out of 950, Loss: 1.68\n",
      "Iteration: 6490, Epoch: 7, Batch: 796 out of 950, Loss: 1.59\n",
      "Iteration: 6500, Epoch: 7, Batch: 806 out of 950, Loss: 1.64\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 6510, Epoch: 7, Batch: 816 out of 950, Loss: 1.64\n",
      "Iteration: 6520, Epoch: 7, Batch: 826 out of 950, Loss: 1.62\n",
      "Iteration: 6530, Epoch: 7, Batch: 836 out of 950, Loss: 1.62\n",
      "Iteration: 6540, Epoch: 7, Batch: 846 out of 950, Loss: 1.64\n",
      "Iteration: 6550, Epoch: 7, Batch: 856 out of 950, Loss: 1.72\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou shall be \n",
      "Iteration: 6560, Epoch: 7, Batch: 866 out of 950, Loss: 1.68\n",
      "Iteration: 6570, Epoch: 7, Batch: 876 out of 950, Loss: 1.73\n",
      "Iteration: 6580, Epoch: 7, Batch: 886 out of 950, Loss: 1.65\n",
      "Iteration: 6590, Epoch: 7, Batch: 896 out of 950, Loss: 1.95\n",
      "Iteration: 6600, Epoch: 7, Batch: 906 out of 950, Loss: 1.62\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou shall be \n",
      "Iteration: 6610, Epoch: 7, Batch: 916 out of 950, Loss: 1.64\n",
      "Iteration: 6620, Epoch: 7, Batch: 926 out of 950, Loss: 1.67\n",
      "Iteration: 6630, Epoch: 7, Batch: 936 out of 950, Loss: 1.73\n",
      "Iteration: 6640, Epoch: 7, Batch: 946 out of 950, Loss: 1.62\n",
      "Starting Epoch #8 of 10.\n",
      "Iteration: 6650, Epoch: 8, Batch: 7 out of 950, Loss: 1.59\n",
      "thou art more the come \n",
      "to be or not to the come \n",
      "wherefore art thou art the c\n",
      "Iteration: 6660, Epoch: 8, Batch: 17 out of 950, Loss: 1.67\n",
      "Iteration: 6670, Epoch: 8, Batch: 27 out of 950, Loss: 1.66\n",
      "Iteration: 6680, Epoch: 8, Batch: 37 out of 950, Loss: 1.67\n",
      "Iteration: 6690, Epoch: 8, Batch: 47 out of 950, Loss: 1.69\n",
      "Iteration: 6700, Epoch: 8, Batch: 57 out of 950, Loss: 1.56\n",
      "thou art more the comme\n",
      "to be or not to the comme\n",
      "wherefore art thou art the c\n",
      "Iteration: 6710, Epoch: 8, Batch: 67 out of 950, Loss: 1.65\n",
      "Iteration: 6720, Epoch: 8, Batch: 77 out of 950, Loss: 1.73\n",
      "Iteration: 6730, Epoch: 8, Batch: 87 out of 950, Loss: 1.59\n",
      "Iteration: 6740, Epoch: 8, Batch: 97 out of 950, Loss: 1.80\n",
      "Iteration: 6750, Epoch: 8, Batch: 107 out of 950, Loss: 1.69\n",
      "thou art more the compl\n",
      "to be or not to the comme\n",
      "wherefore art thou art the c\n",
      "Iteration: 6760, Epoch: 8, Batch: 117 out of 950, Loss: 1.75\n",
      "Iteration: 6770, Epoch: 8, Batch: 127 out of 950, Loss: 1.66\n",
      "Iteration: 6780, Epoch: 8, Batch: 137 out of 950, Loss: 1.69\n",
      "Iteration: 6790, Epoch: 8, Batch: 147 out of 950, Loss: 1.68\n",
      "Iteration: 6800, Epoch: 8, Batch: 157 out of 950, Loss: 1.65\n",
      "thou art more the send \n",
      "to be or not to the see t\n",
      "wherefore art thou art the s\n",
      "Iteration: 6810, Epoch: 8, Batch: 167 out of 950, Loss: 1.66\n",
      "Iteration: 6820, Epoch: 8, Batch: 177 out of 950, Loss: 1.66\n",
      "Iteration: 6830, Epoch: 8, Batch: 187 out of 950, Loss: 1.55\n",
      "Iteration: 6840, Epoch: 8, Batch: 197 out of 950, Loss: 1.57\n",
      "Iteration: 6850, Epoch: 8, Batch: 207 out of 950, Loss: 1.69\n",
      "thou art more the send \n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 6860, Epoch: 8, Batch: 217 out of 950, Loss: 1.51\n",
      "Iteration: 6870, Epoch: 8, Batch: 227 out of 950, Loss: 1.71\n",
      "Iteration: 6880, Epoch: 8, Batch: 237 out of 950, Loss: 1.60\n",
      "Iteration: 6890, Epoch: 8, Batch: 247 out of 950, Loss: 1.89\n",
      "Iteration: 6900, Epoch: 8, Batch: 257 out of 950, Loss: 1.70\n",
      "thou art more the send \n",
      "to be or not to the stand\n",
      "wherefore art thou have shal\n",
      "Iteration: 6910, Epoch: 8, Batch: 267 out of 950, Loss: 1.67\n",
      "Iteration: 6920, Epoch: 8, Batch: 277 out of 950, Loss: 1.62\n",
      "Iteration: 6930, Epoch: 8, Batch: 287 out of 950, Loss: 1.68\n",
      "Iteration: 6940, Epoch: 8, Batch: 297 out of 950, Loss: 1.66\n",
      "Iteration: 6950, Epoch: 8, Batch: 307 out of 950, Loss: 1.58\n",
      "thou art more the sent \n",
      "to be or not to the sent \n",
      "wherefore art thou art the s\n",
      "Iteration: 6960, Epoch: 8, Batch: 317 out of 950, Loss: 1.67\n",
      "Iteration: 6970, Epoch: 8, Batch: 327 out of 950, Loss: 1.66\n",
      "Iteration: 6980, Epoch: 8, Batch: 337 out of 950, Loss: 1.65\n",
      "Iteration: 6990, Epoch: 8, Batch: 347 out of 950, Loss: 1.60\n",
      "Iteration: 7000, Epoch: 8, Batch: 357 out of 950, Loss: 1.58\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou shall be \n",
      "Iteration: 7010, Epoch: 8, Batch: 367 out of 950, Loss: 1.73\n",
      "Iteration: 7020, Epoch: 8, Batch: 377 out of 950, Loss: 1.56\n",
      "Iteration: 7030, Epoch: 8, Batch: 387 out of 950, Loss: 1.72\n",
      "Iteration: 7040, Epoch: 8, Batch: 397 out of 950, Loss: 1.59\n",
      "Iteration: 7050, Epoch: 8, Batch: 407 out of 950, Loss: 1.64\n",
      "thou art more the world\n",
      "to be or not to the come \n",
      "wherefore art thou shall be \n",
      "Iteration: 7060, Epoch: 8, Batch: 417 out of 950, Loss: 1.58\n",
      "Iteration: 7070, Epoch: 8, Batch: 427 out of 950, Loss: 1.76\n",
      "Iteration: 7080, Epoch: 8, Batch: 437 out of 950, Loss: 1.64\n",
      "Iteration: 7090, Epoch: 8, Batch: 447 out of 950, Loss: 1.66\n",
      "Iteration: 7100, Epoch: 8, Batch: 457 out of 950, Loss: 1.63\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou shall be \n",
      "Iteration: 7110, Epoch: 8, Batch: 467 out of 950, Loss: 1.65\n",
      "Iteration: 7120, Epoch: 8, Batch: 477 out of 950, Loss: 1.64\n",
      "Iteration: 7130, Epoch: 8, Batch: 487 out of 950, Loss: 1.57\n",
      "Iteration: 7140, Epoch: 8, Batch: 497 out of 950, Loss: 1.61\n",
      "Iteration: 7150, Epoch: 8, Batch: 507 out of 950, Loss: 1.61\n",
      "thou art more the senat\n",
      "to be or not to the senat\n",
      "wherefore art thou shall be \n",
      "Iteration: 7160, Epoch: 8, Batch: 517 out of 950, Loss: 1.72\n",
      "Iteration: 7170, Epoch: 8, Batch: 527 out of 950, Loss: 1.65\n",
      "Iteration: 7180, Epoch: 8, Batch: 537 out of 950, Loss: 1.67\n",
      "Iteration: 7190, Epoch: 8, Batch: 547 out of 950, Loss: 1.67\n",
      "Iteration: 7200, Epoch: 8, Batch: 557 out of 950, Loss: 1.63\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 7210, Epoch: 8, Batch: 567 out of 950, Loss: 1.52\n",
      "Iteration: 7220, Epoch: 8, Batch: 577 out of 950, Loss: 1.70\n",
      "Iteration: 7230, Epoch: 8, Batch: 587 out of 950, Loss: 1.67\n",
      "Iteration: 7240, Epoch: 8, Batch: 597 out of 950, Loss: 1.65\n",
      "Iteration: 7250, Epoch: 8, Batch: 607 out of 950, Loss: 1.63\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou shall be \n",
      "Iteration: 7260, Epoch: 8, Batch: 617 out of 950, Loss: 1.62\n",
      "Iteration: 7270, Epoch: 8, Batch: 627 out of 950, Loss: 1.60\n",
      "Iteration: 7280, Epoch: 8, Batch: 637 out of 950, Loss: 1.72\n",
      "Iteration: 7290, Epoch: 8, Batch: 647 out of 950, Loss: 1.69\n",
      "Iteration: 7300, Epoch: 8, Batch: 657 out of 950, Loss: 1.59\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou should be\n",
      "Iteration: 7310, Epoch: 8, Batch: 667 out of 950, Loss: 1.55\n",
      "Iteration: 7320, Epoch: 8, Batch: 677 out of 950, Loss: 1.59\n",
      "Iteration: 7330, Epoch: 8, Batch: 687 out of 950, Loss: 1.61\n",
      "Iteration: 7340, Epoch: 8, Batch: 697 out of 950, Loss: 1.69\n",
      "Iteration: 7350, Epoch: 8, Batch: 707 out of 950, Loss: 1.63\n",
      "thou art more the world\n",
      "to be or not to the come \n",
      "wherefore art thou art the w\n",
      "Iteration: 7360, Epoch: 8, Batch: 717 out of 950, Loss: 1.61\n",
      "Iteration: 7370, Epoch: 8, Batch: 727 out of 950, Loss: 1.75\n",
      "Iteration: 7380, Epoch: 8, Batch: 737 out of 950, Loss: 1.61\n",
      "Iteration: 7390, Epoch: 8, Batch: 747 out of 950, Loss: 1.60\n",
      "Iteration: 7400, Epoch: 8, Batch: 757 out of 950, Loss: 1.62\n",
      "thou art more the send \n",
      "to be or not to the come \n",
      "wherefore art thou are the s\n",
      "Iteration: 7410, Epoch: 8, Batch: 767 out of 950, Loss: 1.70\n",
      "Iteration: 7420, Epoch: 8, Batch: 777 out of 950, Loss: 1.60\n",
      "Iteration: 7430, Epoch: 8, Batch: 787 out of 950, Loss: 1.67\n",
      "Iteration: 7440, Epoch: 8, Batch: 797 out of 950, Loss: 1.66\n",
      "Iteration: 7450, Epoch: 8, Batch: 807 out of 950, Loss: 1.65\n",
      "thou art more the stand\n",
      "to be or not to the come \n",
      "wherefore art thou art the s\n",
      "Iteration: 7460, Epoch: 8, Batch: 817 out of 950, Loss: 1.63\n",
      "Iteration: 7470, Epoch: 8, Batch: 827 out of 950, Loss: 1.61\n",
      "Iteration: 7480, Epoch: 8, Batch: 837 out of 950, Loss: 1.59\n",
      "Iteration: 7490, Epoch: 8, Batch: 847 out of 950, Loss: 1.66\n",
      "Iteration: 7500, Epoch: 8, Batch: 857 out of 950, Loss: 1.66\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the send \n",
      "to be or not to the compl\n",
      "wherefore art thou art the s\n",
      "Iteration: 7510, Epoch: 8, Batch: 867 out of 950, Loss: 1.56\n",
      "Iteration: 7520, Epoch: 8, Batch: 877 out of 950, Loss: 1.53\n",
      "Iteration: 7530, Epoch: 8, Batch: 887 out of 950, Loss: 1.61\n",
      "Iteration: 7540, Epoch: 8, Batch: 897 out of 950, Loss: 1.68\n",
      "Iteration: 7550, Epoch: 8, Batch: 907 out of 950, Loss: 1.58\n",
      "thou art more the compa\n",
      "to be or not to the compa\n",
      "wherefore art thou art the s\n",
      "Iteration: 7560, Epoch: 8, Batch: 917 out of 950, Loss: 1.64\n",
      "Iteration: 7570, Epoch: 8, Batch: 927 out of 950, Loss: 1.66\n",
      "Iteration: 7580, Epoch: 8, Batch: 937 out of 950, Loss: 1.56\n",
      "Iteration: 7590, Epoch: 8, Batch: 947 out of 950, Loss: 1.59\n",
      "Starting Epoch #9 of 10.\n",
      "Iteration: 7600, Epoch: 9, Batch: 8 out of 950, Loss: 1.62\n",
      "thou art more the stand\n",
      "to be or not to the come \n",
      "wherefore art thou art the s\n",
      "Iteration: 7610, Epoch: 9, Batch: 18 out of 950, Loss: 1.61\n",
      "Iteration: 7620, Epoch: 9, Batch: 28 out of 950, Loss: 1.66\n",
      "Iteration: 7630, Epoch: 9, Batch: 38 out of 950, Loss: 1.68\n",
      "Iteration: 7640, Epoch: 9, Batch: 48 out of 950, Loss: 1.59\n",
      "Iteration: 7650, Epoch: 9, Batch: 58 out of 950, Loss: 1.64\n",
      "thou art more the see t\n",
      "to be or not to the court\n",
      "wherefore art thou art the s\n",
      "Iteration: 7660, Epoch: 9, Batch: 68 out of 950, Loss: 1.63\n",
      "Iteration: 7670, Epoch: 9, Batch: 78 out of 950, Loss: 1.64\n",
      "Iteration: 7680, Epoch: 9, Batch: 88 out of 950, Loss: 1.67\n",
      "Iteration: 7690, Epoch: 9, Batch: 98 out of 950, Loss: 1.58\n",
      "Iteration: 7700, Epoch: 9, Batch: 108 out of 950, Loss: 1.63\n",
      "thou art more the send \n",
      "to be or not to the comme\n",
      "wherefore art thou art the s\n",
      "Iteration: 7710, Epoch: 9, Batch: 118 out of 950, Loss: 1.66\n",
      "Iteration: 7720, Epoch: 9, Batch: 128 out of 950, Loss: 1.60\n",
      "Iteration: 7730, Epoch: 9, Batch: 138 out of 950, Loss: 1.72\n",
      "Iteration: 7740, Epoch: 9, Batch: 148 out of 950, Loss: 1.64\n",
      "Iteration: 7750, Epoch: 9, Batch: 158 out of 950, Loss: 1.65\n",
      "thou art more the shall\n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 7760, Epoch: 9, Batch: 168 out of 950, Loss: 1.70\n",
      "Iteration: 7770, Epoch: 9, Batch: 178 out of 950, Loss: 1.66\n",
      "Iteration: 7780, Epoch: 9, Batch: 188 out of 950, Loss: 1.64\n",
      "Iteration: 7790, Epoch: 9, Batch: 198 out of 950, Loss: 1.70\n",
      "Iteration: 7800, Epoch: 9, Batch: 208 out of 950, Loss: 1.70\n",
      "thou art more the send \n",
      "to be or not to the sent \n",
      "wherefore art thou shall be \n",
      "Iteration: 7810, Epoch: 9, Batch: 218 out of 950, Loss: 1.65\n",
      "Iteration: 7820, Epoch: 9, Batch: 228 out of 950, Loss: 1.62\n",
      "Iteration: 7830, Epoch: 9, Batch: 238 out of 950, Loss: 1.64\n",
      "Iteration: 7840, Epoch: 9, Batch: 248 out of 950, Loss: 1.61\n",
      "Iteration: 7850, Epoch: 9, Batch: 258 out of 950, Loss: 1.57\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 7860, Epoch: 9, Batch: 268 out of 950, Loss: 1.60\n",
      "Iteration: 7870, Epoch: 9, Batch: 278 out of 950, Loss: 1.61\n",
      "Iteration: 7880, Epoch: 9, Batch: 288 out of 950, Loss: 1.58\n",
      "Iteration: 7890, Epoch: 9, Batch: 298 out of 950, Loss: 1.62\n",
      "Iteration: 7900, Epoch: 9, Batch: 308 out of 950, Loss: 1.63\n",
      "thou art more the sent \n",
      "to be or not to the come \n",
      "wherefore art thou art the s\n",
      "Iteration: 7910, Epoch: 9, Batch: 318 out of 950, Loss: 1.60\n",
      "Iteration: 7920, Epoch: 9, Batch: 328 out of 950, Loss: 1.67\n",
      "Iteration: 7930, Epoch: 9, Batch: 338 out of 950, Loss: 1.47\n",
      "Iteration: 7940, Epoch: 9, Batch: 348 out of 950, Loss: 1.58\n",
      "Iteration: 7950, Epoch: 9, Batch: 358 out of 950, Loss: 1.68\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 7960, Epoch: 9, Batch: 368 out of 950, Loss: 1.67\n",
      "Iteration: 7970, Epoch: 9, Batch: 378 out of 950, Loss: 1.67\n",
      "Iteration: 7980, Epoch: 9, Batch: 388 out of 950, Loss: 1.58\n",
      "Iteration: 7990, Epoch: 9, Batch: 398 out of 950, Loss: 1.58\n",
      "Iteration: 8000, Epoch: 9, Batch: 408 out of 950, Loss: 1.62\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the senat\n",
      "to be or not to the senat\n",
      "wherefore art thou shall see\n",
      "Iteration: 8010, Epoch: 9, Batch: 418 out of 950, Loss: 1.52\n",
      "Iteration: 8020, Epoch: 9, Batch: 428 out of 950, Loss: 1.62\n",
      "Iteration: 8030, Epoch: 9, Batch: 438 out of 950, Loss: 1.59\n",
      "Iteration: 8040, Epoch: 9, Batch: 448 out of 950, Loss: 1.54\n",
      "Iteration: 8050, Epoch: 9, Batch: 458 out of 950, Loss: 1.59\n",
      "thou art more the send \n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 8060, Epoch: 9, Batch: 468 out of 950, Loss: 1.56\n",
      "Iteration: 8070, Epoch: 9, Batch: 478 out of 950, Loss: 1.70\n",
      "Iteration: 8080, Epoch: 9, Batch: 488 out of 950, Loss: 1.58\n",
      "Iteration: 8090, Epoch: 9, Batch: 498 out of 950, Loss: 1.61\n",
      "Iteration: 8100, Epoch: 9, Batch: 508 out of 950, Loss: 1.70\n",
      "thou art more the see t\n",
      "to be or not to the stran\n",
      "wherefore art thou art the s\n",
      "Iteration: 8110, Epoch: 9, Batch: 518 out of 950, Loss: 1.66\n",
      "Iteration: 8120, Epoch: 9, Batch: 528 out of 950, Loss: 1.64\n",
      "Iteration: 8130, Epoch: 9, Batch: 538 out of 950, Loss: 1.61\n",
      "Iteration: 8140, Epoch: 9, Batch: 548 out of 950, Loss: 1.56\n",
      "Iteration: 8150, Epoch: 9, Batch: 558 out of 950, Loss: 1.53\n",
      "thou art more the sent \n",
      "to be or not to the see t\n",
      "wherefore art thou should be\n",
      "Iteration: 8160, Epoch: 9, Batch: 568 out of 950, Loss: 1.67\n",
      "Iteration: 8170, Epoch: 9, Batch: 578 out of 950, Loss: 1.62\n",
      "Iteration: 8180, Epoch: 9, Batch: 588 out of 950, Loss: 1.58\n",
      "Iteration: 8190, Epoch: 9, Batch: 598 out of 950, Loss: 1.66\n",
      "Iteration: 8200, Epoch: 9, Batch: 608 out of 950, Loss: 1.55\n",
      "thou art more the senat\n",
      "to be or not to the compl\n",
      "wherefore art thou should be\n",
      "Iteration: 8210, Epoch: 9, Batch: 618 out of 950, Loss: 1.51\n",
      "Iteration: 8220, Epoch: 9, Batch: 628 out of 950, Loss: 1.70\n",
      "Iteration: 8230, Epoch: 9, Batch: 638 out of 950, Loss: 1.70\n",
      "Iteration: 8240, Epoch: 9, Batch: 648 out of 950, Loss: 1.55\n",
      "Iteration: 8250, Epoch: 9, Batch: 658 out of 950, Loss: 1.65\n",
      "thou art more the sent \n",
      "to be or not to the compl\n",
      "wherefore art thou shall be \n",
      "Iteration: 8260, Epoch: 9, Batch: 668 out of 950, Loss: 1.72\n",
      "Iteration: 8270, Epoch: 9, Batch: 678 out of 950, Loss: 1.60\n",
      "Iteration: 8280, Epoch: 9, Batch: 688 out of 950, Loss: 1.66\n",
      "Iteration: 8290, Epoch: 9, Batch: 698 out of 950, Loss: 1.62\n",
      "Iteration: 8300, Epoch: 9, Batch: 708 out of 950, Loss: 1.58\n",
      "thou art more the send \n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 8310, Epoch: 9, Batch: 718 out of 950, Loss: 1.61\n",
      "Iteration: 8320, Epoch: 9, Batch: 728 out of 950, Loss: 1.58\n",
      "Iteration: 8330, Epoch: 9, Batch: 738 out of 950, Loss: 1.65\n",
      "Iteration: 8340, Epoch: 9, Batch: 748 out of 950, Loss: 1.63\n",
      "Iteration: 8350, Epoch: 9, Batch: 758 out of 950, Loss: 1.53\n",
      "thou art more the stand\n",
      "to be or not to the comme\n",
      "wherefore art thou art the s\n",
      "Iteration: 8360, Epoch: 9, Batch: 768 out of 950, Loss: 1.70\n",
      "Iteration: 8370, Epoch: 9, Batch: 778 out of 950, Loss: 1.55\n",
      "Iteration: 8380, Epoch: 9, Batch: 788 out of 950, Loss: 1.67\n",
      "Iteration: 8390, Epoch: 9, Batch: 798 out of 950, Loss: 1.64\n",
      "Iteration: 8400, Epoch: 9, Batch: 808 out of 950, Loss: 1.71\n",
      "thou art more the stand\n",
      "to be or not to the compl\n",
      "wherefore art thou art the s\n",
      "Iteration: 8410, Epoch: 9, Batch: 818 out of 950, Loss: 1.54\n",
      "Iteration: 8420, Epoch: 9, Batch: 828 out of 950, Loss: 1.61\n",
      "Iteration: 8430, Epoch: 9, Batch: 838 out of 950, Loss: 1.65\n",
      "Iteration: 8440, Epoch: 9, Batch: 848 out of 950, Loss: 1.48\n",
      "Iteration: 8450, Epoch: 9, Batch: 858 out of 950, Loss: 1.69\n",
      "thou art more the send \n",
      "to be or not to the sense\n",
      "wherefore art thou shall be \n",
      "Iteration: 8460, Epoch: 9, Batch: 868 out of 950, Loss: 1.60\n",
      "Iteration: 8470, Epoch: 9, Batch: 878 out of 950, Loss: 1.56\n",
      "Iteration: 8480, Epoch: 9, Batch: 888 out of 950, Loss: 1.64\n",
      "Iteration: 8490, Epoch: 9, Batch: 898 out of 950, Loss: 1.59\n",
      "Iteration: 8500, Epoch: 9, Batch: 908 out of 950, Loss: 1.58\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more the send \n",
      "to be or not to the stran\n",
      "wherefore art thou art the s\n",
      "Iteration: 8510, Epoch: 9, Batch: 918 out of 950, Loss: 1.62\n",
      "Iteration: 8520, Epoch: 9, Batch: 928 out of 950, Loss: 1.62\n",
      "Iteration: 8530, Epoch: 9, Batch: 938 out of 950, Loss: 1.58\n",
      "Iteration: 8540, Epoch: 9, Batch: 948 out of 950, Loss: 1.58\n",
      "Starting Epoch #10 of 10.\n",
      "Iteration: 8550, Epoch: 10, Batch: 9 out of 950, Loss: 1.67\n",
      "thou art more the world\n",
      "to be or not to the compl\n",
      "wherefore art thou shall be \n",
      "Iteration: 8560, Epoch: 10, Batch: 19 out of 950, Loss: 1.66\n",
      "Iteration: 8570, Epoch: 10, Batch: 29 out of 950, Loss: 1.53\n",
      "Iteration: 8580, Epoch: 10, Batch: 39 out of 950, Loss: 1.57\n",
      "Iteration: 8590, Epoch: 10, Batch: 49 out of 950, Loss: 1.69\n",
      "Iteration: 8600, Epoch: 10, Batch: 59 out of 950, Loss: 1.60\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou shall be \n",
      "Iteration: 8610, Epoch: 10, Batch: 69 out of 950, Loss: 1.64\n",
      "Iteration: 8620, Epoch: 10, Batch: 79 out of 950, Loss: 1.70\n",
      "Iteration: 8630, Epoch: 10, Batch: 89 out of 950, Loss: 1.68\n",
      "Iteration: 8640, Epoch: 10, Batch: 99 out of 950, Loss: 1.57\n",
      "Iteration: 8650, Epoch: 10, Batch: 109 out of 950, Loss: 1.59\n",
      "thou art more the sent \n",
      "to be or not to the compl\n",
      "wherefore art thou shall be \n",
      "Iteration: 8660, Epoch: 10, Batch: 119 out of 950, Loss: 1.69\n",
      "Iteration: 8670, Epoch: 10, Batch: 129 out of 950, Loss: 1.54\n",
      "Iteration: 8680, Epoch: 10, Batch: 139 out of 950, Loss: 1.71\n",
      "Iteration: 8690, Epoch: 10, Batch: 149 out of 950, Loss: 1.60\n",
      "Iteration: 8700, Epoch: 10, Batch: 159 out of 950, Loss: 1.61\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou art the s\n",
      "Iteration: 8710, Epoch: 10, Batch: 169 out of 950, Loss: 1.65\n",
      "Iteration: 8720, Epoch: 10, Batch: 179 out of 950, Loss: 1.61\n",
      "Iteration: 8730, Epoch: 10, Batch: 189 out of 950, Loss: 1.65\n",
      "Iteration: 8740, Epoch: 10, Batch: 199 out of 950, Loss: 1.59\n",
      "Iteration: 8750, Epoch: 10, Batch: 209 out of 950, Loss: 1.64\n",
      "thou art more the send \n",
      "to be or not to the send \n",
      "wherefore art thou art the s\n",
      "Iteration: 8760, Epoch: 10, Batch: 219 out of 950, Loss: 1.58\n",
      "Iteration: 8770, Epoch: 10, Batch: 229 out of 950, Loss: 1.57\n",
      "Iteration: 8780, Epoch: 10, Batch: 239 out of 950, Loss: 1.59\n",
      "Iteration: 8790, Epoch: 10, Batch: 249 out of 950, Loss: 1.63\n",
      "Iteration: 8800, Epoch: 10, Batch: 259 out of 950, Loss: 1.56\n",
      "thou art more the send \n",
      "to be or not to the compl\n",
      "wherefore art thou shall be \n",
      "Iteration: 8810, Epoch: 10, Batch: 269 out of 950, Loss: 1.60\n",
      "Iteration: 8820, Epoch: 10, Batch: 279 out of 950, Loss: 1.67\n",
      "Iteration: 8830, Epoch: 10, Batch: 289 out of 950, Loss: 1.64\n",
      "Iteration: 8840, Epoch: 10, Batch: 299 out of 950, Loss: 1.62\n",
      "Iteration: 8850, Epoch: 10, Batch: 309 out of 950, Loss: 1.57\n",
      "thou art more the more \n",
      "to be or not to the compa\n",
      "wherefore art thou art the s\n",
      "Iteration: 8860, Epoch: 10, Batch: 319 out of 950, Loss: 1.61\n",
      "Iteration: 8870, Epoch: 10, Batch: 329 out of 950, Loss: 1.59\n",
      "Iteration: 8880, Epoch: 10, Batch: 339 out of 950, Loss: 1.65\n",
      "Iteration: 8890, Epoch: 10, Batch: 349 out of 950, Loss: 1.65\n",
      "Iteration: 8900, Epoch: 10, Batch: 359 out of 950, Loss: 1.60\n",
      "thou art more that i ha\n",
      "to be or not to the compl\n",
      "wherefore art thou art the c\n",
      "Iteration: 8910, Epoch: 10, Batch: 369 out of 950, Loss: 1.56\n",
      "Iteration: 8920, Epoch: 10, Batch: 379 out of 950, Loss: 1.59\n",
      "Iteration: 8930, Epoch: 10, Batch: 389 out of 950, Loss: 1.56\n",
      "Iteration: 8940, Epoch: 10, Batch: 399 out of 950, Loss: 1.57\n",
      "Iteration: 8950, Epoch: 10, Batch: 409 out of 950, Loss: 1.63\n",
      "thou art more the sent \n",
      "to be or not to the compl\n",
      "wherefore art thou shall be \n",
      "Iteration: 8960, Epoch: 10, Batch: 419 out of 950, Loss: 1.59\n",
      "Iteration: 8970, Epoch: 10, Batch: 429 out of 950, Loss: 1.63\n",
      "Iteration: 8980, Epoch: 10, Batch: 439 out of 950, Loss: 1.58\n",
      "Iteration: 8990, Epoch: 10, Batch: 449 out of 950, Loss: 1.69\n",
      "Iteration: 9000, Epoch: 10, Batch: 459 out of 950, Loss: 1.50\n",
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Error encountered when serializing LAYER_NAME_UIDS.\n",
      "Type is unsupported, or the types of the items don't match field type in CollectionDef.\n",
      "'dict' object has no attribute 'name'\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Saved To: temp/shakespeare_model/model\n",
      "thou art more that thou\n",
      "to be or not to the compl\n",
      "wherefore art thou art thou \n",
      "Iteration: 9010, Epoch: 10, Batch: 469 out of 950, Loss: 1.67\n",
      "Iteration: 9020, Epoch: 10, Batch: 479 out of 950, Loss: 1.57\n",
      "Iteration: 9030, Epoch: 10, Batch: 489 out of 950, Loss: 1.71\n",
      "Iteration: 9040, Epoch: 10, Batch: 499 out of 950, Loss: 1.65\n",
      "Iteration: 9050, Epoch: 10, Batch: 509 out of 950, Loss: 1.63\n",
      "thou art more the world\n",
      "to be or not to the world\n",
      "wherefore art thou shall be \n",
      "Iteration: 9060, Epoch: 10, Batch: 519 out of 950, Loss: 1.62\n",
      "Iteration: 9070, Epoch: 10, Batch: 529 out of 950, Loss: 1.51\n",
      "Iteration: 9080, Epoch: 10, Batch: 539 out of 950, Loss: 1.61\n",
      "Iteration: 9090, Epoch: 10, Batch: 549 out of 950, Loss: 1.48\n",
      "Iteration: 9100, Epoch: 10, Batch: 559 out of 950, Loss: 1.68\n",
      "thou art more the stand\n",
      "to be or not to the compl\n",
      "wherefore art thou art the s\n",
      "Iteration: 9110, Epoch: 10, Batch: 569 out of 950, Loss: 1.59\n",
      "Iteration: 9120, Epoch: 10, Batch: 579 out of 950, Loss: 1.59\n",
      "Iteration: 9130, Epoch: 10, Batch: 589 out of 950, Loss: 1.64\n",
      "Iteration: 9140, Epoch: 10, Batch: 599 out of 950, Loss: 1.56\n",
      "Iteration: 9150, Epoch: 10, Batch: 609 out of 950, Loss: 1.58\n",
      "thou art more the see t\n",
      "to be or not to the see t\n",
      "wherefore art thou art the s\n",
      "Iteration: 9160, Epoch: 10, Batch: 619 out of 950, Loss: 1.64\n",
      "Iteration: 9170, Epoch: 10, Batch: 629 out of 950, Loss: 1.55\n",
      "Iteration: 9180, Epoch: 10, Batch: 639 out of 950, Loss: 1.69\n",
      "Iteration: 9190, Epoch: 10, Batch: 649 out of 950, Loss: 1.63\n",
      "Iteration: 9200, Epoch: 10, Batch: 659 out of 950, Loss: 1.60\n",
      "thou art more the stran\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 9210, Epoch: 10, Batch: 669 out of 950, Loss: 1.56\n",
      "Iteration: 9220, Epoch: 10, Batch: 679 out of 950, Loss: 1.59\n",
      "Iteration: 9230, Epoch: 10, Batch: 689 out of 950, Loss: 1.59\n",
      "Iteration: 9240, Epoch: 10, Batch: 699 out of 950, Loss: 1.56\n",
      "Iteration: 9250, Epoch: 10, Batch: 709 out of 950, Loss: 1.58\n",
      "thou art more the stran\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 9260, Epoch: 10, Batch: 719 out of 950, Loss: 1.57\n",
      "Iteration: 9270, Epoch: 10, Batch: 729 out of 950, Loss: 2.21\n",
      "Iteration: 9280, Epoch: 10, Batch: 739 out of 950, Loss: 1.65\n",
      "Iteration: 9290, Epoch: 10, Batch: 749 out of 950, Loss: 1.72\n",
      "Iteration: 9300, Epoch: 10, Batch: 759 out of 950, Loss: 1.69\n",
      "thou art more that i wi\n",
      "to be or not to the stran\n",
      "wherefore art thou art the s\n",
      "Iteration: 9310, Epoch: 10, Batch: 769 out of 950, Loss: 1.57\n",
      "Iteration: 9320, Epoch: 10, Batch: 779 out of 950, Loss: 1.57\n",
      "Iteration: 9330, Epoch: 10, Batch: 789 out of 950, Loss: 1.60\n",
      "Iteration: 9340, Epoch: 10, Batch: 799 out of 950, Loss: 1.56\n",
      "Iteration: 9350, Epoch: 10, Batch: 809 out of 950, Loss: 1.52\n",
      "thou art more the compa\n",
      "to be or not to the compa\n",
      "wherefore art thou art the c\n",
      "Iteration: 9360, Epoch: 10, Batch: 819 out of 950, Loss: 1.71\n",
      "Iteration: 9370, Epoch: 10, Batch: 829 out of 950, Loss: 1.50\n",
      "Iteration: 9380, Epoch: 10, Batch: 839 out of 950, Loss: 1.64\n",
      "Iteration: 9390, Epoch: 10, Batch: 849 out of 950, Loss: 1.64\n",
      "Iteration: 9400, Epoch: 10, Batch: 859 out of 950, Loss: 1.61\n",
      "thou art more that i wi\n",
      "to be or not to the stand\n",
      "wherefore art thou shall be \n",
      "Iteration: 9410, Epoch: 10, Batch: 869 out of 950, Loss: 1.58\n",
      "Iteration: 9420, Epoch: 10, Batch: 879 out of 950, Loss: 1.64\n",
      "Iteration: 9430, Epoch: 10, Batch: 889 out of 950, Loss: 1.63\n",
      "Iteration: 9440, Epoch: 10, Batch: 899 out of 950, Loss: 1.65\n",
      "Iteration: 9450, Epoch: 10, Batch: 909 out of 950, Loss: 1.72\n",
      "thou art more the stand\n",
      "to be or not to the stand\n",
      "wherefore art thou art the s\n",
      "Iteration: 9460, Epoch: 10, Batch: 919 out of 950, Loss: 1.58\n",
      "Iteration: 9470, Epoch: 10, Batch: 929 out of 950, Loss: 1.63\n",
      "Iteration: 9480, Epoch: 10, Batch: 939 out of 950, Loss: 1.56\n",
      "Iteration: 9490, Epoch: 10, Batch: 949 out of 950, Loss: 1.69\n"
     ]
    }
   ],
   "source": [
    "# Initialize all variables\n",
    "init = tf.global_variables_initializer()\n",
    "sess.run(init)\n",
    "\n",
    "# Train model\n",
    "train_loss = []\n",
    "iteration_count = 1\n",
    "for epoch in range(epochs):\n",
    "    # Shuffle word indices\n",
    "    random.shuffle(batches)\n",
    "    # Create targets from shuffled batches\n",
    "    targets = [np.roll(x, -1, axis=1) for x in batches]\n",
    "    # Run a through one epoch\n",
    "    print('Starting Epoch #{} of {}.'.format(epoch+1, epochs))\n",
    "    # Reset initial LSTM state every epoch\n",
    "    state = sess.run(lstm_model.initial_state)\n",
    "    for ix, batch in enumerate(batches):\n",
    "        training_dict = {lstm_model.x_data: batch, lstm_model.y_output: targets[ix]}\n",
    "        # We need to update initial state for each RNN cell:\n",
    "        for i, (c, h) in enumerate(lstm_model.initial_state):\n",
    "                    training_dict[c] = state[i].c\n",
    "                    training_dict[h] = state[i].h\n",
    "        \n",
    "        temp_loss, state, _ = sess.run([lstm_model.cost, lstm_model.final_state, lstm_model.train_op],\n",
    "                                       feed_dict=training_dict)\n",
    "        train_loss.append(temp_loss)\n",
    "        \n",
    "        # Print status every 10 gens\n",
    "        if iteration_count % 10 == 0:\n",
    "            summary_nums = (iteration_count, epoch+1, ix+1, num_batches+1, temp_loss)\n",
    "            print('Iteration: {}, Epoch: {}, Batch: {} out of {}, Loss: {:.2f}'.format(*summary_nums))\n",
    "        \n",
    "        # Save the model and the vocab\n",
    "        if iteration_count % save_every == 0:\n",
    "            # Save model\n",
    "            model_file_name = os.path.join(full_model_dir, 'model')\n",
    "            saver.save(sess, model_file_name, global_step = iteration_count)\n",
    "            print('Model Saved To: {}'.format(model_file_name))\n",
    "            # Save vocabulary\n",
    "            dictionary_file = os.path.join(full_model_dir, 'vocab.pkl')\n",
    "            with open(dictionary_file, 'wb') as dict_file_conn:\n",
    "                pickle.dump([vocab2ix, ix2vocab], dict_file_conn)\n",
    "        \n",
    "        if iteration_count % eval_every == 0:\n",
    "            for sample in prime_texts:\n",
    "                print(test_lstm_model.sample(sess, ix2vocab, vocab2ix, num=10, prime_text=sample))\n",
    "                \n",
    "        iteration_count += 1"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Plot loss over time"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "plt.plot(train_loss, 'k-')\n",
    "plt.title('Sequence to Sequence Loss')\n",
    "plt.xlabel('Generation')\n",
    "plt.ylabel('Loss')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
