{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from brnn_model_fake_data import *\n",
    "import reader\n",
    "\n",
    "import subprocess\n",
    "import tensorflow as tf\n",
    "import import_folders\n",
    "import pickle_lib as pkl\n",
    "from graph_lib import gl"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "\"\"\"\n",
     "    Global variables\n",
     "\"\"\"\n",
     "model_type = \"test\"\n",
     "data_path = \"../data/\"\n",
     "save_path = \"./saved_model/\"\n",
     "# Prior hyperparameters actually consumed by get_config() below.\n",
     "global_prior_pi = 0.25\n",
     "global_log_sigma1 = -1.0\n",
     "global_log_sigma2 = -7.0\n",
     "global_random_seed = 12\n",
     "global_num_gpus = 0\n",
     "\n",
     "\n",
     "# Model can be \"test\", \"small\", \"medium\", \"large\"\n",
     "model_select = \"test\"\n",
     "# NOTE(review): this overwrites the model_type assigned above.\n",
     "model_type = model_select\n",
     "#Put the path to the data here\n",
     "# NOTE(review): dat_path duplicates data_path and is not used elsewhere\n",
     "# in this notebook.\n",
     "dat_path = \"../data\"\n",
     "\n",
     "#Put the path to where you want to save the training data\n",
     "sav_path = \"tensorboard/\"\n",
     "\n",
     "# The mixing degree for the prior gaussian mixture\n",
     "# As in Fortunato they report scanning\n",
     "# mix_pi \\in { 1/4, 1/2, 3/4 }\n",
     "# NOTE(review): mixing_pi duplicates global_prior_pi; only the global_*\n",
     "# values are attached to the config in get_config().\n",
     "mixing_pi = 0.25\n",
     "\n",
     "# As in Fortunato they report scanning\n",
     "# log sigma1 \\in { 0, -1, -2 }\n",
     "# log sigma2 \\in { -6, -7, -8 }\n",
     "# NOTE(review): prior_log_sigma1/2 likewise duplicate the global_log_sigma*\n",
     "# values above and are unused in the visible cells.\n",
     "prior_log_sigma1 = -1.0\n",
     "prior_log_sigma2 = -7.0\n",
     "\n",
    "\n",
     "class SmallConfig(object):\n",
     "    \"\"\"Small config.\"\"\"\n",
     "    init_scale = 0.1      # half-width of the uniform weight initializer\n",
     "    learning_rate = 1.0   # initial learning rate\n",
     "    max_grad_norm = 5     # gradient-norm clipping threshold\n",
     "    num_layers = 2        # number of stacked recurrent layers\n",
     "    num_steps = 20        # sequence length (overridden from data downstream)\n",
     "    max_epoch = 4         # NOTE(review): presumably epochs at the initial lr\n",
     "    hidden_size = 200     # recurrent hidden-state size\n",
     "    max_max_epoch = 13    # total number of training epochs\n",
     "    keep_prob = 1.0       # dropout keep probability (1.0 = no dropout)\n",
     "    lr_decay = 0.5        # multiplicative learning-rate decay factor\n",
     "    \n",
     "    batch_size = 20\n",
     "    vocab_size = 10000    # overridden to 2 for the artificial binary data\n",
     "    \n",
     "    X_dim = 200 # Size of the embedding\n",
    "\n",
     "class MediumConfig(object):\n",
     "    \"\"\"\n",
     "    Medium config.\n",
     "    Slightly modified according to email.\n",
     "    \"\"\"\n",
     "    init_scale = 0.05     # half-width of the uniform weight initializer\n",
     "    learning_rate = 1.0   # initial learning rate\n",
     "    max_grad_norm = 5     # gradient-norm clipping threshold\n",
     "    num_layers = 2        # number of stacked recurrent layers\n",
     "    num_steps = 35        # sequence length (overridden from data downstream)\n",
     "    hidden_size = 650     # recurrent hidden-state size\n",
     "    max_epoch = 20        # NOTE(review): presumably epochs at the initial lr\n",
     "    max_max_epoch = 70    # total number of training epochs\n",
     "    keep_prob = 1.0       # dropout keep probability (1.0 = no dropout)\n",
     "    lr_decay = 0.9        # multiplicative learning-rate decay factor\n",
     "    batch_size = 20\n",
     "    vocab_size = 10000    # overridden to 2 for the artificial binary data\n",
     "\n",
     "    X_dim = 50 # Size of the embedding\n",
     "    \n",
    "    \n",
     "class LargeConfig(object):\n",
     "    \"\"\"Large config.\"\"\"\n",
     "    init_scale = 0.04     # half-width of the uniform weight initializer\n",
     "    learning_rate = 1.0   # initial learning rate\n",
     "    max_grad_norm = 10    # gradient-norm clipping threshold\n",
     "    num_layers = 2        # number of stacked recurrent layers\n",
     "    num_steps = 35        # sequence length (overridden from data downstream)\n",
     "    hidden_size = 1500    # recurrent hidden-state size\n",
     "    max_epoch = 14        # NOTE(review): presumably epochs at the initial lr\n",
     "    max_max_epoch = 55    # total number of training epochs\n",
     "    keep_prob = 0.35      # dropout keep probability\n",
     "    lr_decay = 1 / 1.15   # multiplicative learning-rate decay factor\n",
     "    batch_size = 20\n",
     "    vocab_size = 10000    # overridden to 2 for the artificial binary data\n",
     "\n",
     "    X_dim = 100 # Size of the embedding\n",
     "    \n",
    "    \n",
     "class TestConfig(object):\n",
     "    \"\"\"Tiny config, for testing.\"\"\"\n",
     "    init_scale = 0.1      # half-width of the uniform weight initializer\n",
     "    learning_rate = 0.5   # initial learning rate\n",
     "    max_grad_norm = 1     # gradient-norm clipping threshold\n",
     "    num_layers = 2        # number of stacked recurrent layers\n",
     "    num_steps = 20        # sequence length (overridden from data downstream)\n",
     "    hidden_size = 30      # recurrent hidden-state size\n",
     "    max_epoch = 1         # NOTE(review): presumably epochs at the initial lr\n",
     "    max_max_epoch = 20    # total number of training epochs\n",
     "    keep_prob = 1.0       # dropout keep probability (1.0 = no dropout)\n",
     "    lr_decay = 0.9        # multiplicative learning-rate decay factor\n",
     "    batch_size = 1        # overridden to 2 for eval_config downstream\n",
     "    \n",
     "    vocab_size = 10000    # overridden to 2 for the artificial binary data\n",
     "\n",
     "    X_dim = 19 # Size of the embedding\n",
    "\n",
    "\n",
    "#    global_random_seed = set_random_seed\n",
    "    \n",
    "def get_config():\n",
    "    \"\"\"Get model config.\"\"\"\n",
    "    if model_type == \"small\":\n",
    "        config = SmallConfig()\n",
    "    elif model_type == \"medium\":\n",
    "        config = MediumConfig()\n",
    "    elif model_type == \"large\":\n",
    "        config = LargeConfig()\n",
    "    elif model_type == \"test\":\n",
    "        config = TestConfig()\n",
    "    else:\n",
    "        raise ValueError(\"Invalid model: %s\", model_type)\n",
    "\n",
    "    print (\"Model Type\")\n",
    "    print (model_type)\n",
    "    config.prior_pi = global_prior_pi\n",
    "    config.log_sigma1 = global_log_sigma1\n",
    "    config.log_sigma2 = global_log_sigma2\n",
    "\n",
    "    return config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test\n",
      "Model Type\n",
      "test\n",
      "Model Type\n",
      "test\n",
      "Number of total initial chains 20000\n",
      "Dimensionality of chains (num_step,X_dim) (20, 1)\n"
     ]
    }
   ],
   "source": [
    "\n",
    "print (model_type)\n",
    "\n",
    "####### Global data reading #########\n",
    "Ndivisions = 10;\n",
    "folder_data = \"./data/artificial/\"\n",
    "\n",
    "X_list = pkl.load_pickle(folder_data +\"X_values.pkl\",Ndivisions)\n",
    "Y_list = pkl.load_pickle(folder_data +\"Y_values.pkl\",Ndivisions)\n",
    "\n",
    "num_steps, X_dim = X_list[0].shape\n",
    "num_chains = len(X_list)\n",
    "\n",
    "\n",
    "## Divide in train val and test\n",
    "proportion_tr = 0.8\n",
    "proportion_val = 0.1\n",
    "proportion_tst = 1 -( proportion_val + proportion_tr)\n",
    "\n",
    "num_tr = 10000\n",
    "num_val = 5000\n",
    "num_tst = 5000\n",
    "\n",
    "train_X = [X_list[i] for i in range(num_tr)]\n",
    "train_Y = [Y_list[i] for i in range(num_tr)]\n",
    "\n",
    "val_X = [X_list[i] for i in range(num_tr, num_tr + num_val)]\n",
    "val_Y = [Y_list[i] for i in range(num_tr, num_tr + num_val)]\n",
    "\n",
    "tst_X = [X_list[i] for i in range(num_tr + num_val,  num_tr + num_val + num_tst)]\n",
    "tst_Y = [Y_list[i] for i in range(num_tr + num_val,  num_tr + num_val + num_tst)]\n",
    "\n",
    "# Create the objects with the hyperparameters that will be fed to the network\n",
    "train_config = get_config()\n",
    "eval_config = get_config( )\n",
    "\n",
    "###### Over Set parameters #####\n",
    "train_config.X_dim  = X_dim\n",
    "eval_config.X_dim  = X_dim\n",
    "train_config.num_steps  = num_steps\n",
    "eval_config.num_steps  = num_steps\n",
    "\n",
    "train_config.vocab_size = 2\n",
    "eval_config.vocab_size= 2\n",
    "\n",
    "\n",
    "eval_config.batch_size = 2\n",
    "    \n",
    "#eval_config.num_steps = 1\n",
    "\n",
    "print (\"Number of total initial chains %i\"%len(X_list))\n",
    "print (\"Dimensionality of chains (num_step,X_dim)\",X_list[0].shape )\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Summary name KL Loss is illegal; using KL_Loss instead.\n",
      "INFO:tensorflow:Summary name Total Loss is illegal; using Total_Loss instead.\n",
      "Creating Validation model\n"
     ]
    }
   ],
   "source": [
    "\n",
    "#subprocess.Popen([\"tensorboard\",\"--logdir=tensorboard\"])\n",
    "\n",
    "with tf.Graph().as_default():\n",
    "    initializer = tf.random_uniform_initializer(-train_config.init_scale,\n",
    "                                                train_config.init_scale)\n",
    "\n",
    "    with tf.name_scope(\"Train\"):\n",
    "        train_input = BBB_LSTM_Artificial_Data_Input(batch_size = train_config.batch_size, \n",
    "                                                        X = train_X, Y = train_Y,  name=\"TrainInput\")\n",
    "        \n",
    "        with tf.variable_scope(\"Model\", reuse=None, initializer=initializer):\n",
    "            m = PTBModel(is_training=True, config=train_config, input_=train_input)\n",
    "        tf.summary.scalar(\"Training_Loss\", m.cost)\n",
    "        tf.summary.scalar(\"Learning_Rate\", m.lr)\n",
    "        tf.summary.scalar(\"KL Loss\", m.kl_loss)\n",
    "        tf.summary.scalar(\"Total Loss\", m.total_loss)\n",
    "\n",
    "    print (\"Creating Validation model\")\n",
    "    with tf.name_scope(\"Valid\"):\n",
    "        valid_input = BBB_LSTM_Artificial_Data_Input(batch_size = eval_config.batch_size, \n",
    "                                                            X = val_X, Y = val_Y,  name=\"ValidInput\")\n",
    "        \n",
    "        with tf.variable_scope(\"Model\", reuse=True, initializer=initializer):\n",
    "            mvalid = PTBModel(is_training=False, config=eval_config, input_=valid_input)\n",
    "        tf.summary.scalar(\"Validation_Loss\", mvalid.cost)\n",
    "\n",
    "    with tf.name_scope(\"Test\"):\n",
    "        test_input = BBB_LSTM_Artificial_Data_Input(batch_size = eval_config.batch_size, \n",
    "                                                            X = tst_X, Y = tst_Y,  name=\"TestInput\")\n",
    "            \n",
    "        with tf.variable_scope(\"Model\", reuse=True, initializer=initializer):\n",
    "            mtest = PTBModel(is_training=False, config=eval_config,\n",
    "                             input_=test_input)\n",
    "\n",
    "    models = {\"Train\": m, \"Valid\": mvalid, \"Test\": mtest}\n",
    "    for name, model in models.items():\n",
    "        model.export_ops(name)\n",
    "    metagraph = tf.train.export_meta_graph()\n",
    "    soft_placement = False\n",
    "    if global_num_gpus > 1:\n",
    "        soft_placement = True\n",
    "        util.auto_parallel(metagraph, m)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Starting standard services.\n",
      "INFO:tensorflow:Saving checkpoint to path ./saved_model/model.ckpt\n",
      "INFO:tensorflow:Starting queue runners.\n",
      "INFO:tensorflow:Model/global_step/sec: 0\n",
      "INFO:tensorflow:Recording summary at step 0.\n",
      "Epoch: 1 Learning rate: 0.500\n",
      "0.000 perplexity: 1.971 speed: 49 wps\n",
      "KL is 1.4222021102905273\n",
      "0.001 perplexity: 2.087 speed: 474 wps\n",
      "KL is 1.4242544174194336\n",
      "0.101 perplexity: 2.055 speed: 3602 wps\n",
      "KL is 1.4144656658172607\n",
      "0.201 perplexity: 2.050 speed: 3732 wps\n",
      "KL is 1.4184719324111938\n",
      "0.301 perplexity: 2.048 speed: 3769 wps\n",
      "KL is 1.4229850769042969\n",
      "0.401 perplexity: 2.047 speed: 3748 wps\n",
      "KL is 1.4321799278259277\n",
      "0.501 perplexity: 2.047 speed: 3769 wps\n",
      "KL is 1.4195882081985474\n",
      "0.601 perplexity: 2.047 speed: 3783 wps\n",
      "KL is 1.4349300861358643\n",
      "0.701 perplexity: 2.046 speed: 3627 wps\n",
      "KL is 1.435726284980774\n",
      "0.801 perplexity: 2.041 speed: 3513 wps\n",
      "KL is 1.4712722301483154\n",
      "0.901 perplexity: 2.033 speed: 3478 wps\n",
      "KL is 1.4894403219223022\n",
      "Epoch: 1 Train Perplexity: 2.024\n",
      "Epoch: 1 Valid Perplexity: 1.921\n",
      "Epoch: 2 Learning rate: 0.450\n",
      "0.000 perplexity: 1.854 speed: 4222 wps\n",
      "KL is 1.5140024423599243\n",
      "0.001 perplexity: 1.853 speed: 3753 wps\n",
      "KL is 1.529215693473816\n",
      "0.101 perplexity: 1.911 speed: 3125 wps\n",
      "KL is 1.5499837398529053\n",
      "0.201 perplexity: 1.906 speed: 3428 wps\n",
      "KL is 1.5674784183502197\n",
      "0.301 perplexity: 1.901 speed: 3596 wps\n",
      "KL is 1.5856537818908691\n",
      "0.401 perplexity: 1.899 speed: 3585 wps\n",
      "KL is 1.6086679697036743\n",
      "0.501 perplexity: 1.897 speed: 3503 wps\n",
      "KL is 1.6266306638717651\n",
      "0.601 perplexity: 1.894 speed: 3534 wps\n",
      "KL is 1.625685691833496\n",
      "0.701 perplexity: 1.892 speed: 3563 wps\n",
      "KL is 1.6494637727737427\n",
      "0.801 perplexity: 1.888 speed: 3576 wps\n",
      "KL is 1.6587979793548584\n",
      "0.901 perplexity: 1.886 speed: 3585 wps\n",
      "KL is 1.6565829515457153\n",
      "INFO:tensorflow:Recording summary at step 19522.\n",
      "INFO:tensorflow:Model/global_step/sec: 162.801\n",
      "Epoch: 2 Train Perplexity: 1.883\n",
      "Epoch: 2 Valid Perplexity: 1.872\n",
      "Epoch: 3 Learning rate: 0.405\n",
      "0.000 perplexity: 1.815 speed: 3502 wps\n",
      "KL is 1.680667757987976\n",
      "0.001 perplexity: 1.786 speed: 3501 wps\n",
      "KL is 1.6872317790985107\n",
      "0.101 perplexity: 1.852 speed: 3606 wps\n",
      "KL is 1.6942331790924072\n",
      "0.201 perplexity: 1.856 speed: 3676 wps\n",
      "KL is 1.690229058265686\n",
      "0.301 perplexity: 1.855 speed: 3648 wps\n",
      "KL is 1.712469458580017\n",
      "0.401 perplexity: 1.854 speed: 3596 wps\n",
      "KL is 1.7069933414459229\n",
      "0.501 perplexity: 1.853 speed: 3427 wps\n",
      "KL is 1.702115535736084\n",
      "0.601 perplexity: 1.855 speed: 3480 wps\n",
      "KL is 1.711639165878296\n",
      "0.701 perplexity: 1.854 speed: 3337 wps\n",
      "KL is 1.7156848907470703\n",
      "0.801 perplexity: 1.852 speed: 3309 wps\n",
      "KL is 1.7137510776519775\n",
      "0.901 perplexity: 1.852 speed: 3330 wps\n",
      "KL is 1.7088009119033813\n",
      "Epoch: 3 Train Perplexity: 1.851\n",
      "Epoch: 3 Valid Perplexity: 1.875\n",
      "Epoch: 4 Learning rate: 0.364\n",
      "0.000 perplexity: 1.990 speed: 2242 wps\n",
      "KL is 1.7156105041503906\n",
      "0.001 perplexity: 1.853 speed: 2925 wps\n",
      "KL is 1.7210228443145752\n",
      "0.101 perplexity: 1.837 speed: 3425 wps\n",
      "KL is 1.721038818359375\n",
      "0.201 perplexity: 1.836 speed: 3214 wps\n",
      "KL is 1.719315767288208\n",
      "0.301 perplexity: 1.838 speed: 3324 wps\n",
      "KL is 1.7098605632781982\n",
      "0.401 perplexity: 1.841 speed: 3354 wps\n",
      "KL is 1.7059358358383179\n",
      "0.501 perplexity: 1.840 speed: 3154 wps\n",
      "KL is 1.7196860313415527\n",
      "0.601 perplexity: 1.841 speed: 3185 wps\n",
      "KL is 1.7021501064300537\n",
      "INFO:tensorflow:Recording summary at step 36392.\n",
      "INFO:tensorflow:Model/global_step/sec: 140.616\n",
      "0.701 perplexity: 1.841 speed: 3272 wps\n",
      "KL is 1.699524164199829\n",
      "0.801 perplexity: 1.839 speed: 3315 wps\n",
      "KL is 1.702396273612976\n",
      "0.901 perplexity: 1.839 speed: 3346 wps\n",
      "KL is 1.7070536613464355\n",
      "Epoch: 4 Train Perplexity: 1.838\n",
      "Epoch: 4 Valid Perplexity: 1.853\n",
      "Epoch: 5 Learning rate: 0.328\n",
      "0.000 perplexity: 1.688 speed: 2418 wps\n",
      "KL is 1.7043378353118896\n",
      "0.001 perplexity: 1.781 speed: 3023 wps\n",
      "KL is 1.708009123802185\n",
      "0.101 perplexity: 1.822 speed: 2803 wps\n",
      "KL is 1.7025984525680542\n",
      "0.201 perplexity: 1.828 speed: 2607 wps\n",
      "KL is 1.6906155347824097\n",
      "0.301 perplexity: 1.827 speed: 2752 wps\n",
      "KL is 1.6922430992126465\n",
      "0.401 perplexity: 1.831 speed: 2635 wps\n",
      "KL is 1.6920500993728638\n",
      "0.501 perplexity: 1.830 speed: 2769 wps\n",
      "KL is 1.6881593465805054\n",
      "0.601 perplexity: 1.831 speed: 2901 wps\n",
      "KL is 1.6919786930084229\n",
      "0.701 perplexity: 1.831 speed: 2861 wps\n",
      "KL is 1.678871989250183\n",
      "0.801 perplexity: 1.829 speed: 2897 wps\n",
      "KL is 1.6901569366455078\n",
      "0.901 perplexity: 1.829 speed: 2951 wps\n",
      "KL is 1.6722724437713623\n",
      "Epoch: 5 Train Perplexity: 1.829\n",
      "Epoch: 5 Valid Perplexity: 1.867\n",
      "Epoch: 6 Learning rate: 0.295\n",
      "0.000 perplexity: 1.693 speed: 2681 wps\n",
      "KL is 1.6651054620742798\n",
      "0.001 perplexity: 1.732 speed: 3509 wps\n",
      "KL is 1.6775048971176147\n",
      "0.101 perplexity: 1.815 speed: 3664 wps\n",
      "KL is 1.6650137901306152\n",
      "0.201 perplexity: 1.820 speed: 3680 wps\n",
      "KL is 1.664462685585022\n",
      "INFO:tensorflow:Recording summary at step 52443.\n",
      "INFO:tensorflow:Model/global_step/sec: 133.733\n",
      "0.301 perplexity: 1.820 speed: 3335 wps\n",
      "KL is 1.6787337064743042\n",
      "0.401 perplexity: 1.822 speed: 3287 wps\n",
      "KL is 1.6485443115234375\n",
      "0.501 perplexity: 1.821 speed: 3255 wps\n",
      "KL is 1.6426115036010742\n",
      "0.601 perplexity: 1.821 speed: 3263 wps\n",
      "KL is 1.666681170463562\n",
      "0.701 perplexity: 1.822 speed: 3231 wps\n",
      "KL is 1.6625182628631592\n",
      "0.801 perplexity: 1.820 speed: 3136 wps\n",
      "KL is 1.663061261177063\n",
      "0.901 perplexity: 1.820 speed: 3138 wps\n",
      "KL is 1.6592910289764404\n",
      "Epoch: 6 Train Perplexity: 1.820\n",
      "Epoch: 6 Valid Perplexity: 1.812\n",
      "Epoch: 7 Learning rate: 0.266\n",
      "0.000 perplexity: 1.801 speed: 2354 wps\n",
      "KL is 1.652345061302185\n",
      "0.001 perplexity: 1.699 speed: 3269 wps\n",
      "KL is 1.646584391593933\n",
      "0.101 perplexity: 1.805 speed: 3367 wps\n",
      "KL is 1.6542999744415283\n",
      "0.201 perplexity: 1.811 speed: 3016 wps\n",
      "KL is 1.6482776403427124\n",
      "0.301 perplexity: 1.813 speed: 3191 wps\n",
      "KL is 1.6396591663360596\n",
      "0.401 perplexity: 1.815 speed: 3257 wps\n",
      "KL is 1.6310280561447144\n",
      "0.501 perplexity: 1.813 speed: 3349 wps\n",
      "KL is 1.6360691785812378\n",
      "0.601 perplexity: 1.814 speed: 3400 wps\n",
      "KL is 1.6451135873794556\n",
      "0.701 perplexity: 1.814 speed: 3270 wps\n",
      "KL is 1.6380776166915894\n",
      "0.801 perplexity: 1.813 speed: 3309 wps\n",
      "KL is 1.6221919059753418\n",
      "0.901 perplexity: 1.813 speed: 3333 wps\n",
      "KL is 1.6232409477233887\n",
      "INFO:tensorflow:Recording summary at step 69741.\n",
      "INFO:tensorflow:Model/global_step/sec: 144.134\n",
      "Epoch: 7 Train Perplexity: 1.813\n",
      "Epoch: 7 Valid Perplexity: 1.812\n",
      "Epoch: 8 Learning rate: 0.239\n",
      "0.000 perplexity: 1.542 speed: 3399 wps\n",
      "KL is 1.6345281600952148\n",
      "0.001 perplexity: 1.749 speed: 4263 wps\n",
      "KL is 1.6327731609344482\n",
      "0.101 perplexity: 1.804 speed: 3754 wps\n",
      "KL is 1.6134368181228638\n",
      "0.201 perplexity: 1.808 speed: 3136 wps\n",
      "KL is 1.6054315567016602\n",
      "0.301 perplexity: 1.810 speed: 3277 wps\n",
      "KL is 1.61784827709198\n",
      "0.401 perplexity: 1.812 speed: 3324 wps\n",
      "KL is 1.6140201091766357\n",
      "0.501 perplexity: 1.811 speed: 3357 wps\n",
      "KL is 1.6032745838165283\n",
      "0.601 perplexity: 1.812 speed: 3366 wps\n",
      "KL is 1.5971702337265015\n",
      "0.701 perplexity: 1.812 speed: 3390 wps\n",
      "KL is 1.586925745010376\n",
      "0.801 perplexity: 1.810 speed: 3381 wps\n",
      "KL is 1.6171016693115234\n",
      "0.901 perplexity: 1.810 speed: 3394 wps\n",
      "KL is 1.590042233467102\n",
      "Epoch: 8 Train Perplexity: 1.809\n",
      "Epoch: 8 Valid Perplexity: 1.802\n",
      "Epoch: 9 Learning rate: 0.215\n",
      "0.000 perplexity: 1.487 speed: 2645 wps\n",
      "KL is 1.598605990409851\n",
      "0.001 perplexity: 1.732 speed: 3583 wps\n",
      "KL is 1.6112425327301025\n",
      "0.101 perplexity: 1.800 speed: 3653 wps\n",
      "KL is 1.583457589149475\n",
      "0.201 perplexity: 1.801 speed: 3059 wps\n",
      "KL is 1.6074491739273071\n",
      "0.301 perplexity: 1.803 speed: 3196 wps\n",
      "KL is 1.5851918458938599\n",
      "0.401 perplexity: 1.807 speed: 3241 wps\n",
      "KL is 1.5800977945327759\n",
      "0.501 perplexity: 1.805 speed: 3139 wps\n",
      "KL is 1.5773462057113647\n",
      "INFO:tensorflow:Saving checkpoint to path ./saved_model/model.ckpt\n",
      "INFO:tensorflow:Recording summary at step 85997.\n",
      "INFO:tensorflow:Model/global_step/sec: 135.43\n",
      "0.601 perplexity: 1.805 speed: 2951 wps\n",
      "KL is 1.5802961587905884\n",
      "0.701 perplexity: 1.805 speed: 2989 wps\n",
      "KL is 1.5733706951141357\n",
      "0.801 perplexity: 1.803 speed: 3036 wps\n",
      "KL is 1.5779595375061035\n",
      "0.901 perplexity: 1.803 speed: 3067 wps\n",
      "KL is 1.5639506578445435\n",
      "Epoch: 9 Train Perplexity: 1.802\n",
      "Epoch: 9 Valid Perplexity: 1.800\n",
      "Epoch: 10 Learning rate: 0.194\n",
      "0.000 perplexity: 1.771 speed: 3984 wps\n",
      "KL is 1.5693600177764893\n",
      "0.001 perplexity: 1.782 speed: 3779 wps\n",
      "KL is 1.5826035737991333\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.101 perplexity: 1.791 speed: 3142 wps\n",
      "KL is 1.5498507022857666\n",
      "0.201 perplexity: 1.797 speed: 2777 wps\n",
      "KL is 1.5596697330474854\n",
      "0.301 perplexity: 1.797 speed: 2882 wps\n",
      "KL is 1.556118130683899\n",
      "0.401 perplexity: 1.801 speed: 3050 wps\n",
      "KL is 1.5615850687026978\n",
      "0.501 perplexity: 1.800 speed: 2992 wps\n",
      "KL is 1.540049433708191\n",
      "0.601 perplexity: 1.800 speed: 3079 wps\n",
      "KL is 1.5501033067703247\n",
      "0.701 perplexity: 1.800 speed: 3140 wps\n",
      "KL is 1.5487414598464966\n",
      "0.801 perplexity: 1.798 speed: 3131 wps\n",
      "KL is 1.559279441833496\n",
      "0.901 perplexity: 1.799 speed: 3180 wps\n",
      "KL is 1.5412113666534424\n",
      "Epoch: 10 Train Perplexity: 1.799\n",
      "Epoch: 10 Valid Perplexity: 1.796\n",
      "Epoch: 11 Learning rate: 0.174\n",
      "0.000 perplexity: 1.778 speed: 2799 wps\n",
      "KL is 1.5336109399795532\n",
      "0.001 perplexity: 1.809 speed: 3509 wps\n",
      "KL is 1.541611671447754\n",
      "0.101 perplexity: 1.788 speed: 3755 wps\n",
      "KL is 1.5433015823364258\n",
      "0.201 perplexity: 1.792 speed: 3760 wps\n",
      "KL is 1.5269759893417358\n",
      "INFO:tensorflow:Recording summary at step 102684.\n",
      "INFO:tensorflow:Model/global_step/sec: 139.112\n",
      "0.301 perplexity: 1.794 speed: 3634 wps\n",
      "KL is 1.5310075283050537\n",
      "0.401 perplexity: 1.797 speed: 3340 wps\n",
      "KL is 1.5325039625167847\n",
      "0.501 perplexity: 1.795 speed: 3380 wps\n",
      "KL is 1.542006254196167\n",
      "0.601 perplexity: 1.795 speed: 3385 wps\n",
      "KL is 1.5214600563049316\n",
      "0.701 perplexity: 1.795 speed: 3423 wps\n",
      "KL is 1.5271644592285156\n",
      "0.801 perplexity: 1.793 speed: 3338 wps\n",
      "KL is 1.5387239456176758\n",
      "0.901 perplexity: 1.794 speed: 3359 wps\n",
      "KL is 1.5435314178466797\n",
      "Epoch: 11 Train Perplexity: 1.794\n",
      "Epoch: 11 Valid Perplexity: 1.793\n",
      "Epoch: 12 Learning rate: 0.157\n",
      "0.000 perplexity: 1.824 speed: 3833 wps\n",
      "KL is 1.5118328332901\n",
      "0.001 perplexity: 1.748 speed: 3984 wps\n",
      "KL is 1.5211838483810425\n",
      "0.101 perplexity: 1.787 speed: 3945 wps\n",
      "KL is 1.5197664499282837\n",
      "0.201 perplexity: 1.792 speed: 3943 wps\n",
      "KL is 1.5063930749893188\n",
      "0.301 perplexity: 1.793 speed: 3935 wps\n",
      "KL is 1.526502251625061\n",
      "0.401 perplexity: 1.796 speed: 3941 wps\n",
      "KL is 1.5075392723083496\n",
      "0.501 perplexity: 1.796 speed: 3925 wps\n",
      "KL is 1.521519660949707\n",
      "0.601 perplexity: 1.796 speed: 3912 wps\n",
      "KL is 1.5082571506500244\n",
      "0.701 perplexity: 1.796 speed: 3837 wps\n",
      "KL is 1.5110722780227661\n",
      "0.801 perplexity: 1.793 speed: 3784 wps\n",
      "KL is 1.5172605514526367\n",
      "0.901 perplexity: 1.793 speed: 3748 wps\n",
      "KL is 1.5013352632522583\n",
      "Epoch: 12 Train Perplexity: 1.793\n",
      "Epoch: 12 Valid Perplexity: 1.786\n",
      "Epoch: 13 Learning rate: 0.141\n",
      "0.000 perplexity: 1.769 speed: 3188 wps\n",
      "KL is 1.506024956703186\n",
      "0.001 perplexity: 1.770 speed: 3883 wps\n",
      "KL is 1.5087473392486572\n",
      "INFO:tensorflow:Recording summary at step 120723.\n",
      "INFO:tensorflow:Model/global_step/sec: 150.35\n",
      "0.101 perplexity: 1.784 speed: 3685 wps\n",
      "KL is 1.4991003274917603\n",
      "0.201 perplexity: 1.787 speed: 3759 wps\n",
      "KL is 1.5057188272476196\n",
      "0.301 perplexity: 1.789 speed: 3840 wps\n",
      "KL is 1.5043659210205078\n",
      "0.401 perplexity: 1.792 speed: 3845 wps\n",
      "KL is 1.4952123165130615\n",
      "0.501 perplexity: 1.791 speed: 3880 wps\n",
      "KL is 1.5012686252593994\n",
      "0.601 perplexity: 1.791 speed: 3856 wps\n",
      "KL is 1.4947073459625244\n",
      "0.701 perplexity: 1.792 speed: 3622 wps\n",
      "KL is 1.494767189025879\n",
      "0.801 perplexity: 1.789 speed: 3567 wps\n",
      "KL is 1.493407130241394\n",
      "0.901 perplexity: 1.789 speed: 3555 wps\n",
      "KL is 1.4925507307052612\n",
      "Epoch: 13 Train Perplexity: 1.789\n",
      "Epoch: 13 Valid Perplexity: 1.782\n",
      "Epoch: 14 Learning rate: 0.127\n",
      "0.000 perplexity: 1.576 speed: 2932 wps\n",
      "KL is 1.4881618022918701\n",
      "0.001 perplexity: 1.749 speed: 3373 wps\n",
      "KL is 1.4911816120147705\n",
      "0.101 perplexity: 1.775 speed: 3776 wps\n",
      "KL is 1.4872733354568481\n",
      "0.201 perplexity: 1.783 speed: 3627 wps\n",
      "KL is 1.4951531887054443\n",
      "0.301 perplexity: 1.785 speed: 3133 wps\n",
      "KL is 1.4892266988754272\n",
      "0.401 perplexity: 1.788 speed: 3260 wps\n",
      "KL is 1.4947978258132935\n",
      "0.501 perplexity: 1.787 speed: 3314 wps\n",
      "KL is 1.4822386503219604\n",
      "0.601 perplexity: 1.787 speed: 3187 wps\n",
      "KL is 1.5000720024108887\n",
      "0.701 perplexity: 1.787 speed: 3198 wps\n",
      "KL is 1.4796432256698608\n",
      "0.801 perplexity: 1.786 speed: 3205 wps\n",
      "KL is 1.4816787242889404\n",
      "0.901 perplexity: 1.786 speed: 3251 wps\n",
      "KL is 1.4872188568115234\n",
      "INFO:tensorflow:Recording summary at step 139483.\n",
      "INFO:tensorflow:Model/global_step/sec: 156.325\n",
      "Epoch: 14 Train Perplexity: 1.786\n",
      "Epoch: 14 Valid Perplexity: 1.787\n",
      "Epoch: 15 Learning rate: 0.114\n",
      "0.000 perplexity: 2.389 speed: 3691 wps\n",
      "KL is 1.4843506813049316\n",
      "0.001 perplexity: 1.861 speed: 3962 wps\n",
      "KL is 1.4577885866165161\n",
      "0.101 perplexity: 1.777 speed: 3544 wps\n",
      "KL is 1.474022388458252\n",
      "0.201 perplexity: 1.781 speed: 3424 wps\n",
      "KL is 1.4703000783920288\n",
      "0.301 perplexity: 1.784 speed: 3429 wps\n",
      "KL is 1.4641871452331543\n",
      "0.401 perplexity: 1.787 speed: 3223 wps\n",
      "KL is 1.4728755950927734\n",
      "0.501 perplexity: 1.787 speed: 3021 wps\n",
      "KL is 1.474019169807434\n",
      "0.601 perplexity: 1.787 speed: 3036 wps\n",
      "KL is 1.4620168209075928\n",
      "0.701 perplexity: 1.787 speed: 3035 wps\n",
      "KL is 1.4698801040649414\n",
      "0.801 perplexity: 1.785 speed: 3022 wps\n",
      "KL is 1.446463704109192\n",
      "0.901 perplexity: 1.785 speed: 2931 wps\n",
      "KL is 1.4683396816253662\n",
      "Epoch: 15 Train Perplexity: 1.785\n",
      "Epoch: 15 Valid Perplexity: 1.782\n",
      "Epoch: 16 Learning rate: 0.103\n",
      "0.000 perplexity: 2.396 speed: 3042 wps\n",
      "KL is 1.4540549516677856\n",
      "0.001 perplexity: 1.798 speed: 3931 wps\n",
      "KL is 1.4591156244277954\n",
      "0.101 perplexity: 1.778 speed: 4061 wps\n",
      "KL is 1.4539653062820435\n",
      "0.201 perplexity: 1.779 speed: 4019 wps\n",
      "KL is 1.4665213823318481\n",
      "0.301 perplexity: 1.780 speed: 4007 wps\n",
      "KL is 1.4499561786651611\n",
      "0.401 perplexity: 1.783 speed: 4005 wps\n",
      "KL is 1.456613302230835\n",
      "0.501 perplexity: 1.783 speed: 3938 wps\n",
      "KL is 1.4607182741165161\n",
      "INFO:tensorflow:Recording summary at step 155763.\n",
      "INFO:tensorflow:Model/global_step/sec: 135.65\n",
      "0.601 perplexity: 1.784 speed: 3708 wps\n",
      "KL is 1.4595915079116821\n",
      "0.701 perplexity: 1.784 speed: 3733 wps\n",
      "KL is 1.45586097240448\n",
      "0.801 perplexity: 1.781 speed: 3762 wps\n",
      "KL is 1.4453965425491333\n",
      "0.901 perplexity: 1.781 speed: 3639 wps\n",
      "KL is 1.4501416683197021\n",
      "Epoch: 16 Train Perplexity: 1.781\n",
      "Epoch: 16 Valid Perplexity: 1.776\n",
      "Epoch: 17 Learning rate: 0.093\n",
      "0.000 perplexity: 1.631 speed: 3960 wps\n",
      "KL is 1.4471323490142822\n",
      "0.001 perplexity: 1.790 speed: 4200 wps\n",
      "KL is 1.442384958267212\n",
      "0.101 perplexity: 1.771 speed: 4081 wps\n",
      "KL is 1.4457240104675293\n",
      "0.201 perplexity: 1.773 speed: 4068 wps\n",
      "KL is 1.4482824802398682\n",
      "0.301 perplexity: 1.776 speed: 4045 wps\n",
      "KL is 1.4434070587158203\n",
      "0.401 perplexity: 1.780 speed: 4056 wps\n",
      "KL is 1.4379708766937256\n",
      "0.501 perplexity: 1.780 speed: 4056 wps\n",
      "KL is 1.4358932971954346\n",
      "0.601 perplexity: 1.780 speed: 4041 wps\n",
      "KL is 1.4561794996261597\n",
      "0.701 perplexity: 1.780 speed: 4042 wps\n",
      "KL is 1.440863013267517\n",
      "0.801 perplexity: 1.778 speed: 3846 wps\n",
      "KL is 1.4321036338806152\n",
      "0.901 perplexity: 1.780 speed: 3827 wps\n",
      "KL is 1.443294644355774\n",
      "Epoch: 17 Train Perplexity: 1.780\n",
      "Epoch: 17 Valid Perplexity: 1.780\n",
      "Epoch: 18 Learning rate: 0.083\n",
      "0.000 perplexity: 1.558 speed: 2679 wps\n",
      "KL is 1.4343363046646118\n",
      "0.001 perplexity: 1.707 speed: 3721 wps\n",
      "KL is 1.4451628923416138\n",
      "0.101 perplexity: 1.771 speed: 3981 wps\n",
      "KL is 1.4498341083526611\n",
      "0.201 perplexity: 1.772 speed: 3358 wps\n",
      "KL is 1.43850839138031\n",
      "0.301 perplexity: 1.775 speed: 3445 wps\n",
      "KL is 1.4221112728118896\n",
      "0.401 perplexity: 1.780 speed: 3570 wps\n",
      "KL is 1.4326741695404053\n",
      "INFO:tensorflow:Saving checkpoint to path ./saved_model/model.ckpt\n",
      "INFO:tensorflow:Recording summary at step 174741.\n",
      "0.501 perplexity: 1.779 speed: 3583 wps\n",
      "KL is 1.4332280158996582\n",
      "0.601 perplexity: 1.780 speed: 3650 wps\n",
      "KL is 1.4360591173171997\n",
      "0.701 perplexity: 1.780 speed: 3690 wps\n",
      "KL is 1.4475818872451782\n",
      "0.801 perplexity: 1.777 speed: 3724 wps\n",
      "KL is 1.4212045669555664\n",
      "0.901 perplexity: 1.778 speed: 3593 wps\n",
      "KL is 1.4219999313354492\n",
      "Epoch: 18 Train Perplexity: 1.778\n",
      "Epoch: 18 Valid Perplexity: 1.773\n",
      "Epoch: 19 Learning rate: 0.075\n",
      "0.000 perplexity: 1.950 speed: 2795 wps\n",
      "KL is 1.4215576648712158\n",
      "0.001 perplexity: 1.780 speed: 4058 wps\n",
      "KL is 1.4169172048568726\n",
      "0.101 perplexity: 1.764 speed: 4059 wps\n",
      "KL is 1.4251792430877686\n",
      "0.201 perplexity: 1.773 speed: 4055 wps\n",
      "KL is 1.4299625158309937\n",
      "0.301 perplexity: 1.776 speed: 4027 wps\n",
      "KL is 1.423761010169983\n",
      "0.401 perplexity: 1.779 speed: 3640 wps\n",
      "KL is 1.4216715097427368\n",
      "0.501 perplexity: 1.778 speed: 3670 wps\n",
      "KL is 1.4183294773101807\n",
      "0.601 perplexity: 1.778 speed: 3733 wps\n",
      "KL is 1.428529977798462\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.701 perplexity: 1.778 speed: 3771 wps\n",
      "KL is 1.4042292833328247\n",
      "0.801 perplexity: 1.775 speed: 3802 wps\n",
      "KL is 1.4130198955535889\n",
      "0.901 perplexity: 1.776 speed: 3664 wps\n",
      "KL is 1.4213001728057861\n",
      "Epoch: 19 Train Perplexity: 1.775\n",
      "Epoch: 19 Valid Perplexity: 1.776\n",
      "Epoch: 20 Learning rate: 0.068\n",
      "0.000 perplexity: 2.040 speed: 3383 wps\n",
      "KL is 1.414412498474121\n",
      "0.001 perplexity: 1.751 speed: 3786 wps\n",
      "KL is 1.4192227125167847\n",
      "0.101 perplexity: 1.767 speed: 4081 wps\n",
      "KL is 1.416638731956482\n",
      "0.201 perplexity: 1.771 speed: 4037 wps\n",
      "KL is 1.4229174852371216\n",
      "0.301 perplexity: 1.774 speed: 4015 wps\n",
      "KL is 1.4149538278579712\n",
      "INFO:tensorflow:Recording summary at step 193497.\n",
      "0.401 perplexity: 1.776 speed: 3644 wps\n",
      "KL is 1.413296103477478\n",
      "0.501 perplexity: 1.776 speed: 3656 wps\n",
      "KL is 1.4103249311447144\n",
      "0.601 perplexity: 1.775 speed: 3715 wps\n",
      "KL is 1.414872407913208\n",
      "0.701 perplexity: 1.776 speed: 3745 wps\n",
      "KL is 1.4087764024734497\n",
      "0.801 perplexity: 1.773 speed: 3779 wps\n",
      "KL is 1.411034107208252\n",
      "0.901 perplexity: 1.774 speed: 3652 wps\n",
      "KL is 1.394631028175354\n",
      "Epoch: 20 Train Perplexity: 1.774\n",
      "Epoch: 20 Valid Perplexity: 1.781\n",
      "Test Perplexity: 1.774\n",
      "Saving model to ./saved_model/.\n"
     ]
    }
   ],
   "source": [
    "\n",
    "## Training !\n",
    "with tf.Graph().as_default():\n",
    "    tf.train.import_meta_graph(metagraph)\n",
    "    for model in models.values():\n",
    "        model.import_ops()\n",
    "    sv = tf.train.Supervisor(logdir=save_path)\n",
    "    config_proto = tf.ConfigProto(allow_soft_placement=soft_placement)\n",
    "    with sv.managed_session(config=config_proto) as session:\n",
    "\n",
    "        for i in range(train_config.max_max_epoch):\n",
    "            lr_decay = train_config.lr_decay ** max(i + 1 - train_config.max_epoch, 0.0)\n",
    "            m.assign_lr(session, train_config.learning_rate * lr_decay)\n",
    "\n",
    "            print(\"Epoch: %d Learning rate: %.3f\" % (i + 1, session.run(m.lr)))\n",
    "            train_perplexity = run_epoch(session, m, eval_op=m.train_op,\n",
    "                                         verbose=True)\n",
    "            print(\"Epoch: %d Train Perplexity: %.3f\" % (i + 1, train_perplexity))\n",
    "            valid_perplexity = run_epoch(session, mvalid)\n",
    "            print(\"Epoch: %d Valid Perplexity: %.3f\" % (i + 1, valid_perplexity))\n",
    "            \n",
    "        test_perplexity = run_epoch(session, mtest)\n",
    "        print(\"Test Perplexity: %.3f\" % test_perplexity)\n",
    "        \n",
    "\n",
    "        print(\"Saving model to %s.\" % save_path)\n",
    "        sv.saver.save(session, save_path, global_step=sv.global_step)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Testing\n",
      "INFO:tensorflow:Restoring parameters from ./saved_model/-200000\n",
      "INFO:tensorflow:Starting standard services.\n",
      "INFO:tensorflow:Saving checkpoint to path ./saved_model/model.ckpt\n",
      "INFO:tensorflow:Starting queue runners.\n",
      "INFO:tensorflow:Recording summary at step 200000.\n",
      "Test Perplexity: 1.774\n",
      "----------------------------------------------------------------\n",
      "------------------ Prediction of Output ---------------------\n",
      "Computing batch 0/2500\n",
      "Computing batch 1/2500\n",
      "Computing batch 2/2500\n",
      "Computing batch 3/2500\n",
      "Computing batch 4/2500\n",
      "Computing batch 5/2500\n",
      "Computing batch 6/2500\n",
      "Computing batch 7/2500\n",
      "Computing batch 8/2500\n",
      "Computing batch 9/2500\n",
      "Computing batch 10/2500\n",
      "Computing batch 11/2500\n",
      "Computing batch 12/2500\n",
      "Computing batch 13/2500\n",
      "Computing batch 14/2500\n",
      "Computing batch 15/2500\n",
      "Computing batch 16/2500\n",
      "Computing batch 17/2500\n",
      "Computing batch 18/2500\n",
      "Computing batch 19/2500\n",
      "Computing batch 20/2500\n",
      "Computing batch 21/2500\n",
      "Computing batch 22/2500\n",
      "Computing batch 23/2500\n",
      "Computing batch 24/2500\n",
      "Computing batch 25/2500\n",
      "Computing batch 26/2500\n",
      "Computing batch 27/2500\n",
      "Computing batch 28/2500\n",
      "Computing batch 29/2500\n",
      "Computing batch 30/2500\n",
      "Computing batch 31/2500\n",
      "Computing batch 32/2500\n",
      "Computing batch 33/2500\n",
      "Computing batch 34/2500\n",
      "Computing batch 35/2500\n",
      "Computing batch 36/2500\n",
      "Computing batch 37/2500\n",
      "Computing batch 38/2500\n",
      "Computing batch 39/2500\n",
      "Computing batch 40/2500\n",
      "Computing batch 41/2500\n",
      "Computing batch 42/2500\n",
      "Computing batch 43/2500\n",
      "Computing batch 44/2500\n",
      "Computing batch 45/2500\n",
      "Computing batch 46/2500\n",
      "Computing batch 47/2500\n",
      "Computing batch 48/2500\n",
      "Computing batch 49/2500\n",
      "Computing batch 50/2500\n",
      "Computing batch 51/2500\n",
      "Computing batch 52/2500\n",
      "Computing batch 53/2500\n",
      "Computing batch 54/2500\n",
      "Computing batch 55/2500\n",
      "Computing batch 56/2500\n",
      "Computing batch 57/2500\n",
      "Computing batch 58/2500\n",
      "Computing batch 59/2500\n",
      "Computing batch 60/2500\n",
      "Computing batch 61/2500\n",
      "Computing batch 62/2500\n",
      "Computing batch 63/2500\n",
      "Computing batch 64/2500\n",
      "Computing batch 65/2500\n",
      "Computing batch 66/2500\n",
      "Computing batch 67/2500\n",
      "Computing batch 68/2500\n",
      "Computing batch 69/2500\n",
      "Computing batch 70/2500\n",
      "Computing batch 71/2500\n",
      "Computing batch 72/2500\n",
      "Computing batch 73/2500\n",
      "Computing batch 74/2500\n",
      "Computing batch 75/2500\n",
      "Computing batch 76/2500\n",
      "Computing batch 77/2500\n",
      "Computing batch 78/2500\n",
      "Computing batch 79/2500\n",
      "Computing batch 80/2500\n",
      "Computing batch 81/2500\n",
      "Computing batch 82/2500\n",
      "Computing batch 83/2500\n",
      "Computing batch 84/2500\n",
      "Computing batch 85/2500\n",
      "Computing batch 86/2500\n",
      "Computing batch 87/2500\n",
      "Computing batch 88/2500\n",
      "Computing batch 89/2500\n",
      "Computing batch 90/2500\n",
      "Computing batch 91/2500\n",
      "Computing batch 92/2500\n",
      "Computing batch 93/2500\n",
      "Computing batch 94/2500\n",
      "Computing batch 95/2500\n",
      "Computing batch 96/2500\n",
      "Computing batch 97/2500\n",
      "Computing batch 98/2500\n",
      "Computing batch 99/2500\n",
      "Computing batch 100/2500\n"
     ]
    }
   ],
   "source": [
    "## Testing\n",
    "print (\"Testing\")\n",
    "predicted = []   # Variable to store predictions\n",
    "with tf.Graph().as_default():\n",
    "    tf.train.import_meta_graph(metagraph)\n",
    "    for model in models.values():\n",
    "        model.import_ops()\n",
    "    sv = tf.train.Supervisor(logdir=save_path)\n",
    "    config_proto = tf.ConfigProto(allow_soft_placement=soft_placement)\n",
    "    with sv.managed_session(config=config_proto) as session:\n",
    "        \n",
    "       # session = tf.Session()\n",
    "    \n",
    "        test_perplexity = run_epoch(session, mtest)\n",
    "        print(\"Test Perplexity: %.3f\" % test_perplexity)\n",
    "\n",
    "        print (\"----------------------------------------------------------------\")\n",
    "        print (\"------------------ Prediction of Output ---------------------\")\n",
    "\n",
    "       #  inputs, predicted = fetch_output(session, mtest)\n",
    "\n",
    "        costs = 0.0\n",
    "        state = session.run(model.initial_state)\n",
    "\n",
    "        inputs = []\n",
    "        outputs = []\n",
    "        targets = []\n",
    "        fetches = {\n",
    "            \"final_state\": model.final_state,\n",
    "            \"output\": model.output,\n",
    "            \"input\": model.input_data,\n",
    "            \"targets\": model.targets\n",
    "        }\n",
    "\n",
    "        for step in range(model.input.epoch_size):\n",
    "            feed_dict = {}\n",
    "            for i, (c, h) in enumerate(model.initial_state):\n",
    "                feed_dict[c] = state[i].c\n",
    "                feed_dict[h] = state[i].h\n",
    "\n",
    "            print (\"Computing batch %i/%i\"%(step, model.input.epoch_size))\n",
    "            vals = session.run(fetches, feed_dict)\n",
    "            state = vals[\"final_state\"]\n",
    "            output = vals[\"output\"]\n",
    "            input_i = vals[\"input\"]\n",
    "            \n",
    "            outputs.append(output)\n",
    "            inputs.append(input_i)\n",
    "            targets.append(vals[\"targets\"])\n",
    "            if (step == 100):\n",
    "                break;\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Input and output of the first chain of the first batch\n",
      "[[ 0.78568113]\n",
      " [ 0.3411822 ]\n",
      " [ 0.69928485]\n",
      " [ 0.37534976]\n",
      " [ 0.68023551]\n",
      " [ 0.59758115]\n",
      " [ 0.42770228]\n",
      " [ 0.50533962]\n",
      " [ 0.16082796]\n",
      " [ 0.39151707]\n",
      " [ 0.11470659]\n",
      " [ 0.35381809]\n",
      " [ 0.37184376]\n",
      " [ 0.44925529]\n",
      " [ 0.43924159]\n",
      " [ 0.3247906 ]\n",
      " [-0.12229992]\n",
      " [-0.24622317]\n",
      " [-0.15712306]\n",
      " [ 0.05666054]]\n",
      "Soft Outputs\n",
      "[[ 0.57892776  0.42107227]\n",
      " [ 0.85956663  0.14043331]\n",
      " [ 0.9392904   0.06070961]\n",
      " [ 0.95536703  0.04463298]\n",
      " [ 0.94723648  0.0527635 ]\n",
      " [ 0.84963983  0.15036012]\n",
      " [ 0.45057261  0.54942733]\n",
      " [ 0.15611783  0.84388214]\n",
      " [ 0.24667032  0.75332969]\n",
      " [ 0.49709234  0.50290769]\n",
      " [ 0.7880637   0.21193631]\n",
      " [ 0.84090334  0.15909669]\n",
      " [ 0.80555278  0.19444714]\n",
      " [ 0.68619329  0.31380671]\n",
      " [ 0.64310175  0.35689825]\n",
      " [ 0.74428773  0.25571221]\n",
      " [ 0.85805261  0.14194739]\n",
      " [ 0.90897298  0.09102707]\n",
      " [ 0.83597082  0.16402918]\n",
      " [ 0.47672921  0.52327085]]\n",
      "Hard Outputs\n",
      "[0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1]\n",
      "targets\n",
      "[1 1 1 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 1 1]\n",
      "Prob of -1\n",
      "[ 0.78568113  0.3411822   0.69928485  0.37534976  0.68023551  0.59758115\n",
      "  0.42770228  0.50533962  0.16082796  0.39151707  0.11470659  0.35381809\n",
      "  0.37184376  0.44925529  0.43924159  0.3247906  -0.12229992 -0.24622317\n",
      " -0.15712306  0.05666054]\n"
     ]
    }
   ],
   "source": [
    "print (\"Input and output of the first chain of the first batch\")\n",
    "print (inputs[0][0])\n",
    "\n",
    "print(\"Soft Outputs\")\n",
    "print (outputs[0][0])\n",
    "print(\"Hard Outputs\")\n",
    "selected_words = np.argmax(outputs[0][0], axis = 1)\n",
    "print (selected_words)\n",
    "print(\"targets\")\n",
    "print (targets[0][0])\n",
    "\n",
    "print (\"Prob of -1\")\n",
    "print (np.array(inputs[0][0])[:,0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/montoya/anaconda3/lib/python3.6/site-packages/matplotlib/axes/_axes.py:545: UserWarning: No labelled objects found. Use label='...' kwarg on individual plots.\n",
      "  warnings.warn(\"No labelled objects found. \"\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYoAAAEJCAYAAACKWmBmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4U9UbwPHvSRelFMoslFUEyrTsqSwHAvoT3DgQFUTc\nG1EZLlTcW1RA3DgAQWWq4GBJkSHDsqXMMsoo0Ja25/dHRkNJ07S5yU3S9/M8eci4956XS+jbs5XW\nGiGEEKIoFrMDEEIIEdgkUQghhHBLEoUQQgi3JFEIIYRwSxKFEEIItyRRCCGEcEsShRBCCLckUQgh\nhHBLEoUQQgi3ws0OwJ1q1arpxMTEUp174sQJYmJijA3IB4IhTonROMEQp8RonECPc+XKlQe11tWL\nPVBrHbCPdu3a6dJauHBhqc/1p2CIU2I0TjDEKTEaJ9DjBFK0Bz+LpelJCCGEW5IohBBCuCWJQggh\nhFuSKIQQQrgliUIIIYRbkiiEEEK4JYlCCCGEW5IoQlx2do7ZIQghgpwkihA2ceJ0YmI7M3fuYrND\nEUIEMUkUIeznX5aRl5fHy69OMTsUIUQQk0QRwtJ27Qfg11//Yvv2XSZHI4QIVpIoQtjOnXsdzz+e\nMtPESIQQwUwSRYjKzc1lz54DjtdTPplFXl6eiREJIYKVIYlCKdVHKZWqlNqilBrp4vOeSqmjSqnV\ntscYI8oVRduz5wD5+fnUrFmNc86pQ1raPn75ZbnZYQkhgpDX+1EopcKAd4GLgV3ACqXULK31hkKH\n/qG1vszb8oRn7M1O9evX4rJLuzN6zLtMmjyD3r27mhyZCGQ5Oac5cOAIK1duYP/+Q6SnH2Z/+iH2\n7z/E/v3W5xViyvPx5GeoXLmi2eEKPzFi46KOwBat9TYApdRUoD9QOFEIP0pLs3Zk161Tk1sG92fM\n2Pf4fuZCDh06QtWqcSZHJ8yyePEqNmzYdsYP//QDh23PD5GRccyj64x7/iNeeflhH0crAoURiaI2\nkOb0ehfQycVxXZVSa4HdwCNa6/UGlC2KsDPNWqOoV68mderEc0nvrsydt5gvvvyJ++690eTohBmW\nLl3D+d1vcXtMWFgYlSrFULduLeJrVCE+vio1bH/G16hKfn4+tw4Zw9vvfMW991xP/foJ/glemMpf\nW6H+DdTTWmcqpfoB3wONXR2olBoGDAOIj49n0aJFpSowMzOz1Of6k6/iXLp0JQDZ2dbrd+6UxNx5\ni3nzrc85t2UCSinTYzRSMMQI5sb53TRruQ0b1qZTx2ZUrhx75iMulooVy3Py5EkqVKhQ5HUuuKAt\nv/76N7ffMYYnRt7kp+jPJP/efubJNnjuHkAXYJ7T68eBx4s5ZwdQrbhry1aopfe/y+/VWJL1tGkL\ntNZaZ2Vl66rVu2ssyTolZX2JrhUM97KoGA8dOqJ//XW5njJlpj5y5Jh/g3LBzHt5+7CnNZZk/dbb\nX7g9rrgYt21L0xFRbbUKa6VXr/7XwAg9FwzfSa0DP078uBXqCqCxUqqBUioSGAjMcj5AKVVT2X6F\nVUp1xDra6pABZYsi7EzbB0C9erUAiIqKZNBN1rEEkz/+3rS4fCU/P5/Nm//ju+8WMHrMO/zv8nup\nl3gJVat354KLbueW20bz8COvmh2mqTZs3ApA82YNvbpOgwZ1uPuugWiteWzkG0aEJgKc101PWutc\npdQ9wDwgDJistV6vlBpu+3wCcDVwp1IqFzgFDLRlM+EjabZEUbduTcd7t906gDfe/JwvvpzNKy8/\nRHR0ObPC88qJEyf5558trFmTyuo1qaxZu4lVqzaSlXX2AojR0eVo2bIhK1as56upc3jt1UeoWLHo\nZpVQpbVmw4ZtADRvfo7X13vyiaFM/vh75s1fws8/L+Oiizp7fU0RuAzpo9BazwZmF3pvgtPzd4B3\njChLFC8z8ySHDx8lKiqS6tUrO94/99
zGtG/fgpSU9cyY8Ss33NDPxCg9t3nzf3zz7XzWrNnEmrWp\nbN68E1e/ZyQkVKd1q6a0apVEq+QkWrduSqNGdQkLC6PnBUP47bcUvvpqDnfccY0Jfwtz2Uc0xcXF\nUrNmNa+vV61aZUY+dhtPPPkWI0a+TspfX2GxyPzdUOWvzmzhR861icL/eYfcOoCUlPVMnvJ9UCSK\nAwcO06HTjRw9etzxXnh4OM2aNaB1qya2pNCEzMyDDBhQ9DSd24dcyW+/pTBx8owymSg2bLA1OzVv\nWKKBDO7cf98NvPveVFat+pepU+cGxfdJlI78ChCCChJF/FmfDRzYh3Llovjll+VBsVDgCy9O4ujR\n43To0IKPJz3DqpVfk3lsKWtXf8enn4zj4YcGc9FFnYmLc9+cdOWVFxIXF0tKynpWr/7XT9EHjg0b\nbc1OzbxvdrIrXz6ap8feCcCTo9+WvU9CmCSKELRzp60ju26tsz6Li6vI1VddBFjXfwpkaWn7eO/9\nbwD44P3R3HJLf1q3bkpUVGSJrxUdXY6bbrwUgImTphsaZzAwsn/C2eDBl9O8+Tns2LGH9yd8Y+i1\nReCQRBGC0nYVXaMAa6c2WFeUDeSFAp997kOys3O47tpLaNOmmdfXu33oVQB8/sVsTp3K8vp6wcQX\nNQqwNgO++Pz9ADw37qMzmghF6JBEEYLs6zy5qlEA9OjRPuAXCty0aQeTP/6esLAwnnn6LkOumZyc\nRIcOLTh69DjffbfAkGsGC+c+CqNddlkPunVry6FDRxj/0seGX1+YTxJFCCqYQ1HT5ecWi4Vbb+kP\nwKTJM/wWV0mMfep98vLyuGXw5SQlJRp23duHWGsVEwP07+0LBw4c5sCBDCpUKE+dOq5rmd5QSvHS\niw8C8MabX7B7937DyxDmkkQRglzNoSjslsH9UUo5FgoMJGvWpDL167lERkYwZvQdhl574MA+xMRE\n8/vvK9m0aYeh1w5UGzduB6z9E0aNeCqsc+dkrr7qYk6dymLsU+/7pAxhHkkUIUZrXbByrJtEYV8o\nMCfnNF98+ZO/wvPIqNHWKTd33XmdY2a5UWJjY7ju2ksAmDipbNQqHM1OBvdPFPb8uHsJDw/n4ykz\nHWWK0CCJIsQcPJhBVlY2cXGxxMbGuD3W3qk9afL3LiewmWHJktX8+NPvxMRE8/jI23xShr1T+5NP\nZ5GTc9onZQQSR0e2D/onnDVuXJ9ht19Ffn4+Ix9/06dlCf+SRBFiHENjPfhN/PLLe1K1ahxr125i\n1Srz5xZorXli1NsAPPjATdSoUdUn5XTqdC4tWjQkPf0wP/zwm0/KCCS+GvHkypjRd1ChQnl++PE3\nfv99pc/LE/4hiSLEFIx4KrrZyS4qKtIxtyAQOrUXLFjKb7+lULlyRR5+6GaflaOUYuiQK4GyMafC\nV3MoXImPr8ojDw8GYMTI1wOmpiq8I4kixBTMoSg+UQAMue0KAL78yty5Bc61icdG3EpcnG+32Rx0\n02VERkYwb/4SR3INRRkZx9i79wDR0eX8tsnQww/dTHx8VZYv/4dp0372S5nCtyRRhJiCpifPEoV9\nocAjR44zY8avvgzNrRkzfmHlyg3UrFmNe++53uflVa0ax5VXXIjWOiSXXbfbaGt2atasgd8W7atQ\noTxjRw8H4IlRb3H6dOj3A4U6SRQhxjE0to5niQKsCwUCTJ5izg/MvLw8Ro15F4DRo4ZRvny0X8q9\nfai1+Wnyx98H9Ax1bzianfzQP+Fs6NArSEqqz+bNO/loYug374U6SRQhpvCGRZ4we6HAzz//iY0b\nt5GYmODoO/CHnj07OGaoL1iwzG/l+pNjsyIfj3gqLCIiguefuw+Ap5+ZwPHjJ/xavjCWJIoQ4+jM\n9rDpCcxdKDA7O4exT1snaD099i4iIyP8VrbFYnH00Xw0cZrfyvUns2oUYF2xt3PnZNLTD/Pqa5/6\nvX
xhHEkUIeT06dPs3XsQpRQJCdVLdK5ZCwVOnDSd//7bQ/Pm53Djjf7fz+CWwZcTFhbGrB9+Y//+\n0Nudt2AOhf8ThVKKl8dbl/Z45dVP2LfvoN9jEMaQRBFCdu9OR2tNQkJ1IiJK9pu5GQsFnjhxkmef\n+xCA5565h7CwML+U6ywhoQaX9utGbm4un3wa2Muul9SxY5mkpe0jKiqSBg1qmxLD+ee35fL/9eTE\niVM88+wHpsQgvGdIolBK9VFKpSqltiilRro5roNSKlcpdbUR5YozebJ0R1GcFwr01yigd96dyv79\nh2jfvgUDBlzglzJdGTrE2vw0cdL0kBr3/++/1jWemjRJJDzcvM0sX3j+PiwWCx9+NK3MrK8VarxO\nFEqpMOBdoC/QHLheKdW8iOPGA/O9LVO4VpLJdq4MvvlylFLM+P5Xny8UeOTIMceS1M8/d6/PFqvz\nRN++55OQUJ3Nm3fyxx9/mxaH0czsn3DWvHlDbrt1AHl5eTzx5NumxiJKx4gaRUdgi9Z6m9Y6B5gK\n9Hdx3L3ANCDdgDKFC6UZ8eSsbt2ajoUCv/xqtpGhneWVVz8lI+MYPXu256KLOvu0rOKEh4dz6y3W\nPppQ6tS290+0aOHfEU+uPP3UnURHl2Pa9J9Ztmyt2eGIEjIiUdQG0pxe77K956CUqg1cAcj6wz7k\nbq9sT/ljocD9+w/xxpufA/D8c/eZWpuwG3Kb9e/93bSfycg4ZnI0xvDXqrGeSEiowYMP3ATAo4+9\nFlJNfGWBvxou3wAe01rnF/dDQSk1DBgGEB8fz6JFi0pVYGZmZqnP9Scj41y1ah0Ax44eKvU1K1Wy\nULFiDGvWpPLRR5+TlFTX8Hv5zrvTOXHiFF27tCQ7O8OQaxsRY7u2Saz8exNPPf0mVwzo5nVMrvjz\ne/n3qg22Mkv2ffBVjF06N6JixRj+/HMVz7/wDud1PbfU1yqL/79NpbX26gF0AeY5vX4ceLzQMduB\nHbZHJtbmpwHFXbtdu3a6tBYuXFjqc/3JyDiTW1+tsSTrlJT1Xl3n/gfGayzJ+q67x2mtjY1xx47d\nOrJcO40lWa9Zk2rYdY2IcerUORpLsm7V5hqdn5/vfVAu+Ot7mZl5QquwVjoiqq3Oyckp0bm+jPGN\nNz/XWJJ1j163eXWdsvj/2xeAFO3Bz3kjmp5WAI2VUg2UUpHAQOCMcYZa6wZa60StdSLwHXCX1jp0\nF9gxSWkm27niy4UCn3n2A3JyTnP9wL4kJycZem1vDRhwAVWrxrFmTSorV24wOxyvpKb+h9aapKT6\nJR4q7UvXXtMbgHXrtpgciSgJrxOF1joXuAeYB2wEvtFar1dKDVdKDff2+sIzx4+f4MiR45QrF0W1\napW9upavFgpMTd3BlE9mERYWxjNP32XYdY0SFRXJzYMuA4J/+fGC/gnzO7Kd1axZjQoVynPo0JGA\n24JXFM2QeRRa69la6yStdUOt9TjbexO01hNcHHuL1vo7I8oVBZw7so3oHPbFQoFjxr5Lfn4+Q24b\nQKNG9Qy7rpHsa019+dUcMjNPmhxN6Zk5I9sdpRRNmiQC1l8cRHCQmdkhoiQ723nCeaHAvXu9X9pi\n1aqNfPPtfKKiIhk96g4DIvSN5s0b0rVra44fP8G33wbvlJ9AmUPhSlLj+gBs2vSfyZEIT0miCBH2\n/om6dUo/NNaZ80KBc+Z6v7Lqk6PeAeDuu66jjkEx+spQ+0KBQdz8VLBqbOAlCkeNQmZpBw1JFCHC\nvrOdUTUKKJhT8dnn86lavTudutzIDTeOZMzYd/nkk1ksXryKffsOFjsm/o8//mbO3D+pUKE8Ix+7\nzbD4fOXaa3sTGxvD0qVrWL8++Dpds7Ky2bp1F2FhYTS2/fYeSKRGEXzMWwBGGMrR9FTK5Ttc6dGj\nPVdfdTE//LiIw4eP8tdfR/nrr3VnHRcTE03DhnVpeE4dGjWqV/Bn
wzrUrVuTJ0a9BVi3yKxevYph\n8flKTEx5bri+Lx98+B2TJs/gtVcfNTukEtm06T/y8/Np0iSRqKhIs8M5i9Qogo8kihBR0r2yPWGx\nWPj2m1dYuHAhTZu2ZMuWNLZuTWPrtjTrc9ufGRnHWLt2E2vXbjrrGhER4Zw+nUuVKpV46MFBhsXm\na0OHXMkHH37Hp5/9yAvP3x+QP3CLEkgzsl1p3Ng6kGHLljTy8vJMWTVYlIwkihBhdGe2M6UUtWpV\np1at6nTr1vaszw8fPmpNIFvT2LptF1u27LT9mcbevQcA61o/FStWMDw2X2nXrjmtWzdh9epUvv/+\nV667ro/ZIXmsYMRTYA2NtYuNjSEhoTp79hxg5869NGhQx+yQRDEkUYSA/Px8Q9Z5Kq0qVSpRpUol\nOnRoedZnJ06c5PDhYwHfgV2YUoqhQ67knntfYOKkGcGVKAJ4xJNdkyaJ7NlzgE2b/pNEEQSkMzsE\nHDiQQU7OaapUqURMTHmzwzlDTEx56tatGRAL/5XUDdf3o1y5KH7+ZRnbtvl/L/HSCtQ5FM7sHdrS\nTxEcJFGEAKOW7hBnqly5ItdcfTHgv82cvJWTc5rNm3eeMbEtENljk5FPwUESRQhwNDvVkURhNPtM\n7Y+nfE9ubq7J0RRvy5ad5Obmcs45dYiOLmd2OEVy1ChSJVEEA0kUIaBgwyJJFEbr1q0tSUn12bPn\nAHPm/Gl2OMUKhv4JcKpRbJZEEQwkUYQAx6xsA4fGCit7pzbAxEkzTI6meIE8I9tZYmICERHh7Ny5\nl5MnT5kdjiiGJIoQkJa2H4B6dY0fGivg5kH/Izw8nJ9m/+EY7huoCmoUgTk01i48PJyGDesC1vkU\nIrBJoggBO9OkM9uX4uOrcmm/buTl5fHV1Dlmh+NWMIx4sivop9hhbiCiWJIoQoC9RiFNT74z6Cbr\nPhWfff6jyZEULTc31/FDt2nTBuYG4wHppwgekiiCXHZ2Dnv3HsBisZCQUN3scELWZZd1Jy4ultWr\nU1m3brPZ4bi0bdsucnJOU79+AhUqBNZ8GlekRhE8JFEEud270wFISKhOeLhMtPeVqKhIxzaegVqr\nCJYRT3ayOGDwkEQR5NLSfLfGkziTvfnpiy9nk5+fb3I0Zwum/gmApKSC5caLW6pemMuQRKGU6qOU\nSlVKbVFKjXTxeX+l1Fql1GqlVIpS6nwjyhVOs7Klf8LnzjuvDQ0a1Gb37nQWLVphdjhnCfRVYwur\nUaMKlSrFcuTIcQ4cOGx2OMINrxOFUioMeBfoCzQHrldKNS902C9AK611a+A2YKK35QqrtF3Ske0v\nSiluuvFSIDCbnwJ91djClFIkJVmXHJelPAKbETWKjsAWrfU2rXUOMBXo73yA1jpTF9QtYwCpZxpE\n1nnyL3ui+G7azwE1USwvL4+NG7cD0KxZ4I94smuSlAhIh3agMyJR1AacZ8zssr13BqXUFUqpf4Gf\nsNYqhAHs+1DIOk/+kZSUSMeOLcnMPMnMmYvMDsdhx449ZGVlU7t2DSpVijU7HI85+ilkiGxA89sw\nGa31DGCGUqo78CxwkavjlFLDgGEA8fHxLFq0qFTlZWZmlvpcf/I2zn9Tre3S6em7ffb3DYZ76c8Y\nO3dqwl9/rePNtz6hVq3oEp3rqziXLLFuUVurZmWvr+/Pe5mbexKAxYtXlqjMYPhOQvDEWSyttVcP\noAswz+n148DjxZyzDahW3LXbtWunS2vhwoWlPtefvI2zYlxXjSVZHzyYYUxALgTDvfRnjAcOHNbh\nkW11WEQbvW/fwRKd66s4Xxw/SWNJ1vc/MN7ra/nzXq5atVFjSdbNWgwo0XnB8J3UOvDjBFK0Bz/n\njWh6WgE0Vko1UEpFAgOBWc4HKKUaKdvONUqptkAUcMiAssu0o0ePc+xYJtHR5ahSpZLZ4ZQZ1apV\npm+f88jLy2Pq13PNDgcIvjkU
dgX7Z+8MimXcyyqvE4XWOhe4B5gHbAS+0VqvV0oNV0oNtx12FbBO\nKbUa6wip62zZTHghzWl58WDcQS6YBdqSHsE2h8IuJqY8derEc/p0Lv/9t9fscEQRDOmj0FrPBmYX\nem+C0/PxwHgjyhIFpCPbPP/7Xw8qVqzAypUb2LhxG81M/E0+Pz+fjbZEYWYcpdWkSSK7du0nNXWH\nY0VZEVhkZnYQS5MNi0xTrlyUY5tUs2sVaWn7OHHiFPHxValaNc7UWErDvuaTjHwKXJIogthOWb7D\nVIGypIejfyLImp3sHGs+yVyKgCWJIogV7JUdb3IkZVO3bm2pV68WO3fu5fffV5oWh2NXuyBsdgKp\nUQQDSRRBTGoU5rJYLAGxpEdBjSI4lu4oTGoUgU8SRRCTvbLNZ29++m7az5w6lWVKDI4RT0Fao6hf\nvxaRkRHs3p1OZuZJs8MRLkiiCFL5+fnsciwIKE1PZmnatAHt27fg2LFMfvjhN7+Xr7UO+j6KsLAw\nGjWyjnba7IfmJ601L738MT/8sMjnZYUKSRRBav/+Q5w+nUu1apUpX75ky0gIYw26ybzmpz170jl2\nLJNq1SpTo0ZVv5dvFH8uDrhy5QYeG/kGN98ySib5eUgSRZBydGRLbcJ0A6/rQ1hYGHPnLfH7vgrB\nXpuw8+figIuXrAbgyJHjLF/+j8/LCwWSKIKUfbJdvbrSkW22GjWqcknvruTm5vp9SY9g75+wK6hR\n+D5RLLElCoA5cxf7vLxQIIkiSO1Ms3dkS40iEJjV/OTY1U5qFB5bumyt4/mcuX/6vLxQIIkiSKWl\nWTuyZWhsYOjfvxexsTGsWLHer8M8C2oUwTk01s55iKwvl4FLS9tHWto+KlWKpVy5KP7+eyP79h30\nWXmhQhKFC6dOZXH7sKeZPHmG2aEUSfbKDizR0eW4+irrFiv+qlVorVm/PjRqFFWrxlG5ckWOHz/B\n/v2+W1h66dI1AHTpnEzPHu0BmDdvic/KCxWSKFwYM/Y9Jk6azp13j2Pbtl1mh+PSTkdntiSKQFGw\npMdPflnSIz39MBkZx4iLi6VmzWo+L8+XlFJ+mXi3xJYounZtRd++5wHS/OQJSRSFrFixjtde/wyA\nnJzTPPHkWyZH5FqazMoOOD16tKdOnXh27NjD4sWriz/BSwX9Ew1DYpl5x1Iem3zXT7FkqfXfpWuX\n1vTtcz4A8xcslWGyxZBE4SQn5zS3DR1Lfn4+g266jHLlovj6m3ksc+r8CgTZ2Tns33+IsLAwatUK\n7t8kQ4nFYuHGG/oB/ml+CpURT3aOGsWmHT65/qlTWaxalYrFYqFjx5Y0blyfhg3rkpFxjL/+WueT\nMkOFJAonz78wkXXrttC4cT0+mDCahx4cBMAjI171aQdbSdlnZNeuXYOwsDCToxHO7M1P33w7n6ys\nbJ+WFSpzKOx8XaNISdlAbm4uycmNiY2NAaBvH2l+8oQkCpu1azcx7vmJAEz88Cmio8vx2IhbqV69\nMosXr2bGjF9MjrCAoyNb9qEIOC1aNKJNm6YcPXqcH3/83adlBfuqsYX5ukbh3OxkZ29+kvkU7kmi\nAHJzc7lt6Fhyc3O5687r6N69HQAVK1bg6bF3AfDY42+Sk3PazDAdZGe7wOavbVKDfdXYwho1qotS\nim3bdnP6tPH/15YsKRjxZNezZ3uioiJZuXKDT0dbBTtJFMBrr3/GypUbqFevFi++cP8Znw0degVN\nmzZgy5adTPjgG5MiPFPaLtnZLpBdP7AvFouF2XP+5ODBDJ+UcfBgBunph6lQwbrndCiIji5HvXq1\nyM3NZfv23YZeW2vtNOKpoEZRvny0DJP1gCGJQinVRymVqpTaopQa6eLzG5VSa5VS/yilliilWhlR\nrhE2bdrB2KfeB+DDCaMdbZd2ERERvPTiAwA8/cwHHDlyzO8xFuaoUcjQ2IBUs2Y1el/chdzcXL
7+\nZp5Pyti4cTtg7Z8IhRFPdkmN6wHG91Ns2bKTgwcziI+vSoMGtc/4rG9fe/OT9FMUxetEoZQKA94F\n+gLNgeuVUs0LHbYd6KG1Phd4FvjQ23KNkJ+fz5DbnyIrK5vBN1/OJZec5/K4yy7rQc+e7Tl8+CjP\nvzDJz1GezTE0VhJFwLI3P33+xU8+ub5jaGyI9E/Y+aqfwt7s1LVLq7MSq71De/6CpeTl5Rlabqgw\nokbREdiitd6mtc4BpgL9nQ/QWi/RWtvr4MuAOgaU67X3J3zDn3+uIj6+Kq+9+kiRxymleOWlhwF4\n860v2LHD2GpxSdnXeZI5FIFrwIBeVKhQnmXL1vpkjwXH0NgQ6Z+wc6z5ZHCNYumys5ud7Bo3rs85\n59Th8OGjMky2COEGXKM2kOb0ehfQyc3xQ4A5RX2olBoGDAOIj49n0aJFpQoqMzPT7bn79h/m0RGv\nAXDXnf1Zu3ZVsde8+KL2LPg5hSG3j2L0k4NLFVdJ4yxMa+1ov/3vvy1kZOw1JA53ShqjGQIxxq5d\nWjB/wQqeG/cut95inV9hVJyLF6cAkJt7wvC/t5n3MivL2rS7/K81bmMoaYwLFlhHNZUrl+/yvORz\nG7Bt2y7en/AF2dnG9SsF4veyVLTWXj2Aq4GJTq8HAe8UcWwvYCNQ1ZNrt2vXTpfWwoULi/wsPz9f\n977kDo0lWV99zcMeX/O///boqOj2GkuyXr58baljc+YuTlcyMo5qLMk6JraTzs/PNySG4pQ0RjME\nYowLFizVWJL1OY36Of6tjIozoc6FGkuy3rYtzZDrOTPzXm7fvktjSdY1Ey5we1xJYjxy5JhWYa10\nRFRbfepUlstjfvzxN40lWbfveH1Jwi1WIH4vnQEp2oOfxUY0Pe0G6jq9rmN77wxKqWRgItBfa23q\nOLQpU2Yyf8FSqlSpxDtvP+7xefXq1eLBB24C4JERr5kyCc+5IzuUOjFDUa9eHUhIqM62bbvO2APB\nW0eOHGPPngNER5ejfv0Ew64bCOrVq0VUVCT79h3k2LFMQ665fPk/tl88m1OuXJTLY3r16kBUVCQp\nKetJT5dhsoUZkShWAI2VUg2UUpHAQGCW8wFKqXrAdGCQ1nqTAWWW2t69B3jokVcAePP1EcTHl2z7\nyJGP3Ua1apX544+/mTlzoS9CdEs6soNHWFgYN95g/D4V9hFPzZo1wGIJrRHuFouFxgaPfHIMi+1S\n9GDL8uWj6WGbPzV//lJDyg0lXn/LtNa5wD3APKzNSt9ordcrpYYrpYbbDhsDVAXeU0qtVkqleFtu\nKWPlrrtiSX1DAAAdp0lEQVTHceTIcfr17caNN15a4mtUqhTLU2Otf60RI1/3ycQgdxw720lHdlBw\nXtIjOzvHkGuG6ognO/tud4YliiVnz8h2pWCYrMzSLsyQX0e01rO11kla64Za63G29yZorSfYng/V\nWlfWWre2PdobUW5JffvtfL6fuZDY2BgmvD+q1E03w26/iqSk+mzevJMPPvzO4Cjdk53tgsu55zam\nVasmZGQcY/bsPwy5ZqiOeLKzj3wyYohsXl4ey20jmbp0SXZ7rH05j3nzl8gw2UJCq97qxsGDGdxz\n3wsAvDz+Qa8mq1kn4T0IwFNPT+Do0eOGxOiJgqYnqVEEC6O3SXUs3SE1imJt2LCNY8cySUxMICGh\nhttjk5Lq06BBbQ4dOkJKynqvyw4lZSZRPPDgSxw4kEHPnu25/farvL7e5Zf3pHv3dhw6dIQXXvTf\nJDzZsCj42Jf0+PGn3zl27ITX1yuoUYRmojCyRmFvdurSufjFIJRSskhgEcpEovjxx9/44svZREeX\nY+KHTxnSAWidhPcQAG+8+QX//bfH62t6omDDIkkUwSIhoQYXXtCR06dzmfXDYq9Gyx0/foKdO/cS\nFRV51lIUocI+O3vTpv+8HlnovKOdJ2TZcddCPlEcPXqc4X
c9B8Bzz95Nw4Z1iznDcx06tOSG6/uR\nnZ3Dk6PeNuy6RcnLy2PXrnSAkFkIrqwYctsVAEya/BOXXnYPW7emFXOGa//+ax3x1KRJIuHhRsyX\nDTxVqlSiWrXKnDhxij170r26VsGIJ/cd2Xa9enUgMjKCFSvWc+DAYa/KDiUhnygeHfE6u3en06nT\nudx/342GX3/cc/cQFRXJF1/O9nm75r59B8nNzaV69cpER5fzaVnCWNdeewnvvfMkMTHRzJn7Jy2T\nr+LZ5z4o8eZGod4/YWfEUh7p6YfYsmUn5cuXIzm5sUfnxMSUp0f3dmitZZisk5BOFL/+upyPJk4j\nMjKCSR895ZPd4BITa3P/fTcAvp+El5Zm3dlOhsYGH6UUd955LZ9OeYJBN11GVlY2Y8a+R3Lrq1mw\nwPMfSI7NikK0f8Kuib2fInVHqa+xbNk/AHTqdG6Jal+ymuzZQjZRnDqVzdBhTwMwetQwWrRo5LOy\nHh85hKpV4/jttxR++OE3n5Vj39murjQ7Ba0qVSry6SfjWPjLRJo1O4fNm3fSu89wrhv4KLt37y/2\n/IIaRWgOjbVz1Ci8WFDR1Y52nigYJruU/Pz8UpcfSkI2UUz++Ce2b99Nq1ZNeGzErT4tKy6uImPH\n3AH4dhJewYZFUqMIdj17dmD1398w/sUHKF++HN98O5+mzQfw+hufkZubW+R5oT7iyc4+RDY11ZtE\ncfaOdh6V3SSRxMQEDh7MkGGyNiGZKJYuXcO06b8TFhbG5IlPExER4fMy7xh2DY0b1yM1dQcfTZzu\nkzIKZmXLiKdQEBkZwYhHb2XDuhkM6N+LzMyTPPTwK7TrcD2LF5+9mvHJk6fYvn034eHhNGpk3KCM\nQORtjSIn5zQrVlh/yHcuYaKQYbJnC7lEkZeXx5Dbn0JrzaOPDKZt22Z+KTcyMoLxL1h3whv71Ps+\nmYRX0PQkiSKU1K+fwIzpb/DDzLdITExg7dpNnN/9FoYMHXvGVqqpqTvQWpOUVM8vv/yYqVGjeiil\n2L59d6n2ql+9+l+ysrJp2rQBVavGlfh8GSZ7ppBLFGFhYbz1xmN07NCMsWOGF3+CgQYMuIDzz2/D\nwYMZjH/pY8Ovn7ZLOrND2WWX9WD9P9MZ9eTtRESEM/nj72nSrD8TJ04nPz+/zPRPAERFRZKYmEBe\nXh7btu0q8fmeLATozgUXdCQyMoK//lrns33Pg0nIJQqAiy7qzPgXhxe5pLCvOE/Ce/2Nzx01AKM4\nahSyzlPIKl8+mmefuYd/1kzjwgs7cfjwUW6/42nO6zaY722rFYd6/4RdQT/FjhKf67z1aWnExJSn\nezcZJmsXkonCTJ06JTPwuj5kZWUzavQ7hl331KksDhzIIDw8nJo1qxl2XRGYmjRJZMG8D5j65Xhq\n1arOsmVr+W7aAiD051DYebOUh7utTz3Vt680P9lJovCB58fdS2RkBJ99/iOrVm005Jq7bM1OderU\n8Ml8EBF4lFJcd10f/t3wPfffd6Nj6Zk2bZqaHJl/OC/lURJpafvYtWs/cXGxjmuUhvNqsmV9mKwk\nCh9o0KAO99w9EIBnnv3AkGs672wnypaKFSvwxusjWLv6W+bNeZ8kW5NMqEtqXLoahfNCgN6s69a0\naQPq10/gwIEMVq7cUOrrhAJJFD7y6CO3UK5cFN/PXMjatd5v6ic724kWLRrRu3dXs8Pwm9LWKEq6\nEGBRrMNkpfkJJFH4TM2a1RhmW878uXEfeX29gg2LJFGIsqF27RpER5cjPf0wR44c8/g8b0c8OZP5\nFFaSKHxoxKO3EBkZwXfTFrDRNqO2tByT7WTDIlFGWCyWEi8OePLkKVavTsVisdCx47lex2AfJrt8\n+T8cOnTE6+sFK0MShVKqj1IqVSm1RSk10sXnTZVSS5VS2UqpR4woMxjUrh3PbbcOQGvNuOe9q1XI\nPhSiLHL0U3g4RDYlZQ
O5ubm0apVEhQrlvS6/QoXydOvWtswPk/U6USilwoB3gb5Ac+B6pVTzQocd\nBu4DXvG2vGAz8rHbCA8P56upc9nsxQJnsrOdKIuaNCnZUh72hQA92dHOU9JPYUyNoiOwRWu9TWud\nA0wF+jsfoLVO11qvAHyzWl4Aq18/gcE3/4/8/Hyef2Fiqa6htZYahSiTCmoUHiYKLyfauWLvp5g7\nb3GZHSZrxBZZtQHn7bp2AZ1KezGl1DBgGEB8fDyLFi0q1XUyMzNLfa7RevVqycdTZvLpZz/S++LW\n1KpV1fGZJ3EeO3aCEydOER0dxapVK1FK+TjiMwXSvSxKMMQIwRFnIMWYecK6y9zfq9afEZOrGLXW\n/PZ7CgAWS5ZhfwetNfE1KrM/PYOPPvqcJk3qeXxuIN1Lr2itvXoAVwMTnV4PAt4p4tingEc8vXa7\ndu10aS1cuLDU5/rCoJuf0FiS9bA7nj7jfU/iXLVqo8aSrJu3HOCj6NwLtHvpSjDEqHVwxBlIMWZk\nHNVYknV0TEedl5fneN9VjKmp2zWWZF0z4QKdn59vaBx3DH9GY0nWzzw7oUTnBdK9dAVI0R78LDai\n6Wk34LzmcR3be8LJE48PRSnFx1NmOpqRPFUwh0JGPImyJS6uIjVqVOHUqSx273a/f7Zzs5PRte6y\nPkzWiESxAmislGqglIoEBgKzDLhuSGnatAHXXXsJp0/n8tLLJVtZVjqyRVmW5OG2qAXrOxnXP2F3\nwQUdiYgIZ/nyfzh8+Kjh1w90XicKrXUucA8wD9gIfKO1Xq+UGq6UGg6glKqplNoFPASMUkrtUkpV\n9LbsYPPkE0MB+GjidPbuPeDxedKRLcoy+yqyxY18KtjRzvhEERsbQ7dubcnPz2f+/CWGXz/QGTKP\nQms9W2udpLVuqLUeZ3tvgtZ6gu35Pq11Ha11Ra11nO2551MtQ0TLlo258ooLyc7O4eVXpnh8nmOd\nJ9mwSJRBntQojhw5xvr1W4mMjPDZZmVluflJZmb72agnbwdgwgffkZ5+yKNz7Mt3SI1ClEWOGoWb\n2dnLl/9jGwDT3Gf70NjnU5TFYbKSKPysTZtmXHZpd06dyuK11z/z6Jy0NNnZTpRd9sUB3a0ia+T6\nTkVp3rwhdevWJD39MKtW/euzcgKRJAoTjB41DIB33/uao0dPuD02Ly/PMdqjTh3Z2U6UPeecU4ew\nsDB27NhDVla2y2N8MdGusLK8mqwkChN07Hgul/TuSmbmSaZNX+T22L17D5KXl0d8fFWioiL9E6AQ\nASQyMoIGDWqjtWbr1rSzPs/Ly2P5X/8A0MWHiQLKbj+FJAqT2GsV02f87nYJ5YJ9sqV/QpRd7hYH\nXL9+K8ePnyAxMYFatar7NI4LL+xEREQ4y5atLVPDZCVRmOS889rQq1cHTpzI4u13viryOHuikA2L\nRFnmbnFA+452XbuUfn9sT8XGxnD++W3Iz89nwYKys5qsJAoTjRl1BwBvvPkFx4+77qtI2yUd2UK4\nq1EYtaOdp/pcYu+nKDvNT5IoTNSjR3vObXkOhw8f5b33v3Z5TEHTk3Rki7LLsS3q5p1nfeaPEU/O\nyuJqspIoTKSUYtCgSwB45dVPOXHi5FnHOIbGyjpPogwratJdevohtm5NIyYmmnPPbeyXWFq2bETt\n2jXYv/8Qq1en+qVMs0miMFn7dk3o2LElBw9m8MGH3531ecFe2VKjEGVXQkINYmKiOXToyBlbki5d\nuhaAjh1bEh5uxK4JxVNK0a9vNwBmz/nDL2WaTRKFyZRSjH7SOgLq5Vc+4dSprDM+d+yVLX0UogxT\nSrncP9u+o50/OrKd9etbtobJSqIIAJde2p02bZqyb99BJk2e4Xj/5MlTHDp0hIiIcOLjq7q5ghCh\nz9VSHv7un7BzHibrXMMJVZIoAoBzrWL8Sx+TnZ0DFPRP1KkTj8Ui/1SibHP0U9iW8sjJ
Oc2KFesB\n6Nw52a+xxMbG0KN7e/Lz85k3L/RXk5WfPgGif/9etGzZiF279jNlykzAeXlxaXYSonCNYtWqjWRn\n59Cs2TlUqVLJ7/H062dtfvppduj3U0iiCBAWi4VRT1hXln3xpcmcPn26YGisrPEkxFk1CrOanewu\n7Wft0J47bzF5eXmmxOAvkigCyNVXX0yTJons2LGHzz//ybGzndQohChIFJs37yQ/P98x4qmLn5ud\n7Bo3rk/DhnU5fPgoy5f/Y0oM/iKJIoCEhYU5ahXPvziJ7dutW4/L8h1CQMWKFahZsxrZ2Tmkp2ew\neMkqALp29e+IJzullKNWMXtOaK8mK4kiwAwc2IeGDeuyZctOvvl2PiALAgphZ5+hvXLlJvbsOUDl\nyhUd75nBPkz2p9m/mxaDPxiSKJRSfZRSqUqpLUqpkS4+V0qpt2yfr1VKtTWi3FAUHh7OEyOHADjW\n3ped7YSwsq/5NGfecsC6P7aZIwJ79GhP+fLlWL06ld2795sWh695fYeVUmHAu0BfoDlwvVKqeaHD\n+gKNbY9hwPvelhvKBg26jPr1ExyvpUYhhJV9Fdn167cD/lsIsCjlykVx4QWdAJgbwpPvjEjFHYEt\nWuttWuscYCrQv9Ax/YFPtdUyIE4pJT20RYiIiGDkiFsB63jtSpViTY5IiMBgr1HYmTXiyVlB81Po\nDpM1YnGU2oDztlO7gE4eHFMb2GtA+SHp1lsHsOi3FL8tdCZEMHDuj7BYLHTo0NK8YGz62Tq0F/y8\njJyc00RGRpgckfH8s4pWCSilhmFtniI+Pp5FixaV6jqZmZmlPtef3MU5/I5+AKb/PYLhXgZDjBAc\ncQZyjLm5eYSFWcjLy6fhOQmkpPxldkgANGhQi+3b9/L2O5Np17aJ4/1AvpclorX26gF0AeY5vX4c\neLzQMR8A1zu9TgVqFXftdu3a6dJauHBhqc/1p2CIU2I0TjDEGegxJjX9n8aSrO++Z5zZoTg8NvJ1\njSVZP/Twy2e8H+j3EkjRHvycN6KPYgXQWCnVQCkVCQwEZhU6ZhZws230U2fgqNZamp2EECWWfG4S\nAN27tTM5kgL2ZcdDtZ/C66YnrXWuUuoeYB4QBkzWWq9XSg23fT4BmA30A7YAJ4FbvS1XCFE2vfzS\ngyQmVuXqqy82OxSHLl2SqVQpltTUHWzdmkbDhnXNDslQhgxA1lrP1lonaa0baq3H2d6bYEsS2Go5\nd9s+P1drnWJEuUKIsicxsTaX9usSUCsqR0REcEnvLgDMmRt6s7QD504LIUQQczQ//RR6zU+SKIQQ\nwgB9+54HwMJFKzh58pTJ0RhLEoUQQhigRo2qdOjQguzsHH79NTCG7RpFEoUQQhjk0n7dgdBbTVYS\nhRBCGMR5OQ/rNIXQIIlCCCEM0q5dc2rUqMLOnXvZsGGr2eEYRhKFEEIYxGKx0LdP6C0SKIlCCCEM\nZG9+CqV+CkkUQghhoN69uxAWFsaff64iMzM0hslKohBCCAPFxVXkvPNak5eXR8rKf80OxxCSKIQQ\nwmCX2vaoWL58g8mRGEMShRBCGMzeT7F8+Qby8/NNjsZ7kiiEEMJgLVo0ol69WmQcyeTvvzeaHY7X\nJFEIIYTBlFIhtZe2JAohhPCBgmGykiiEEEK4cMEFHYmICGfFivWkpx8yOxyvSKIQQggfiIkpT+vW\njdBaM3fuErPD8YokCiGE8JHOnVoAwd/85FWiUEpVUUotUEpttv1ZuYjjJiul0pVS67wpTwghgkmn\nTs0BmDd/Cbm5uSZHU3re1ihGAr9orRsDv9heuzIF6ONlWUIIEVRqJ1SjSZNEjhw5ztKla80Op9S8\nTRT9gU9szz8BBrg6SGv9O3DYy7KEECLoFAyT/d3kSErP20QRr7Xea3u+D4j38npCCBFS+vW1LucR\nzKvJquJ2YVJK/QzUdPHRk8AnWus4p2MztNZF9VMk
Aj9qrVsWU94wYBhAfHx8u6lTp7qNryiZmZlU\nqFChVOf6UzDEKTEaJxjilBiNk5mZSWRkOQZc+QSnTmUz9cuxxMdXMTssh169eq3UWrcv9kCtdakf\nQCpQy/a8FpDq5thEYF1Jrt+uXTtdWgsXLiz1uf4UDHFKjMYJhjglRuPY4xxwxf0aS7KeMOEbcwMq\nBEjRHvws9rbpaRYw2PZ8MDDTy+sJIUTIubRfdyB4l/PwNlG8CFyslNoMXGR7jVIqQSk1236QUuor\nYCnQRCm1Syk1xMtyhRAiaPTtex4Av/y6nKysbJOjKblwb07WWh8CLnTx/h6gn9Pr670pRwghglnt\n2vG0bt2E1atT+f33lfTu3dXskEpEZmYLIYQf2Ec/BWPzkyQKIYTwg4LVZINvmKwkCiGE8IPOnZOp\nUqUSW7bsZNOmHWaHUyKSKIQQwg/CwsK4xNY3EWy1CkkUQgjhJ5f2s/VT/BRc/RSSKIQQwk8uuaQr\nSil++z2FzMyTZofjMUkUQgjhJ9WqVaZz52ROn87ll1+Wmx2OxyRRCCGEHxWsJhs8zU+SKIQQwo8K\nVpP9w74OXsCTRCGEEH7Upk1TatWqzu7d6axdu8nscDwiiUIIIfxIKUXfPta1n4JlmKwkCiGE8DP7\nMNmZsxYGRfOTJAohhPCziy7qTGxsDMuX/8Pb73xpdjjFkkQhhBB+VrFiBSZ99BQADz/yGn/++be5\nARVDEoUQQpjgmmt68/BDN5Obm8s11z3K3r0HzA6pSJIohBDCJC++cD89e7Zn376DXHf9CE6fPm12\nSC5JohBCCJOEh4cz9cuXqF27Bn/88TcjHnvD7JBckkQhhBAmio+vyrdfv0JERDhvvPk5X301x+yQ\nzuJVolBKVVFKLVBKbbb9WdnFMXWVUguVUhuUUuuVUvd7U6YQQoSaLl1a8cbrIwAYOuwp1q3bbHJE\nZ/K2RjES+EVr3Rj4xfa6sFzgYa11c6AzcLdSqrmX5QohREi5c/i1DLrpMk6ezOLKqx/i6NHjZofk\n4G2i6A98Ynv+CTCg8AFa671a679tz48DG4HaXpYrhBAhRSnFhPdH0apVEzZv3sngW0aTn59vdliA\n94kiXmu91/Z8HxDv7mClVCLQBgie9XWFEMJPypePZtq3rxIXF8vMWQt5cfxks0MCQBU3fVwp9TNQ\n08VHTwKfaK3jnI7N0Fqf1U9h+6wC8BswTms93U15w4BhAPHx8e2mTp1a7F/ClczMTCpUqFCqc/0p\nGOKUGI0TDHFKjMYpbZzLlq/n8Sc+RCnF+BeH06F9Ux9EB7169VqptW5f7IFa61I/gFSglu15LSC1\niOMigHnAQyW5frt27XRpLVy4sNTn+lMwxCkxGicY4pQYjeNNnGOfek9jSdZVq3fXO3bsNi4oJ0CK\n9uBnsbdNT7OAwbbng4GZhQ9QSilgErBRa/2al+UJIUSZMGb0HfTr241Dh45w1TUPk5WVbVos3iaK\nF4GLlVKbgYtsr1FKJSilZtuOOQ8YBFyglFpte/TzslwhhAhpFouFzz4dR4MGtVm5cgP33veiabGE\ne3Oy1voQcKGL9/cA/WzP/wSUN+UIIURZVKVKJaZ/9xpdzruZiZOm06njuQwdeqXf45CZ2UIIEcBa\nt27KB++PBuDue59nxYp1fo9BEoUQQgS4m2/+H3cOv5acnNNcfe0jHDyY4dfyJVEIIUQQeOP1EXTu\nnMzOnXu5/oaR5OXl+a1sSRRCCBEEIiMj+PbrV6hRowo//7KMMWPf81vZkiiEECJI1KkTz9dfvURY\nWBjPvzCRmTMX+qVcSRRCCBFEevbswIsvWBfhvvmWUWzatMPnZUqiEEKIIPPwQzdz9VUXc+xYJuNf\n+tjn5Xk1j0IIIYT/KaWYPOlpWrVKYsSjt/q8PEkUQggRhGJjYxj15DC/lCVNT0IIIdySRCGEEMIt\nSRRCCCHckkQh
hBDCLUkUQggh3JJEIYQQwi1JFEIIIdySRCGEEMItZd1fOzAppQ4A/5Xy9GrAQQPD\n8ZVgiFNiNE4wxCkxGifQ46yvta5e3EEBnSi8oZRK0Vq3NzuO4gRDnBKjcYIhTonROMESZ3Gk6UkI\nIYRbkiiEEEK4FcqJ4kOzA/BQMMQpMRonGOKUGI0TLHG6FbJ9FEIIIYwRyjUKIYQQBgj6RKGU6qOU\nSlVKbVFKjXTxuVJKvWX7fK1Sqq2f46urlFqolNqglFqvlLrfxTE9lVJHlVKrbY8x/ozRKY4dSql/\nbDGkuPjc7HvZxOkerVZKHVNKPVDoGFPupVJqslIqXSm1zum9KkqpBUqpzbY/KxdxrtvvsI9jfFkp\n9a/t33OGUiquiHPdfjd8HONTSqndTv+m/Yo41y/30U2cXzvFuEMptbqIc/1yLw2ltQ7aBxAGbAXO\nASKBNUDzQsf0A+YACugMLPdzjLWAtrbnscAmFzH2BH4MgPu5A6jm5nNT76WLf/t9WMeBm34vge5A\nW2Cd03svASNtz0cC44v4e7j9Dvs4xt5AuO35eFcxevLd8HGMTwGPePB98Mt9LCrOQp+/Cowx814a\n+Qj2GkVHYIvWepvWOgeYCvQvdEx/4FNttQyIU0rV8leAWuu9Wuu/bc+PAxuB2v4q32Cm3stCLgS2\naq1LOyHTUFrr34HDhd7uD3xie/4JMMDFqZ58h30Wo9Z6vtY61/ZyGVDHF2V7qoj76Am/3UdwH6dS\nSgHXAl/5qnx/C/ZEURtIc3q9i7N/CHtyjF8opRKBNsByFx93tVX/5yilWvg1sAIa+FkptVIp5WqP\nxYC5l8BAiv6PGAj3EiBea73X9nwfEO/imEC6p7dhrTG6Utx3w9futf2bTi6iCS+Q7mM3YL/WenMR\nn5t9L0ss2BNF0FBKVQCmAQ9orY8V+vhvoJ7WOhl4G/je3/HZnK+1bg30Be5WSnU3KQ63lFKRwOXA\nty4+DpR7eQZtbXMI2CGGSqkngVzgiyIOMfO78T7WJqXWwF6szTqB7Hrc1yaC4v+Zs2BPFLuBuk6v\n69jeK+kxPqWUisCaJL7QWk8v/LnW+pjWOtP2fDYQoZSq5s8YbWXvtv2ZDszAWp13Zvq9tOkL/K21\n3l/4g0C5lzb77U1ztj/TXRxj+j1VSt0CXAbcaEtoZ/Hgu+EzWuv9Wus8rXU+8FERZZt+HwGUUuHA\nlcDXRR1j5r0srWBPFCuAxkqpBrbfMgcCswodMwu42TZipzNw1Kk5wOds7ZWTgI1a69eKOKam7TiU\nUh2x/rsc8leMtnJjlFKx9udYOznXFTrM1HvppMjf2ALhXjqZBQy2PR8MzHRxjCffYZ9RSvUBRgCX\na61PFnGMJ98NX8bo3A92RRFlm3ofnVwE/Ku13uXqQ7PvZamZ3Zvu7QPrSJxNWEc8PGl7bzgw3PZc\nAe/aPv8HaO/n+M7H2uSwFlhte/QrFOM9wHqsIzWWAV1NuI/n2MpfY4sl4O6lLYYYrD/4Kzm9Z/q9\nxJq49gKnsbaPDwGqAr8Am4GfgSq2YxOA2e6+w36McQvWtn37d3NC4RiL+m74McbPbN+3tVh/+Ncy\n8z4WFaft/Sn276LTsabcSyMfMjNbCCGEW8He9CSEEMLHJFEIIYRwSxKFEEIItyRRCCGEcEsShRBC\nCLckUQghhHBLEoUQQgi3JFEIIYRw6/+6LO+6YUsGlAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f0b9e90fba8>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZEAAAEJCAYAAABVFBp5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFDBJREFUeJzt3H+sZOV93/H3p7umqahTm+Jc1kC7WEGpaFUTfEOslFqX\nGqz1OvJCG7VYjU3qVCvU4MaRomQrpMiV/wFXbqJUyHSdopA2CVGTYK9ggwOUEaoUuywuXhYbwgZR\nmfXC1li1e50qGPvbP+7ZdnyZe+/sMzNn5u6+X9LonnOeH+fLM2f3wzlzZ1NVSJLU4i/NuwBJ0vZl\niEiSmhkikqRmhogkqZkhIklqZohIkpoZIpKkZoaIJKmZISJJarZz3gW0uPDCC2v37t3N47/97W9z\n/vnnT6+gGbDG6dkOdVrj9GyHOrdDjU888cTXq+otW3asqm33esc73lGTePTRRyca3wdrnJ7tUKc1\nTs92qHM71AgcqTH+PvZxliSpmSEiSWpmiEiSmhkikqRmhogkqZkhIklqZohIkpoZIpKkZtvyG+uT\n2H3gAQBeWJlw/O3vm7yGxjkWqoaV5hI0RWfDNaXtyTsRSVIzQ0SS1MwQkSQ1M0QkSc0MEUlSM0NE\nktTMEJEkNZtKiCTZk+TZJMeTHNik348leS3JTw0deyHJU0meTHJkGvVIkvox8ZcNk+wA7gSuB14E\nHk9yqKq+PKLfHcAfj5jm2qr6+qS1SJL6NY07kauB41X1fFW9CtwL7BvR7yPAHwCnpnBOSdICmMY/\ne3Ix8NWh/ReBHx/ukORi4EbgWuDH1o0v4OEk3wX+fVUdHHWSJPuB/QBLS0sMBoOJip73+K3mWF1d\n3fIcs66hj/F9GGct521aNc7y/Ry3xnmv9bn0fi+Cvv7trF8Dfrmqvpdkfds1VXUiyQ8BDyV5pqoe\nW9+pC5eDAMvLy7WystJWyYNr/77P3MaPOcdgMNi4vacaZjq+R5uu5YKYuMYe3s8ta1yQa+KceL8X\nyDRC5ARw6dD+Jd2xYcvAvV2AXAjsTfJaVX2mqk4AVNWpJPex9njsdSEiSVo80/hM5HHg8iSXJTkP\nuAk4NNyhqi6rqt1VtRv4feBfVNVnkpyf5I0ASc4H3gMcm0JNkqQeTHwnUlWvJbkV+BywA7i7qp5O\nckvXftcmw5eA+7o7lJ3A71TVg5PWJEnqx1Q+E6mqw8DhdcdGhkdV/czQ9vPA26dRgySpf35jXZLU\nzBCRJDUzRCRJzQwRSVIzQ0SS1MwQkSQ1M0QkSc0MEUlSM0NEktTMEJEkNTNEJEnNDBFJUjNDRJLU\nzBCRJDUzRCRJzQwRSVIzQ0SS1MwQkSQ1M0QkSc0MEUlSM0NEktTMEJEkNZtKiCTZk+TZJMeTHBjR\nvi/J0SRPJjmS5Jpxx0qSFtfEIZJkB3An8F7gCuADSa5Y1+0R4O1VdSXwYeA3zmCsJGlBTeNO5Grg\neFU9X1WvAvcC+4Y7VNVqVVW3ez5Q446VJC2uaYTIxcBXh/Zf7I59nyQ3JnkGeIC1u5Gxx0qSFtPO\nvk5UVfcB9yV5F/Bx4LozGZ9kP7AfYGlpicFgMFE98x6/1Ryrq6tbnmPWNfQxvg/jrOW8TavGWb6f\n49Y477U+l97vRTCNEDkBXDq0f0l3bKSqeizJ25JceCZjq+ogcBBgeXm5VlZW2qp98AEA5jZ+zDkG\ng8HG7T3VMNPxPdp0LRfExDX28H5uWeOCXBPnxPu9QKbxOOtx4PIklyU5D7gJODTcIckPJ0m3fRXw\nl4FXxhkrSVpcE9+JVNVrSW4FPgfsAO6uqqeT3NK13wX8I+BDSb4D/B/gn3QftI8cO2lNkqR+TOUz\nkao6DBxed+yuoe07gDvGHStJ2h78xrokqZkhIklqZohIkpoZIpKkZoaIJKmZISJJamaISJKaGSKS\npGaGiCSpmSEiSWpmiEiSmhkikqRmho
gkqZkhIklqZohIkpoZIpKkZoaIJKmZISJJamaISJKaGSKS\npGaGiCSpmSEiSWo2lRBJsifJs0mOJzkwov1vJfmTJH+R5BfXtb2Q5KkkTyY5Mo16JEn92DnpBEl2\nAHcC1wMvAo8nOVRVXx7q9g3gXwI3bDDNtVX19UlrkST1axp3IlcDx6vq+ap6FbgX2DfcoapOVdXj\nwHemcD5J0oKYRohcDHx1aP/F7ti4Cng4yRNJ9k+hHklSTyZ+nDUF11TViSQ/BDyU5Jmqemx9py5g\n9gMsLS0xGAwmOum8x281x+rq6pbnmHUNfYzvwzhrOW/TqnGW7+e4Nc57rc+l93sRTCNETgCXDu1f\n0h0bS1Wd6H6eSnIfa4/HXhciVXUQOAiwvLxcKysrbdU++AAAcxs/5hyDwWDj9p5qmOn4Hm26lgti\n4hp7eD+3rHFBrolz4v1eINN4nPU4cHmSy5KcB9wEHBpnYJLzk7zx9DbwHuDYFGqSJPVg4juRqnot\nya3A54AdwN1V9XSSW7r2u5JcBBwBfhD4XpKPAlcAFwL3JTldy+9U1YOT1iRJ6sdUPhOpqsPA4XXH\n7hrafom1x1zrfQt4+zRqkCT1z2+sS5KaGSKSpGaGiCSpmSEiSWpmiEiSmhkikqRmhogkqZkhIklq\nZohIkpoZIpKkZoaIJKmZISJJamaISJKaGSKSpGaGiCSpmSEiSWpmiEiSmhkikqRmhogkqZkhIklq\nZohIkpoZIpKkZoaIJKnZVEIkyZ4kzyY5nuTAiPYk+fWu/WiSq8YdK0laXBOHSJIdwJ3Ae4ErgA8k\nuWJdt/cCl3ev/cCnzmCsJGlBTeNO5GrgeFU9X1WvAvcC+9b12Qf8Vq35PPCmJLvGHCtJWlCpqskm\nSH4K2FNV/7zb/yDw41V161Cf+4Hbq+q/dvuPAL8M7N5q7CjLy8t15MiR1oLbxknSdjPB3+9Jnqiq\n5a367Ww+Q8+S7GftURhLS0sMBoOmeVamV5IkLbTWvyfPxDRC5ARw6dD+Jd2xcfq8YYyxAFTVQeAg\nrN2JrKystFVbxWAwoHl8T86FGncfeACAF25/30zHb1ZnXzXMeo4+zPqaXIS17KuGWV6T3zdHD3+H\nTOMzkceBy5NcluQ84Cbg0Lo+h4APdb+l9U7gm1V1csyxkqQFNfGdSFW9luRW4HPADuDuqno6yS1d\n+13AYWAvcBz4c+CfbTZ20pokSf2YymciVXWYtaAYPnbX0HYBPzfuWEnS9uA31iVJzQwRSVIzQ0SS\n1MwQkSQ1M0QkSc0MEUlSM0NEktTMEJEkNTNEJEnNDBFJUjNDRJLUzBCRJDUzRCRJzQwRSVIzQ0SS\n1MwQkSQ1M0QkSc0MEUlSM0NEktTMEJEkNTNEJEnNDBFJUrOJQiTJBUkeSvJc9/PNG/S7O8mpJMfW\nHf9YkhNJnuxeeyepR5LUr0nvRA4Aj1TV5cAj3f4ovwns2aDtV6vqyu51eMJ6JEk9mjRE9gH3dNv3\nADeM6lRVjwHfmPBckqQFM2mILFXVyW77JWCpYY6PJDnaPfIa+ThMkrSYdm7VIcnDwEUjmm4b3qmq\nSlJneP5PAR8Hqvv5SeDDG9SxH9gPsLS0xGAwOMNT/X+rq6sTje/DuVTjpHNsNX6cOmddQ19zzFJf\n1+QirOWsa+jjmpzWHFvZMkSq6rqN2pK8nGRXVZ1Msgs4dSYnr6qXh+b6NHD/Jn0PAgcBlpeXa2Vl\n5UxO9X0GgwGTjO/DOVHjgw8AtM8x5vhN6+yphpnP0YOZX5OLsJY91TDTa3Jac4xp0sdZh4Cbu+2b\ngc+eyeAueE67ETi2UV9J0uKZNERuB65P8hxwXbdPkrcm+X+/aZXkd4E/AX4kyYtJfrZr+kSSp5Ic\nBa4FfmHCeiRJPdrycdZmquoV4N0jjn8N2Du0/4ENxn9wkvNLkubLb6xLkpoZIpKkZoaIJKmZISJJ\nam
aISJKaGSKSpGaGiCSpmSEiSWpmiEiSmhkikqRmhogkqZkhIklqZohIkpoZIpKkZoaIJKmZISJJ\namaISJKaGSKSpGaGiCSpmSEiSWpmiEiSmhkikqRmhogkqdlEIZLkgiQPJXmu+/nmEX1+IMl/S/Kl\nJE8n+ddnMl6StLgmvRM5ADxSVZcDj3T76/0F8A+q6u3AlcCeJO88g/GSpAU1aYjsA+7ptu8Bbljf\nodasdrtv6F417nhJ0uLaOeH4pao62W2/BCyN6pRkB/AE8MPAnVX1hTMZ382xH9gPsLS0xGAwaC56\ndXV1ovF9OJdqnHSOrcaPU+esa+hrjlnq65pchLWcdQ19XJPTmmMrW4ZIkoeBi0Y03Ta8U1WVpEb0\no6q+C1yZ5E3AfUn+TlUdG3d8134QOAiwvLxcKysrW5W+ocFgwCTj+3BO1PjgAwDtc4w5ftM6e6ph\n5nP0YObX5CKsZU81zPSanNYcY9oyRKrquo3akrycZFdVnUyyCzi1xVz/K8mjwB7gGHBG4yVJi2XS\nz0QOATd32zcDn13fIclbujsQkvwV4HrgmXHHS5IW16QhcjtwfZLngOu6fZK8Ncnhrs8u4NEkR4HH\ngYeq6v7NxkuStoeJPlivqleAd484/jVgb7d9FPjRMxkvSdoe/Ma6JKmZISJJamaISJKaGSKSpGaG\niCSpmSEiSWpmiEiSmhkikqRmhogkqZkhIklqZohIkpoZIpKkZoaIJKmZISJJamaISJKaGSKSpGaG\niCSpmSEiSWpmiEiSmhkikqRmhogkqZkhIklqNlGIJLkgyUNJnut+vnmTvjuS/Pck9w8d+1iSE0me\n7F57J6lHktSvSe9EDgCPVNXlwCPd/kZ+HvjKiOO/WlVXdq/DE9YjSerRpCGyD7in274HuGFUpySX\nAO8DfmPC80mSFsikIbJUVSe77ZeApQ36/RrwS8D3RrR9JMnRJHdv9jhMkrR4dm7VIcnDwEUjmm4b\n3qmqSlIjxv8kcKqqnkiysq75U8DHgep+fhL48AZ17Af2AywtLTEYDLYqfUOrq6sTje/DuVTjpHNs\nNX6cOmddQ19zzFJf1+QirOWsa+jjmpzWHFvZMkSq6rqN2pK8nGRXVZ1Msgs4NaLb3wPe331o/gPA\nDyb5T1X101X18tBcnwbuHzH+dB0HgYMAy8vLtbKyslXpGxoMBkwyvg/nRI0PPgDQPseY4zets6ca\nZj5HD2Z+TS7CWvZUw0yvyWnNMaZJH2cdAm7utm8GPru+Q1X9q6q6pKp2AzcB/6WqfhqgC57TbgSO\nTViPJKlHk4bI7cD1SZ4Druv2SfLWJOP8ptUnkjyV5ChwLfALE9YjSerRlo+zNlNVrwDvHnH8a8Dr\nvvNRVQNgMLT/wUnOL0maL7+xLklqZohIkpoZIpKkZoaIJKmZISJJamaISJKaTfQrvtIkXrj9fXMd\nv0g1LPo/edKHRVjLs+ma6ot3IpKkZoaIJKmZISJJamaISJKaGSKSpGaGiCSpmSEiSWpmiEiSmhki\nkqRmqap513DGkvxP4H9MMMWFwNenVM6sWOP0bIc6rXF6tkOd26HGv1lVb9mq07YMkUklOVJVy/Ou\nYzPWOD3boU5rnJ7tUOd2qHFcPs6SJDUzRCRJzc7VEDk47wLGYI3Tsx3qtMbp2Q51bocax3JOfiYi\nSZqOc/VORJI0BWdtiCTZk+TZJMeTHBjRniS/3rUfTXLVHGq8NMmjSb6c5OkkPz+iz0qSbyZ5snv9\nyhzqfCHJU935j4xon+taJvmRofV5Msm3knx0XZ+5rGOSu5OcSnJs6NgFSR5K8lz3880bjN30Gp5x\njf8myTPd+3lfkjdtMHbTa6OHOj+W5MTQ+7p3g7HzXMvfG6rvhSRPbjC2t7Wcqqo6617ADuDPgLcB\n5wFfAq5Y12cv8EdAgHcCX5hDnbuAq7rtNwJ/OqLOFeD+Oa/nC8CF
m7TPfS3XvfcvsfY77nNfR+Bd\nwFXAsaFjnwAOdNsHgDs2+O/Y9BqecY3vAXZ223eMqnGca6OHOj8G/OIY18Tc1nJd+yeBX5n3Wk7z\ndbbeiVwNHK+q56vqVeBeYN+6PvuA36o1nwfelGRXn0VW1cmq+mK3/b+BrwAX91nDlMx9LYe8G/iz\nqprky6hTU1WPAd9Yd3gfcE+3fQ9ww4ih41zDM6uxqv64ql7rdj8PXDKLc5+JDdZyHHNdy9OSBPjH\nwO/O4tzzcraGyMXAV4f2X+T1fzmP06c3SXYDPwp8YUTzT3SPFf4oyd/utbA1BTyc5Ikk+0e0L9Ja\n3sTGf0jnvY6nLVXVyW77JWBpRJ9FWtMPs3anOcpW10YfPtK9r3dv8GhwUdby7wMvV9VzG7Qvwlqe\nsbM1RLaVJH8V+APgo1X1rXXNXwT+RlX9XeDfAZ/puz7gmqq6Engv8HNJ3jWHGraU5Dzg/cB/HtG8\nCOv4OrX2HGNhf0UyyW3Aa8Bvb9Bl3tfGp1h7THUlcJK1x0WL6gNsfhcy77VscraGyAng0qH9S7pj\nZ9pn5pK8gbUA+e2q+sP17VX1rapa7bYPA29IcmGfNVbVie7nKeA+1h4PDFuItWTtD98Xq+rl9Q2L\nsI5DXj79uK/7eWpEn7mvaZKfAX4S+Kdd2L3OGNfGTFXVy1X13ar6HvDpDc6/CGu5E/iHwO9t1Gfe\na9nqbA2Rx4HLk1zW/d/pTcChdX0OAR/qfrPoncA3hx4x9KJ7RvofgK9U1b/doM9FXT+SXM3ae/ZK\njzWen+SNp7dZ+8D12Lpuc1/Lzob/pzfvdVznEHBzt30z8NkRfca5hmcmyR7gl4D3V9Wfb9BnnGtj\nptZ99nbjBuef61p2rgOeqaoXRzUuwlo2m/cn+7N6sfYbQ3/K2m9l3NYduwW4pdsOcGfX/hSwPIca\nr2HtUcZR4MnutXddnbcCT7P2GyWfB36i5xrf1p37S10di7qW57MWCn9t6Njc15G1UDsJfIe1Z/E/\nC/x14BHgOeBh4IKu71uBw5tdwz3WeJy1zxFOX5d3ra9xo2uj5zr/Y3fNHWUtGHYt2lp2x3/z9LU4\n1HduaznNl99YlyQ1O1sfZ0mSemCISJKaGSKSpGaGiCSpmSEiSWpmiEiSmhkikqRmhogkqdn/Ba6n\nc985ABRfAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f0b9fcb5a58>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYoAAAEJCAYAAACKWmBmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFsFJREFUeJzt3X/sXfV93/Hne07YJugGlO5iMI2JZqVyt4TAdxR1LPtS\nTGVMVNMf6mBd4qyJvkILXSItajwhRZn6x8x+VFUnVsttEe6W1d2WUCzsmILrqyhqSW0yQvgZu8xT\nAAMK6UK+ZCpx8t4f93i7fHPv517fc+6598LzIX31PT8+53ze3889vi+fc+6PyEwkSRrmr8y6AEnS\nfDMoJElFBoUkqcigkCQVGRSSpCKDQpJUZFBIkooMCklSkUEhSSp626wLKLnoooty48aNE2372muv\nce655zZb0BQsQp3W2JxFqNMamzPvdT7yyCPfyMwfGdkwM+f256qrrspJHTlyZOJt27QIdVpjcxah\nTmtszrzXCRzLMZ6LvfQkSSoyKCRJRQaFJKnIoJAkFRkUkqQig0KSVGRQSJKKDApJUlEjQRERWyPi\nmYg4ERE7C+3+XkScjohfaKJflW3ceYCNOw/MugxJC652UETEOuAu4EZgM3BrRGwe0u5O4I/q9ilJ\nak8TZxRXAycy89nMfB3YB2wf0O5XgM8CLzfQpySpJdH7uI8aO+hdRtqamR+p5j8A/ERm3t7X5lLg\nvwDXAXcD92fmfx+yvxVgBaDT6Vy1b9++iepaXV3lvPPOm2jbNk2zzg8deg2Ae7bW+1CyRRjLRagR\nFqNOa2zOvNd53XXXPZKZS6PatfXpsb8BfDIzvx8RxYaZuQfYA7C0tJTLy8sTddjtdpl02zZNtc5D\nvfsTdfe/CGO5CDXCYtRpjc1ZlDpHaSIongcu65vfUC3rtwTsq0LiImBbRJzOzD9soP+5dOYm8sld\nN824Ekmqp4mgOApsiojL6QXELcA/7m+QmZefmY6Ie+hdenrThoQkvZnUDorMPB0RtwMPAOuAuzPz\niYi4rVq/u24fkqTZaeQeRWYeBA6uWTYwIDLzQ030KUlqh+/MliQVGRRD+K5mSeoxKCRJRQaFJKnI\noJAkFRkU0ltI3Xtv3rt7azIoJElFBoUkqcigkCQVGRSSpCKDQpJUZFDMMV9hImkeGBSSpCKDQpJU\n1NZXoWpB/b9v6luebR2SZqeRM4qI2BoRz0TEiYjYOWD99oh4LCIejYhjEXFtE/1Kkqav9hlFRKwD\n7gJuAJ4DjkbE/sx8sq/ZYWB/ZmZEvBv4r8CP1e1bkjR9TZxRXA2cyMxnM/N1YB+wvb9BZq5mZlaz\n5wKJJGkhNBEUlwJf75t/rlr2BhHxsxHxNHAA+OUG+pUktaC1m9mZeS9wb0S8D/g1YMugdhGxAqwA\ndDodut3uRP2trq5OvG2/uvsYtf04dU67hrb2MU1NPd7TNi91lmpo45isa17GcZRFqXOUJoLieeCy\nvvkN1bKBMvMLEfHOiLgoM78xYP0eYA/A0tJSLi8vT1RUt9tl0m0BONR7tc/E+xhz+2KdLdUw9X20\noPbj3ZKZ1znG4znVY7IhMx/HMS1KnaM0cenpKLApIi6PiHOAW4D9/Q0i4m9HRFTTVwJ/FXilgb4l\nSVNW+4wiM09HxO3AA8A64O7MfCIibqvW7wZ+HvhgRHwX+D/AP+q7uS1JmmON3KPIzIPAwTXLdvdN\n3wnc2URfkqR2+REekqQig0KSVGRQSJKKDApJUpFBIUkqMigkSUUGhSSpyKCQJBUZFJKkIoNCklRk\nUEiSigwKSVKRQSFJKjIoJElFBoUkqaiRoIiIrRHxTESciIidA9b/UkQ8FhFfjYg/iYj3NNGvJGn6\nagdFRKwD7gJuBDYDt0bE5jXN/ifwDzPz7wK/RvWd2JKk+dfEN9xdDZzIzGcBImIfsB148kyDzPyT\nvvYPAxsa6HeojTt7XwB/cnmavUjSW0
MTl54uBb7eN/9ctWyYDwOfb6BfSVILGvnO7HFFxHX0guLa\nQpsVYAWg0+nQ7XYn7q/Otk3tY9T2q6urI9tMu4a29jFN44zjPJiXOks1tHFM1jUv4zjKotQ5ShNB\n8TxwWd/8hmrZG0TEu4HfAW7MzFeG7Swz91Ddw1haWsrl5eWzr+hQ79LTRNs2tY8xt+92u8PbtFTD\n1PfRguI4zpGZ1znG4znVY7IhMx/HMS1KnaM0cenpKLApIi6PiHOAW4D9/Q0i4keBzwEfyMyvNdCn\nJKkltc8oMvN0RNwOPACsA+7OzCci4rZq/W7gU8APA/8xIgBOZ+ZS3b4lSdPXyD2KzDwIHFyzbHff\n9EeAjzTRlySpXb4zW5JUZFBIkooMCklSkUEhSSoyKCRJRQaFJKnIoJAkFRkUkqQig0KSVGRQSJKK\nDApJUpFBIUkqMigkSUUGhSSpyKCQJBU1EhQRsTUinomIExGxc8D6H4uIP42Iv4yITzTRpySpHbW/\nuCgi1gF3ATcAzwFHI2J/Zj7Z1+ybwD8Hbq7bnySpXU2cUVwNnMjMZzPzdWAfsL2/QWa+nJlHge82\n0J8kqUVNBMWlwNf75p+rlkmS3gQa+c7sJkXECrAC0Ol06Ha7E++rzrZN7WPU9qurqyPbTLuGtvYx\nTeOM4zyYlzpLNbRxTNY1L+M4yqLUOUoTQfE8cFnf/IZq2UQycw+wB2BpaSmXl5fPfieHDgAw0bZN\n7WPM7bvd7vA2LdUw9X20oDiOc2TmdY7xeE71mGzIzMdxTItS5yhNXHo6CmyKiMsj4hzgFmB/A/uV\nJM2B2mcUmXk6Im4HHgDWAXdn5hMRcVu1fndEXAwcA/4G8P2I+DiwOTNfrdu/JGm6GrlHkZkHgYNr\nlu3um36R3iUpSdKC8Z3ZkqQig0KSVGRQSJKKDApJUpFBIUkqMigkSUUGhSSpyKCQJBUZFJKkIoNC\nklRkUEiSigwKSVKRQSFJKjIoJElFBoUkqaiRoIiIrRHxTESciIidA9ZHRPxmtf6xiLiyiX4lSdNX\nOygiYh1wF3AjsBm4NSI2r2l2I7Cp+lkBfqtuv5KkdjRxRnE1cCIzn83M14F9wPY1bbYDv5c9DwPn\nR8T6BvqWJE1ZZGa9HUT8ArA1Mz9SzX8A+InMvL2vzf3Arsz8YjV/GPhkZh4r7XtpaSmPHSs2GVbU\n2W8jSYuoxnN4RDySmUuj2jXyndlNiogVepen6HQ6dLvds97HcrMlSdLcmuQ58mw1ERTPA5f1zW+o\nlp1tGwAycw+wB3pnFMvLy2dfUSbdbpeJtm3ZItRZp8aNOw8AcHLXTRP3P84+RtVYt462/o5p1zCO\naR+T8zAObdVQGsvGamjh+aOJexRHgU0RcXlEnAPcAuxf02Y/8MHq1U/XAN/KzFMN9C1JmrLaZxSZ\neToibgceANYBd2fmExFxW7V+N3AQ2AacAL4D/NO6/UqS2tHIPYrMPEgvDPqX7e6bTuCjTfQlSWqX\n78yWJBXN3aueJA027ZvY0jAGhabKJzdp8XnpSZJUZFBIkooMCklSkUEhSSryZrakVvkCh8XjGYUk\nqcigkCQVGRSSpCKDQpJUZFBIkooMCklSkUEhSSoyKCRJRbWCIiIujIgHI+J49fuCIe3ujoiXI+Lx\nOv1JktpX94xiJ3A4MzcBh6v5Qe4BttbsS5I0A3WDYjuwt5reC9w8qFFmfgH4Zs2+JEkzUPeznjqZ\neaqafhHo1NwfEbECrAB0Oh263e5E+1ldXZ142zYtQp3zUmOphnFrrPt3NDEO8zCWJfPyeI8yD4/F\nqO3HGct5OCZHGRkUEfEQcPGAVXf0z2RmRkTWLSgz9wB7AJaWlnJ5eXmi/XS7XSbdtk2LUOfMazx0\nAKBYw8gax9hH3Rpa2UcLZv54jzIPj8WY2xfHch6OyTGNDIrM3DJsXUS8FBHrM/NURKwHXm60OknS\nzN
W9R7Ef2FFN7wDuq7k/SdKcqRsUu4AbIuI4sKWaJyIuiYiDZxpFxO8Dfwq8KyKei4gP1+xXktSS\nWjezM/MV4PoBy18AtvXN31qnH0nS7PjObElSkUEhSSoyKCRJRQaFJKnIoJAkFRkUkqQig0KSVGRQ\nSJKKDApJUpFBIUkqMigkSUUGhSSpyKCQJBUZFJKkIoNCklRUKygi4sKIeDAijle/LxjQ5rKIOBIR\nT0bEExHxsTp9SpLaVfeMYidwODM3AYer+bVOA/8iMzcD1wAfjYjNNfuVJLWkblBsB/ZW03uBm9c2\nyMxTmfnlavrbwFPApTX7lSS1pNZXoQKdzDxVTb8IdEqNI2Ij8F7gS4U2K8AKQKfTodvtTlTY6urq\nxNu2aRHqnJcaSzWMW2Pdv6OJcZiHsSyZl8d7lHl4LEZtP85YzsMxOcrIoIiIh4CLB6y6o38mMzMi\nsrCf84DPAh/PzFeHtcvMPcAegKWlpVxeXh5V4kDdbpdJt23TItQ58xoPHQAo1jCyxjH2UbeGVvbR\ngpk/3qPMw2Mx5vbFsZyHY3JMI4MiM7cMWxcRL0XE+sw8FRHrgZeHtHs7vZD4TGZ+buJqJUmtq3uP\nYj+wo5reAdy3tkFEBPC7wFOZ+es1+5MktaxuUOwCboiI48CWap6IuCQiDlZt/j7wAeCnIuLR6mdb\nzX4lSS2pdTM7M18Brh+w/AVgWzX9RSDq9CNJmh3fmS1JKjIoJElFBoUkqcigkCQVGRSSpCKDQpJU\nZFBIkooMCklSkUEhSSoyKCRJRQaFJKnIoJAkFRkUkqQig0KSVGRQSJKKagVFRFwYEQ9GxPHq9wUD\n2vy1iPiziPhKRDwREf+qTp+SpHbVPaPYCRzOzE3A4Wp+rb8Efioz3wNcAWyNiGtq9itJakndoNgO\n7K2m9wI3r22QPavV7Nurn6zZrySpJXWDopOZp6rpF4HOoEYRsS4iHgVeBh7MzC/V7FeS1JKR35kd\nEQ8BFw9YdUf/TGZmRAw8U8jM7wFXRMT5wL0R8Xcy8/Eh/a0AKwCdTodutzuqxIFWV1cn3rZNi1Dn\nvNRYqmHcGuv+HU2MwzyMZcm8PN7D3LP13MZqnPbxME6d83BMjjIyKDJzy7B1EfFSRKzPzFMRsZ7e\nGUNpX/87Io4AW4GBQZGZe4A9AEtLS7m8vDyqxIG63S6TbtumRahz5jUeOgBQrGFkjWPso24Nreyj\nBTN/vMdQu8aWjodinfNwTI6p7qWn/cCOanoHcN/aBhHxI9WZBBHx14EbgKdr9itJakndoNgF3BAR\nx4Et1TwRcUlEHKzarAeORMRjwFF69yjur9mvJKklIy89lWTmK8D1A5a/AGyrph8D3lunH0nS7PjO\nbElSUa0zCqkNJ3fdNOsSpLc0g0KSztJb7T8vXnqSJBUZFJKkIi89SXrLeatdOqrLMwpJUpFBIUkq\nMigkSUUGhSSpyKCQJBX5qidJWkBtvnLLMwpJUpFBIUkqMigkSUW1giIiLoyIByPiePX7gkLbdRHx\nPyLCLy2SpAVS94xiJ3A4MzcBh6v5YT4GPFWzP2lhndx1E/dsPXfWZUhnrW5QbAf2VtN7gZsHNYqI\nDcBNwO/U7E+S1LK6QdHJzFPV9ItAZ0i73wB+Ffh+zf4kSS2LzCw3iHgIuHjAqjuAvZl5fl/bv8jM\nN9yniIj3A9sy859FxDLwicx8f6G/FWAFoNPpXLVv375x/5Y3WF1d5bzzzpto2zYtQp1vhho/dOg1\ngIkv/dTd/ow3w1jOg0WoEea/zuuuu+6RzFwa2TAzJ/4BngHWV9PrgWcGtPnXwHPASXpnHd8B/vM4\n+7/qqqtyUkeOHJl42zYtQp1vhhrf8cn78x2fvH/i/dfd/ow3w1jOg0WoMXP+6wSO5RjPxXUvPe0H\ndlTTO4D7BgTRv8zMDZm5EbgF+OPM/Cc1+5UktaRuUOwCboiI48CW
ap6IuCQiDtYtTpI0e7U+6ykz\nXwGuH7D8BWDbgOVdoFunT0lSu3xntiSpyKCQJBUZFJKkIoNCklRkUEiSigwKSVKRQSFJKvI7s/WW\n0Ob3C0tvNp5RSJKKPKOQxuAZid7KPKOQJBUZFJKkIoNCklRkUEiSigwKSVJRrVc9RcSFwB8AG+l9\n1ekvZuZfDGh3Evg28D3gdI7zHa2SpLlQ94xiJ3A4MzcBh6v5Ya7LzCsMCUlaLHWDYjuwt5reC9xc\nc3+SpDlTNyg6mXmqmn4R6Axpl8BDEfFIRKzU7FOS1KKR9ygi4iHg4gGr7uifycyMiByym2sz8/mI\n+FvAgxHxdGZ+YUh/K8CZMFmNiGdG1TjERcA3Jty2TYtQpzU2ZxHqtMbmzHud7xinUWQOe24fY+Pe\nk/hyZp6KiPVANzPfNWKbTwOrmfnvJu54vNqOLcL9kEWo0xqbswh1WmNzFqXOUepeetoP7KimdwD3\nrW0QEedGxA+dmQZ+Gni8Zr+SpJbUDYpdwA0RcRzYUs0TEZdExMGqTQf4YkR8Bfgz4EBmHqrZrySp\nJbXeR5GZrwDXD1j+ArCtmn4WeE+dfia0ZwZ9TmIR6rTG5ixCndbYnEWps6jWPQpJ0pufH+EhSSpa\n+KCIiK0R8UxEnIiIH3hnePT8ZrX+sYi4suX6LouIIxHxZEQ8EREfG9BmOSK+FRGPVj+farPGvjpO\nRsRXqxqODVg/67F8V98YPRoRr0bEx9e0mclYRsTdEfFyRDzet+zCiHgwIo5Xvy8Ysm3xGJ5yjf82\nIp6uHs97I+L8IdsWj40p1/jpiHi+7zHdNmTbVsaxUOcf9NV4MiIeHbJtK2PZqMxc2B9gHfDnwDuB\nc4CvAJvXtNkGfB4I4BrgSy3XuB64spr+IeBrA2pcBu6fg/E8CVxUWD/TsRzw2L8IvGMexhJ4H3Al\n8Hjfsn8D7KymdwJ3Dvk7isfwlGv8aeBt1fSdg2oc59iYco2fBj4xxvHQyjgOq3PN+n8PfGqWY9nk\nz6KfUVwNnMjMZzPzdWAfvY8V6bcd+L3seRg4v3rPRysy81Rmfrma/jbwFHBpW/03bKZjucb1wJ9n\n5v+aUf9vkL03kH5zzeJxPuJmnGN4ajVm5h9l5ulq9mFgwzT6HteQcRxHa+MI5TojIoBfBH5/Wv23\nbdGD4lLg633zz/GDT8LjtGlFRGwE3gt8acDqn6xO/z8fET/eamH/36iPWpmbsQRuYfg/xHkYSxjv\nI27maUx/md4Z4yCz/hieX6ke07uHXMKbp3H8B8BLmXl8yPpZj+VZW/SgWBgRcR7wWeDjmfnqmtVf\nBn40M98N/AfgD9uur3JtZl4B3Ah8NCLeN6M6iiLiHOBngP82YPW8jOUbZO+aw9y+xDAi7gBOA58Z\n0mSWx8Zv0bukdAVwit5lnXl2K+WziYX4d9Zv0YPieeCyvvkN1bKzbTNVEfF2eiHxmcz83Nr1mflq\nZq5W0weBt0fERW3WWPX9fPX7ZeBeeqfz/WY+lpUbgS9n5ktrV8zLWFZeOnNprvr98oA2Mx/TiPgQ\n8H7gl6pA+wFjHBtTk5kvZeb3MvP7wG8P6Xvm4wgQEW8Dfo7e9/QMNMuxnNSiB8VRYFNEXF79L/MW\neh8r0m8/8MHqFTvXAN/quxwwddX1yt8FnsrMXx/S5uKqHRFxNb3H5ZW2aqz6HeejVmY6ln2G/o9t\nHsayz8iPuGG8Y3hqImIr8KvAz2Tmd4a0menH8Ky5D/azQ/qe6Tj22QI8nZnPDVo567Gc2Kzvptf9\nofdKnK/Re8XDHdWy24DbqukA7qrWfxVYarm+a+ldcngMeLT62bamxtuBJ+i9UuNh4CdnMI7vrPr/\nSlXL3I1lVcO59J74/2bfspmPJb3gOgV8l9718Q8DP0zvC72OAw8BF1ZtLwEOlo7hFms8Qe/a/plj\nc/faGocdGy3W+J+q4+0xek/+
62c5jsPqrJbfc+ZY7Gs7k7Fs8sd3ZkuSihb90pMkacoMCklSkUEh\nSSoyKCRJRQaFJKnIoJAkFRkUkqQig0KSVPR/AVFyTQNpd27sAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f0b9e907710>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.axes._axes.Axes at 0x7f0b9ea04d68>"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "batch_i = 4\n",
    "\n",
    "data = np.array(inputs[batch_i][0])[:,[0]]\n",
    "labels = np.array(targets[batch_i][0])[:]\n",
    "predicted = np.array(outputs[batch_i][0])[:,[1]]\n",
    "#print(data)\n",
    "#print(labels)\n",
    "#print (predicted)\n",
    "gl.plot(np.array(range(data.size)), data, nf = 1)\n",
    "gl.stem(np.array(range(data.size)),labels - 0.5, nf = 1)\n",
    "gl.stem(np.array(range(data.size)),predicted - 0.5, nf = 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
