{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import sys, os, _pickle as pickle\n",
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "import nltk\n",
    "from sklearn.metrics import f1_score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "data_dir = '../data'\n",
    "ckpt_dir = '../checkpoint'\n",
    "word_embd_dir = '../checkpoint/word_embd'\n",
    "model_dir = '../checkpoint/modelv3'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "word_embd_dim = 100\n",
    "pos_embd_dim = 25\n",
    "dep_embd_dim = 25\n",
    "word_vocab_size = 400001\n",
    "pos_vocab_size = 10\n",
    "dep_vocab_size = 21\n",
    "relation_classes = 19\n",
    "word_state_size = 100\n",
    "other_state_size = 100\n",
    "batch_size = 10\n",
    "channels = 3\n",
    "lambda_l2 = 0.0001\n",
    "max_len_path = 10"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "with tf.name_scope(\"input\"):\n",
    "    path_length = tf.placeholder(tf.int32, shape=[2, batch_size], name=\"path1_length\")\n",
    "    word_ids = tf.placeholder(tf.int32, shape=[2, batch_size, max_len_path], name=\"word_ids\")\n",
    "    pos_ids = tf.placeholder(tf.int32, [2, batch_size, max_len_path], name=\"pos_ids\")\n",
    "    dep_ids = tf.placeholder(tf.int32, [2, batch_size, max_len_path], name=\"dep_ids\")\n",
    "    y = tf.placeholder(tf.int32, [batch_size], name=\"y\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "with tf.name_scope(\"word_embedding\"):\n",
    "    W = tf.Variable(tf.constant(0.0, shape=[word_vocab_size, word_embd_dim]), name=\"W\")\n",
    "    embedding_placeholder = tf.placeholder(tf.float32,[word_vocab_size, word_embd_dim])\n",
    "    embedding_init = W.assign(embedding_placeholder)\n",
    "    embedded_word = tf.nn.embedding_lookup(W, word_ids)\n",
    "    word_embedding_saver = tf.train.Saver({\"word_embedding/W\": W})\n",
    "\n",
    "with tf.name_scope(\"pos_embedding\"):\n",
    "    W = tf.Variable(tf.random_uniform([pos_vocab_size, pos_embd_dim]), name=\"W\")\n",
    "    embedded_pos = tf.nn.embedding_lookup(W, pos_ids)\n",
    "    pos_embedding_saver = tf.train.Saver({\"pos_embedding/W\": W})\n",
    "\n",
    "with tf.name_scope(\"dep_embedding\"):\n",
    "    W = tf.Variable(tf.random_uniform([dep_vocab_size, dep_embd_dim]), name=\"W\")\n",
    "    embedded_dep = tf.nn.embedding_lookup(W, dep_ids)\n",
    "    dep_embedding_saver = tf.train.Saver({\"dep_embedding/W\": W})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "with tf.name_scope(\"dropout\"):\n",
    "    embedded_word_drop = tf.nn.dropout(embedded_word, 0.3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "word_hidden_state = tf.zeros([batch_size, word_state_size], name='word_hidden_state')\n",
    "word_cell_state = tf.zeros([batch_size, word_state_size], name='word_cell_state')\n",
    "word_init_state = tf.contrib.rnn.LSTMStateTuple(word_hidden_state, word_cell_state)\n",
    "\n",
    "other_hidden_states = tf.zeros([channels-1, batch_size, other_state_size], name=\"hidden_state\")\n",
    "other_cell_states = tf.zeros([channels-1, batch_size, other_state_size], name=\"cell_state\")\n",
    "\n",
    "other_init_states = [tf.contrib.rnn.LSTMStateTuple(other_hidden_states[i], other_cell_states[i]) for i in range(channels-1)]\n",
    "\n",
    "with tf.variable_scope(\"word_lstm1\"):\n",
    "    cell = tf.contrib.rnn.BasicLSTMCell(word_state_size)\n",
    "    state_series, current_state = tf.nn.dynamic_rnn(cell, embedded_word_drop[0], sequence_length=path_length[0], initial_state=word_init_state)\n",
    "    state_series_word1 = tf.reduce_max(state_series, axis=1)\n",
    "\n",
    "with tf.variable_scope(\"word_lstm2\"):\n",
    "    cell = tf.contrib.rnn.BasicLSTMCell(word_state_size)\n",
    "    state_series, current_state = tf.nn.dynamic_rnn(cell, embedded_word_drop[1], sequence_length=path_length[1], initial_state=word_init_state)\n",
    "    state_series_word2 = tf.reduce_max(state_series, axis=1)\n",
    "\n",
    "with tf.variable_scope(\"pos_lstm1\"):\n",
    "    cell = tf.contrib.rnn.BasicLSTMCell(other_state_size)\n",
    "    state_series, current_state = tf.nn.dynamic_rnn(cell, embedded_pos[0], sequence_length=path_length[0],initial_state=other_init_states[0])\n",
    "    state_series_pos1 = tf.reduce_max(state_series, axis=1)\n",
    "\n",
    "with tf.variable_scope(\"pos_lstm2\"):\n",
    "    cell = tf.contrib.rnn.BasicLSTMCell(other_state_size)\n",
    "    state_series, current_state = tf.nn.dynamic_rnn(cell, embedded_pos[1], sequence_length=path_length[1],initial_state=other_init_states[0])\n",
    "    state_series_pos2 = tf.reduce_max(state_series, axis=1)\n",
    "\n",
    "with tf.variable_scope(\"dep_lstm1\"):\n",
    "    cell = tf.contrib.rnn.BasicLSTMCell(other_state_size)\n",
    "    state_series, current_state = tf.nn.dynamic_rnn(cell, embedded_dep[0], sequence_length=path_length[0], initial_state=other_init_states[1])\n",
    "    state_series_dep1 = tf.reduce_max(state_series, axis=1)\n",
    "\n",
    "with tf.variable_scope(\"dep_lstm2\"):\n",
    "    cell = tf.contrib.rnn.BasicLSTMCell(other_state_size)\n",
    "    state_series, current_state = tf.nn.dynamic_rnn(cell, embedded_dep[1], sequence_length=path_length[1], initial_state=other_init_states[1])\n",
    "    state_series_dep2 = tf.reduce_max(state_series, axis=1)\n",
    "\n",
    "state_series1 = tf.concat([state_series_word1, state_series_pos1, state_series_dep1], 1)\n",
    "state_series2 = tf.concat([state_series_word2, state_series_pos2, state_series_dep2], 1)\n",
    "\n",
    "state_series = tf.concat([state_series1, state_series2], 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "with tf.name_scope(\"hidden_layer\"):\n",
    "    W = tf.Variable(tf.truncated_normal([600, 100], -0.1, 0.1), name=\"W\")\n",
    "    b = tf.Variable(tf.zeros([100]), name=\"b\")\n",
    "    y_hidden_layer = tf.matmul(state_series, W) + b\n",
    "\n",
    "with tf.name_scope(\"softmax_layer\"):\n",
    "    W = tf.Variable(tf.truncated_normal([100, relation_classes], -0.1, 0.1), name=\"W\")\n",
    "    b = tf.Variable(tf.zeros([relation_classes]), name=\"b\")\n",
    "    logits = tf.matmul(y_hidden_layer, W) + b\n",
    "    predictions = tf.argmax(logits, 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "tv_all = tf.trainable_variables()\n",
    "tv_regu = []\n",
    "non_reg = [\"word_embedding/W:0\",\"pos_embedding/W:0\",'dep_embedding/W:0',\"global_step:0\",'hidden_layer/b:0','softmax_layer/b:0']\n",
    "for t in tv_all:\n",
    "    if t.name not in non_reg:\n",
    "        if(t.name.find('biases')==-1):\n",
    "            tv_regu.append(t)\n",
    "\n",
    "with tf.name_scope(\"loss\"):\n",
    "    l2_loss = lambda_l2 * tf.reduce_sum([ tf.nn.l2_loss(v) for v in tv_regu ])\n",
    "    loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=y))\n",
    "    total_loss = loss + l2_loss\n",
    "\n",
    "global_step = tf.Variable(0, name=\"global_step\")\n",
    "\n",
    "optimizer = tf.train.AdamOptimizer(0.001).minimize(total_loss, global_step=global_step)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "f = open(data_dir + '/vocab.pkl', 'rb')\n",
    "vocab = pickle.load(f)\n",
    "f.close()\n",
    "\n",
    "word2id = dict((w, i) for i,w in enumerate(vocab))\n",
    "id2word = dict((i, w) for i,w in enumerate(vocab))\n",
    "\n",
    "unknown_token = \"UNKNOWN_TOKEN\"\n",
    "word2id[unknown_token] = word_vocab_size -1\n",
    "id2word[word_vocab_size-1] = unknown_token\n",
    "\n",
    "pos_tags_vocab = []\n",
    "for line in open(data_dir + '/pos_tags.txt'):\n",
    "        pos_tags_vocab.append(line.strip())\n",
    "\n",
    "dep_vocab = []\n",
    "for line in open(data_dir + '/dependency_types.txt'):\n",
    "    dep_vocab.append(line.strip())\n",
    "\n",
    "relation_vocab = []\n",
    "for line in open(data_dir + '/relation_types.txt'):\n",
    "    relation_vocab.append(line.strip())\n",
    "\n",
    "\n",
    "rel2id = dict((w, i) for i,w in enumerate(relation_vocab))\n",
    "id2rel = dict((i, w) for i,w in enumerate(relation_vocab))\n",
    "\n",
    "pos_tag2id = dict((w, i) for i,w in enumerate(pos_tags_vocab))\n",
    "id2pos_tag = dict((i, w) for i,w in enumerate(pos_tags_vocab))\n",
    "\n",
    "dep2id = dict((w, i) for i,w in enumerate(dep_vocab))\n",
    "id2dep = dict((i, w) for i,w in enumerate(dep_vocab))\n",
    "\n",
    "pos_tag2id['OTH'] = 9\n",
    "id2pos_tag[9] = 'OTH'\n",
    "\n",
    "dep2id['OTH'] = 20\n",
    "id2dep[20] = 'OTH'\n",
    "\n",
    "JJ_pos_tags = ['JJ', 'JJR', 'JJS']\n",
    "NN_pos_tags = ['NN', 'NNS', 'NNP', 'NNPS']\n",
    "RB_pos_tags = ['RB', 'RBR', 'RBS']\n",
    "PRP_pos_tags = ['PRP', 'PRP$']\n",
    "VB_pos_tags = ['VB', 'VBD', 'VBG', 'VBN', 'VBP', 'VBZ']\n",
    "_pos_tags = ['CC', 'CD', 'DT', 'IN']\n",
    "\n",
    "def pos_tag(x):\n",
    "    if x in JJ_pos_tags:\n",
    "        return pos_tag2id['JJ']\n",
    "    if x in NN_pos_tags:\n",
    "        return pos_tag2id['NN']\n",
    "    if x in RB_pos_tags:\n",
    "        return pos_tag2id['RB']\n",
    "    if x in PRP_pos_tags:\n",
    "        return pos_tag2id['PRP']\n",
    "    if x in VB_pos_tags:\n",
    "        return pos_tag2id['VB']\n",
    "    if x in _pos_tags:\n",
    "        return pos_tag2id[x]\n",
    "    else:\n",
    "        return 9"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "sess = tf.Session()\n",
    "sess.run(tf.global_variables_initializer())\n",
    "saver = tf.train.Saver()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# f = open('data/word_embedding', 'rb')\n",
    "# word_embedding = pickle.load(f)\n",
    "# f.close()\n",
    "\n",
    "# sess.run(embedding_init, feed_dict={embedding_placeholder:word_embedding})\n",
    "# word_embedding_saver.save(sess, word_embd_dir + '/word_embd')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# model = tf.train.latest_checkpoint(model_dir)\n",
    "# saver.restore(sess, model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from checkpoint/word_embd/word_embd\n"
     ]
    }
   ],
   "source": [
    "latest_embd = tf.train.latest_checkpoint(word_embd_dir)\n",
    "word_embedding_saver.restore(sess, latest_embd)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "f = open(data_dir + '/train_paths', 'rb')\n",
    "word_p1, word_p2, dep_p1, dep_p2, pos_p1, pos_p2 = pickle.load(f)\n",
    "f.close()\n",
    "\n",
    "relations = []\n",
    "for line in open(data_dir + '/train_relations.txt'):\n",
    "    relations.append(line.strip().split()[1])\n",
    "\n",
    "length = len(word_p1)\n",
    "num_batches = int(length/batch_size)\n",
    "\n",
    "for i in range(length):\n",
    "    for j, word in enumerate(word_p1[i]):\n",
    "        word = word.lower()\n",
    "        word_p1[i][j] = word if word in word2id else unknown_token \n",
    "    for k, word in enumerate(word_p2[i]):\n",
    "        word = word.lower()\n",
    "        word_p2[i][k] = word if word in word2id else unknown_token \n",
    "    for l, d in enumerate(dep_p1[i]):\n",
    "        dep_p1[i][l] = d if d in dep2id else 'OTH'\n",
    "    for m, d in enumerate(dep_p2[i]):\n",
    "        dep_p2[i][m] = d if d in dep2id else 'OTH'\n",
    "\n",
    "word_p1_ids = np.ones([length, max_len_path],dtype=int)\n",
    "word_p2_ids = np.ones([length, max_len_path],dtype=int)\n",
    "pos_p1_ids = np.ones([length, max_len_path],dtype=int)\n",
    "pos_p2_ids = np.ones([length, max_len_path],dtype=int)\n",
    "dep_p1_ids = np.ones([length, max_len_path],dtype=int)\n",
    "dep_p2_ids = np.ones([length, max_len_path],dtype=int)\n",
    "rel_ids = np.array([rel2id[rel] for rel in relations])\n",
    "path1_len = np.array([len(w) for w in word_p1], dtype=int)\n",
    "path2_len = np.array([len(w) for w in word_p2])\n",
    "\n",
    "for i in range(length):\n",
    "    for j, w in enumerate(word_p1[i]):\n",
    "        word_p1_ids[i][j] = word2id[w]\n",
    "    for j, w in enumerate(word_p2[i]):\n",
    "        word_p2_ids[i][j] = word2id[w]\n",
    "    for j, w in enumerate(pos_p1[i]):\n",
    "        pos_p1_ids[i][j] = pos_tag(w)\n",
    "    for j, w in enumerate(pos_p2[i]):\n",
    "        pos_p2_ids[i][j] = pos_tag(w)\n",
    "    for j, w in enumerate(dep_p1[i]):\n",
    "        dep_p1_ids[i][j] = dep2id[w]\n",
    "    for j, w in enumerate(dep_p2[i]):\n",
    "        dep_p2_ids[i][j] = dep2id[w]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step: 10 loss: 2.58496\n",
      "Step: 20 loss: 3.10737\n",
      "Step: 30 loss: 2.76529\n",
      "Step: 40 loss: 2.86065\n",
      "Step: 50 loss: 2.87625\n",
      "Step: 60 loss: 2.70682\n",
      "Step: 70 loss: 2.63103\n",
      "Step: 80 loss: 2.64477\n",
      "Step: 90 loss: 2.60822\n",
      "Step: 100 loss: 2.66668\n",
      "Step: 110 loss: 2.41839\n",
      "Step: 120 loss: 2.55943\n",
      "Step: 130 loss: 2.3208\n",
      "Step: 140 loss: 2.14755\n",
      "Step: 150 loss: 2.53709\n",
      "Step: 160 loss: 2.61406\n",
      "Step: 170 loss: 2.29024\n",
      "Step: 180 loss: 2.43227\n",
      "Step: 190 loss: 2.44584\n",
      "Step: 200 loss: 2.57538\n",
      "Step: 210 loss: 2.40568\n",
      "Step: 220 loss: 2.267\n",
      "Step: 230 loss: 2.50777\n",
      "Step: 240 loss: 1.94963\n",
      "Step: 250 loss: 1.83509\n",
      "Step: 260 loss: 2.26527\n",
      "Step: 270 loss: 2.09075\n",
      "Step: 280 loss: 1.53659\n",
      "Step: 290 loss: 2.21628\n",
      "Step: 300 loss: 1.84958\n",
      "Step: 310 loss: 1.6308\n",
      "Step: 320 loss: 1.86496\n",
      "Step: 330 loss: 1.96894\n",
      "Step: 340 loss: 2.60143\n",
      "Step: 350 loss: 3.13792\n",
      "Step: 360 loss: 2.17429\n",
      "Step: 370 loss: 1.34178\n",
      "Step: 380 loss: 1.69578\n",
      "Step: 390 loss: 2.02371\n",
      "Step: 400 loss: 1.84938\n",
      "Step: 410 loss: 2.09912\n",
      "Step: 420 loss: 1.61329\n",
      "Step: 430 loss: 1.50105\n",
      "Step: 440 loss: 0.97728\n",
      "Step: 450 loss: 1.80587\n",
      "Step: 460 loss: 2.07404\n",
      "Step: 470 loss: 2.01102\n",
      "Step: 480 loss: 1.89253\n",
      "Step: 490 loss: 1.77081\n",
      "Step: 500 loss: 2.50133\n",
      "Step: 510 loss: 2.08549\n",
      "Step: 520 loss: 1.68818\n",
      "Step: 530 loss: 1.67397\n",
      "Step: 540 loss: 1.59765\n",
      "Step: 550 loss: 2.21121\n",
      "Step: 560 loss: 1.81977\n",
      "Step: 570 loss: 1.52956\n",
      "Step: 580 loss: 1.63405\n",
      "Step: 590 loss: 1.8459\n",
      "Step: 600 loss: 1.73701\n",
      "Step: 610 loss: 1.79957\n",
      "Step: 620 loss: 1.53202\n",
      "Step: 630 loss: 1.41413\n",
      "Step: 640 loss: 1.61207\n",
      "Step: 650 loss: 1.34977\n",
      "Step: 660 loss: 1.7873\n",
      "Step: 670 loss: 1.47119\n",
      "Step: 680 loss: 1.28865\n",
      "Step: 690 loss: 1.05733\n",
      "Step: 700 loss: 1.32457\n",
      "Step: 710 loss: 1.88985\n",
      "Step: 720 loss: 2.49815\n",
      "Step: 730 loss: 3.11626\n",
      "Step: 740 loss: 1.67553\n",
      "Step: 750 loss: 1.28452\n",
      "Step: 760 loss: 1.70221\n",
      "Step: 770 loss: 2.00524\n",
      "Step: 780 loss: 2.22623\n",
      "Step: 790 loss: 1.48202\n",
      "Step: 800 loss: 1.65865\n",
      "Step: 810 loss: 1.0157\n",
      "Step: 820 loss: 1.71945\n",
      "Step: 830 loss: 1.91186\n",
      "Step: 840 loss: 1.49805\n",
      "Step: 850 loss: 2.06705\n",
      "Step: 860 loss: 1.43394\n",
      "Step: 870 loss: 1.43885\n",
      "Step: 880 loss: 1.39357\n",
      "Step: 890 loss: 1.40258\n",
      "Step: 900 loss: 1.57415\n",
      "Step: 910 loss: 1.4068\n",
      "Step: 920 loss: 1.55346\n",
      "Step: 930 loss: 1.03177\n",
      "Step: 940 loss: 1.06327\n",
      "Step: 950 loss: 1.43706\n",
      "Step: 960 loss: 1.61414\n",
      "Step: 970 loss: 1.42475\n",
      "Step: 980 loss: 1.59038\n",
      "Step: 990 loss: 2.01438\n",
      "Step: 1000 loss: 2.12198\n",
      "Saved Model\n",
      "Step: 1010 loss: 1.35049\n",
      "Step: 1020 loss: 1.59077\n",
      "Step: 1030 loss: 1.85515\n",
      "Step: 1040 loss: 1.07525\n",
      "Step: 1050 loss: 1.40516\n",
      "Step: 1060 loss: 1.77059\n",
      "Step: 1070 loss: 0.832953\n",
      "Step: 1080 loss: 1.33374\n",
      "Step: 1090 loss: 1.09609\n",
      "Step: 1100 loss: 1.50664\n",
      "Step: 1110 loss: 1.30985\n",
      "Step: 1120 loss: 1.26431\n",
      "Step: 1130 loss: 1.57895\n",
      "Step: 1140 loss: 1.51821\n",
      "Step: 1150 loss: 2.87981\n",
      "Step: 1160 loss: 1.76808\n",
      "Step: 1170 loss: 0.892032\n",
      "Step: 1180 loss: 1.54939\n",
      "Step: 1190 loss: 1.74507\n",
      "Step: 1200 loss: 1.02011\n",
      "Step: 1210 loss: 1.50654\n",
      "Step: 1220 loss: 1.1439\n",
      "Step: 1230 loss: 1.09325\n",
      "Step: 1240 loss: 0.625248\n",
      "Step: 1250 loss: 1.1975\n",
      "Step: 1260 loss: 1.144\n",
      "Step: 1270 loss: 1.39368\n",
      "Step: 1280 loss: 1.79273\n",
      "Step: 1290 loss: 1.1386\n",
      "Step: 1300 loss: 1.88069\n",
      "Step: 1310 loss: 2.4626\n",
      "Step: 1320 loss: 1.09815\n",
      "Step: 1330 loss: 1.60917\n",
      "Step: 1340 loss: 0.844376\n",
      "Step: 1350 loss: 2.08966\n",
      "Step: 1360 loss: 1.60296\n",
      "Step: 1370 loss: 1.5787\n",
      "Step: 1380 loss: 1.30464\n",
      "Step: 1390 loss: 1.65552\n",
      "Step: 1400 loss: 1.33147\n",
      "Step: 1410 loss: 1.13525\n",
      "Step: 1420 loss: 1.61861\n",
      "Step: 1430 loss: 1.47297\n",
      "Step: 1440 loss: 1.0966\n",
      "Step: 1450 loss: 0.801956\n",
      "Step: 1460 loss: 1.1317\n",
      "Step: 1470 loss: 0.699975\n",
      "Step: 1480 loss: 0.947208\n",
      "Step: 1490 loss: 0.586001\n",
      "Step: 1500 loss: 0.819361\n",
      "Step: 1510 loss: 1.83626\n",
      "Step: 1520 loss: 2.08206\n",
      "Step: 1530 loss: 2.83379\n",
      "Step: 1540 loss: 1.44576\n",
      "Step: 1550 loss: 1.01821\n",
      "Step: 1560 loss: 1.3858\n",
      "Step: 1570 loss: 1.82515\n",
      "Step: 1580 loss: 1.41062\n",
      "Step: 1590 loss: 0.83511\n",
      "Step: 1600 loss: 1.29458\n",
      "Step: 1610 loss: 0.774013\n",
      "Step: 1620 loss: 1.5578\n",
      "Step: 1630 loss: 1.78453\n",
      "Step: 1640 loss: 0.884262\n",
      "Step: 1650 loss: 1.49239\n",
      "Step: 1660 loss: 1.03614\n",
      "Step: 1670 loss: 1.43087\n",
      "Step: 1680 loss: 0.844527\n",
      "Step: 1690 loss: 0.944398\n",
      "Step: 1700 loss: 0.816472\n",
      "Step: 1710 loss: 0.913608\n",
      "Step: 1720 loss: 1.72381\n",
      "Step: 1730 loss: 0.661992\n",
      "Step: 1740 loss: 0.691906\n",
      "Step: 1750 loss: 1.66041\n",
      "Step: 1760 loss: 1.21576\n",
      "Step: 1770 loss: 1.43475\n",
      "Step: 1780 loss: 1.60353\n",
      "Step: 1790 loss: 1.31433\n",
      "Step: 1800 loss: 1.32488\n",
      "Step: 1810 loss: 1.4134\n",
      "Step: 1820 loss: 0.798665\n",
      "Step: 1830 loss: 1.76746\n",
      "Step: 1840 loss: 1.03225\n",
      "Step: 1850 loss: 0.894145\n",
      "Step: 1860 loss: 1.7942\n",
      "Step: 1870 loss: 0.983424\n",
      "Step: 1880 loss: 1.23703\n",
      "Step: 1890 loss: 1.15247\n",
      "Step: 1900 loss: 1.1428\n",
      "Step: 1910 loss: 1.36592\n",
      "Step: 1920 loss: 0.861303\n",
      "Step: 1930 loss: 0.989477\n",
      "Step: 1940 loss: 1.74074\n",
      "Step: 1950 loss: 2.10599\n",
      "Step: 1960 loss: 1.64768\n",
      "Step: 1970 loss: 0.98275\n",
      "Step: 1980 loss: 1.35782\n",
      "Step: 1990 loss: 1.46975\n",
      "Step: 2000 loss: 1.10711\n",
      "Saved Model\n",
      "Step: 2010 loss: 2.07712\n",
      "Step: 2020 loss: 0.931058\n",
      "Step: 2030 loss: 0.576309\n",
      "Step: 2040 loss: 0.585342\n",
      "Step: 2050 loss: 0.855627\n",
      "Step: 2060 loss: 1.21685\n",
      "Step: 2070 loss: 1.59706\n",
      "Step: 2080 loss: 1.85212\n",
      "Step: 2090 loss: 1.21957\n",
      "Step: 2100 loss: 1.49887\n",
      "Step: 2110 loss: 2.16836\n",
      "Step: 2120 loss: 1.06346\n",
      "Step: 2130 loss: 1.27316\n",
      "Step: 2140 loss: 0.963483\n",
      "Step: 2150 loss: 2.20629\n",
      "Step: 2160 loss: 1.4757\n",
      "Step: 2170 loss: 1.34223\n",
      "Step: 2180 loss: 1.23065\n",
      "Step: 2190 loss: 1.85278\n",
      "Step: 2200 loss: 1.55619\n",
      "Step: 2210 loss: 1.04193\n",
      "Step: 2220 loss: 0.764177\n",
      "Step: 2230 loss: 0.804077\n",
      "Step: 2240 loss: 0.939392\n",
      "Step: 2250 loss: 0.66663\n",
      "Step: 2260 loss: 0.964159\n",
      "Step: 2270 loss: 0.987535\n",
      "Step: 2280 loss: 0.834114\n",
      "Step: 2290 loss: 0.697496\n",
      "Step: 2300 loss: 1.1618\n",
      "Step: 2310 loss: 1.37042\n",
      "Step: 2320 loss: 1.08\n",
      "Step: 2330 loss: 2.70377\n",
      "Step: 2340 loss: 1.75124\n",
      "Step: 2350 loss: 0.884108\n",
      "Step: 2360 loss: 1.04206\n",
      "Step: 2370 loss: 1.86924\n",
      "Step: 2380 loss: 1.24307\n",
      "Step: 2390 loss: 1.01407\n",
      "Step: 2400 loss: 1.21662\n",
      "Step: 2410 loss: 0.790178\n",
      "Step: 2420 loss: 1.7276\n",
      "Step: 2430 loss: 1.49129\n",
      "Step: 2440 loss: 0.797509\n",
      "Step: 2450 loss: 1.57571\n",
      "Step: 2460 loss: 1.30196\n",
      "Step: 2470 loss: 1.30365\n",
      "Step: 2480 loss: 0.983886\n",
      "Step: 2490 loss: 0.780603\n",
      "Step: 2500 loss: 1.27146\n",
      "Step: 2510 loss: 1.00512\n",
      "Step: 2520 loss: 1.37058\n",
      "Step: 2530 loss: 0.496444\n",
      "Step: 2540 loss: 0.397409\n",
      "Step: 2550 loss: 1.7391\n",
      "Step: 2560 loss: 1.55185\n",
      "Step: 2570 loss: 0.95644\n",
      "Step: 2580 loss: 1.52873\n",
      "Step: 2590 loss: 1.75474\n",
      "Step: 2600 loss: 1.7472\n",
      "Step: 2610 loss: 1.05877\n",
      "Step: 2620 loss: 0.729419\n",
      "Step: 2630 loss: 1.51125\n",
      "Step: 2640 loss: 0.697421\n",
      "Step: 2650 loss: 1.08352\n",
      "Step: 2660 loss: 1.27108\n",
      "Step: 2670 loss: 0.744215\n",
      "Step: 2680 loss: 0.860689\n",
      "Step: 2690 loss: 0.88198\n",
      "Step: 2700 loss: 0.977602\n",
      "Step: 2710 loss: 1.03649\n",
      "Step: 2720 loss: 1.20945\n",
      "Step: 2730 loss: 1.01377\n",
      "Step: 2740 loss: 1.84821\n",
      "Step: 2750 loss: 2.30044\n",
      "Step: 2760 loss: 1.20987\n",
      "Step: 2770 loss: 0.570791\n",
      "Step: 2780 loss: 1.23336\n",
      "Step: 2790 loss: 0.817107\n",
      "Step: 2800 loss: 1.19927\n",
      "Step: 2810 loss: 1.44477\n",
      "Step: 2820 loss: 0.75026\n",
      "Step: 2830 loss: 0.598293\n",
      "Step: 2840 loss: 1.02176\n",
      "Step: 2850 loss: 0.811285\n",
      "Step: 2860 loss: 1.6373\n",
      "Step: 2870 loss: 1.40527\n",
      "Step: 2880 loss: 1.30027\n",
      "Step: 2890 loss: 1.62488\n",
      "Step: 2900 loss: 1.64671\n",
      "Step: 2910 loss: 1.62715\n",
      "Step: 2920 loss: 0.975012\n",
      "Step: 2930 loss: 0.794114\n",
      "Step: 2940 loss: 0.976072\n",
      "Step: 2950 loss: 1.87416\n",
      "Step: 2960 loss: 1.15603\n",
      "Step: 2970 loss: 1.49051\n",
      "Step: 2980 loss: 1.26622\n",
      "Step: 2990 loss: 1.60787\n",
      "Step: 3000 loss: 1.2106\n",
      "Saved Model\n",
      "Step: 3010 loss: 0.725911\n",
      "Step: 3020 loss: 0.695837\n",
      "Step: 3030 loss: 1.0466\n",
      "Step: 3040 loss: 0.812644\n",
      "Step: 3050 loss: 0.338174\n",
      "Step: 3060 loss: 0.689493\n",
      "Step: 3070 loss: 0.594191\n",
      "Step: 3080 loss: 0.864203\n",
      "Step: 3090 loss: 0.437232\n",
      "Step: 3100 loss: 0.781747\n",
      "Step: 3110 loss: 1.957\n",
      "Step: 3120 loss: 1.00162\n",
      "Step: 3130 loss: 2.73654\n",
      "Step: 3140 loss: 1.71228\n",
      "Step: 3150 loss: 1.05423\n",
      "Step: 3160 loss: 1.19582\n",
      "Step: 3170 loss: 1.49649\n",
      "Step: 3180 loss: 0.813894\n",
      "Step: 3190 loss: 0.64773\n",
      "Step: 3200 loss: 1.58146\n",
      "Step: 3210 loss: 0.841334\n",
      "Step: 3220 loss: 1.43991\n",
      "Step: 3230 loss: 1.29305\n",
      "Step: 3240 loss: 0.648234\n",
      "Step: 3250 loss: 1.68618\n",
      "Step: 3260 loss: 1.30669\n",
      "Step: 3270 loss: 0.935257\n",
      "Step: 3280 loss: 0.500432\n",
      "Step: 3290 loss: 0.771572\n",
      "Step: 3300 loss: 1.12055\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step: 3310 loss: 1.29887\n",
      "Step: 3320 loss: 1.29867\n",
      "Step: 3330 loss: 0.443141\n",
      "Step: 3340 loss: 0.454681\n",
      "Step: 3350 loss: 1.124\n",
      "Step: 3360 loss: 1.11588\n",
      "Step: 3370 loss: 0.939808\n",
      "Step: 3380 loss: 1.22224\n",
      "Step: 3390 loss: 1.02613\n",
      "Step: 3400 loss: 1.35341\n",
      "Step: 3410 loss: 0.489673\n",
      "Step: 3420 loss: 0.418773\n",
      "Step: 3430 loss: 1.12018\n",
      "Step: 3440 loss: 0.651813\n",
      "Step: 3450 loss: 0.940705\n",
      "Step: 3460 loss: 1.46314\n",
      "Step: 3470 loss: 0.754336\n",
      "Step: 3480 loss: 1.34626\n",
      "Step: 3490 loss: 0.632453\n",
      "Step: 3500 loss: 0.942931\n",
      "Step: 3510 loss: 1.01058\n",
      "Step: 3520 loss: 0.921788\n",
      "Step: 3530 loss: 0.574035\n",
      "Step: 3540 loss: 1.17512\n",
      "Step: 3550 loss: 2.10246\n",
      "Step: 3560 loss: 1.22662\n",
      "Step: 3570 loss: 0.783618\n",
      "Step: 3580 loss: 1.29106\n",
      "Step: 3590 loss: 1.17994\n",
      "Step: 3600 loss: 1.00297\n",
      "Step: 3610 loss: 1.47327\n",
      "Step: 3620 loss: 0.591666\n",
      "Step: 3630 loss: 0.624228\n",
      "Step: 3640 loss: 0.619013\n",
      "Step: 3650 loss: 0.970902\n",
      "Step: 3660 loss: 1.09406\n",
      "Step: 3670 loss: 1.27819\n",
      "Step: 3680 loss: 1.43964\n",
      "Step: 3690 loss: 1.18516\n",
      "Step: 3700 loss: 1.30339\n",
      "Step: 3710 loss: 1.66385\n",
      "Step: 3720 loss: 0.985713\n",
      "Step: 3730 loss: 0.972599\n",
      "Step: 3740 loss: 0.728473\n",
      "Step: 3750 loss: 1.79347\n",
      "Step: 3760 loss: 1.15034\n",
      "Step: 3770 loss: 1.13924\n",
      "Step: 3780 loss: 1.09135\n",
      "Step: 3790 loss: 1.18354\n",
      "Step: 3800 loss: 0.744034\n",
      "Step: 3810 loss: 0.610228\n",
      "Step: 3820 loss: 0.837816\n",
      "Step: 3830 loss: 0.755945\n",
      "Step: 3840 loss: 0.955844\n",
      "Step: 3850 loss: 0.511125\n",
      "Step: 3860 loss: 0.435998\n",
      "Step: 3870 loss: 0.772284\n",
      "Step: 3880 loss: 0.820116\n",
      "Step: 3890 loss: 0.835383\n",
      "Step: 3900 loss: 1.01854\n",
      "Step: 3910 loss: 1.22501\n",
      "Step: 3920 loss: 0.821469\n",
      "Step: 3930 loss: 1.80253\n",
      "Step: 3940 loss: 1.33736\n",
      "Step: 3950 loss: 0.587142\n",
      "Step: 3960 loss: 0.780401\n",
      "Step: 3970 loss: 1.37075\n",
      "Step: 3980 loss: 1.14265\n",
      "Step: 3990 loss: 0.912981\n",
      "Step: 4000 loss: 1.06097\n",
      "Saved Model\n",
      "Step: 4010 loss: 0.390524\n",
      "Step: 4020 loss: 1.06456\n",
      "Step: 4030 loss: 1.55767\n",
      "Step: 4040 loss: 0.909522\n",
      "Step: 4050 loss: 1.03073\n",
      "Step: 4060 loss: 1.01802\n",
      "Step: 4070 loss: 0.97097\n",
      "Step: 4080 loss: 0.50511\n",
      "Step: 4090 loss: 0.725557\n",
      "Step: 4100 loss: 0.940354\n",
      "Step: 4110 loss: 0.77441\n",
      "Step: 4120 loss: 1.08466\n",
      "Step: 4130 loss: 0.396734\n",
      "Step: 4140 loss: 0.235926\n",
      "Step: 4150 loss: 0.740242\n",
      "Step: 4160 loss: 1.27756\n",
      "Step: 4170 loss: 0.944757\n",
      "Step: 4180 loss: 0.772819\n",
      "Step: 4190 loss: 1.25977\n",
      "Step: 4200 loss: 1.45452\n",
      "Step: 4210 loss: 0.735484\n",
      "Step: 4220 loss: 0.372611\n",
      "Step: 4230 loss: 0.974768\n",
      "Step: 4240 loss: 0.701365\n",
      "Step: 4250 loss: 1.04675\n",
      "Step: 4260 loss: 1.44547\n",
      "Step: 4270 loss: 0.667938\n",
      "Step: 4280 loss: 0.592555\n",
      "Step: 4290 loss: 0.733652\n",
      "Step: 4300 loss: 0.822104\n",
      "Step: 4310 loss: 0.459669\n",
      "Step: 4320 loss: 0.920926\n",
      "Step: 4330 loss: 0.542365\n",
      "Step: 4340 loss: 1.36479\n",
      "Step: 4350 loss: 2.2645\n",
      "Step: 4360 loss: 1.51566\n",
      "Step: 4370 loss: 0.55009\n",
      "Step: 4380 loss: 1.00061\n",
      "Step: 4390 loss: 1.21798\n",
      "Step: 4400 loss: 0.934975\n",
      "Step: 4410 loss: 0.499443\n",
      "Step: 4420 loss: 0.465067\n",
      "Step: 4430 loss: 0.476826\n",
      "Step: 4440 loss: 0.520494\n",
      "Step: 4450 loss: 0.624403\n",
      "Step: 4460 loss: 1.21071\n",
      "Step: 4470 loss: 1.62456\n",
      "Step: 4480 loss: 1.29787\n",
      "Step: 4490 loss: 0.903898\n",
      "Step: 4500 loss: 0.791657\n",
      "Step: 4510 loss: 1.74211\n",
      "Step: 4520 loss: 0.816809\n",
      "Step: 4530 loss: 0.664668\n",
      "Step: 4540 loss: 1.0696\n",
      "Step: 4550 loss: 1.81585\n",
      "Step: 4560 loss: 1.03483\n",
      "Step: 4570 loss: 1.02653\n",
      "Step: 4580 loss: 0.61926\n",
      "Step: 4590 loss: 1.44998\n",
      "Step: 4600 loss: 1.12985\n",
      "Step: 4610 loss: 0.651151\n",
      "Step: 4620 loss: 0.524491\n",
      "Step: 4630 loss: 0.879583\n",
      "Step: 4640 loss: 1.08714\n",
      "Step: 4650 loss: 0.416062\n",
      "Step: 4660 loss: 0.842088\n",
      "Step: 4670 loss: 0.378754\n",
      "Step: 4680 loss: 0.693548\n",
      "Step: 4690 loss: 0.551506\n",
      "Step: 4700 loss: 1.28341\n",
      "Step: 4710 loss: 1.5352\n",
      "Step: 4720 loss: 0.935478\n",
      "Step: 4730 loss: 2.40733\n",
      "Step: 4740 loss: 1.59954\n",
      "Step: 4750 loss: 0.830217\n",
      "Step: 4760 loss: 0.43519\n",
      "Step: 4770 loss: 0.963868\n",
      "Step: 4780 loss: 1.68126\n",
      "Step: 4790 loss: 0.901914\n",
      "Step: 4800 loss: 1.42302\n",
      "Step: 4810 loss: 0.548432\n",
      "Step: 4820 loss: 1.98473\n",
      "Step: 4830 loss: 1.94111\n",
      "Step: 4840 loss: 0.883514\n",
      "Step: 4850 loss: 1.14388\n",
      "Step: 4860 loss: 0.483978\n",
      "Step: 4870 loss: 1.28123\n",
      "Step: 4880 loss: 0.52201\n",
      "Step: 4890 loss: 0.328587\n",
      "Step: 4900 loss: 0.680411\n",
      "Step: 4910 loss: 0.941578\n",
      "Step: 4920 loss: 1.35697\n",
      "Step: 4930 loss: 0.450344\n",
      "Step: 4940 loss: 0.322001\n",
      "Step: 4950 loss: 0.653481\n",
      "Step: 4960 loss: 1.31823\n",
      "Step: 4970 loss: 0.632045\n",
      "Step: 4980 loss: 1.16559\n",
      "Step: 4990 loss: 0.839974\n",
      "Step: 5000 loss: 1.05817\n",
      "Saved Model\n",
      "Step: 5010 loss: 0.759227\n",
      "Step: 5020 loss: 0.32919\n",
      "Step: 5030 loss: 1.03777\n",
      "Step: 5040 loss: 0.475681\n",
      "Step: 5050 loss: 1.02875\n",
      "Step: 5060 loss: 0.882518\n",
      "Step: 5070 loss: 0.6202\n",
      "Step: 5080 loss: 0.474811\n",
      "Step: 5090 loss: 0.434578\n",
      "Step: 5100 loss: 0.749078\n",
      "Step: 5110 loss: 0.520399\n",
      "Step: 5120 loss: 0.481408\n",
      "Step: 5130 loss: 0.746087\n",
      "Step: 5140 loss: 0.679224\n",
      "Step: 5150 loss: 1.49081\n",
      "Step: 5160 loss: 1.01975\n",
      "Step: 5170 loss: 0.742056\n",
      "Step: 5180 loss: 0.876072\n",
      "Step: 5190 loss: 1.15101\n",
      "Step: 5200 loss: 0.793314\n",
      "Step: 5210 loss: 1.02004\n",
      "Step: 5220 loss: 0.78706\n",
      "Step: 5230 loss: 0.692108\n",
      "Step: 5240 loss: 0.321017\n",
      "Step: 5250 loss: 0.393469\n",
      "Step: 5260 loss: 0.942507\n",
      "Step: 5270 loss: 0.889896\n",
      "Step: 5280 loss: 1.31456\n",
      "Step: 5290 loss: 0.658592\n",
      "Step: 5300 loss: 1.14988\n",
      "Step: 5310 loss: 1.32277\n",
      "Step: 5320 loss: 0.55885\n",
      "Step: 5330 loss: 0.775435\n",
      "Step: 5340 loss: 0.718799\n",
      "Step: 5350 loss: 1.60931\n",
      "Step: 5360 loss: 1.1905\n",
      "Step: 5370 loss: 0.816971\n",
      "Step: 5380 loss: 0.616491\n",
      "Step: 5390 loss: 1.39553\n",
      "Step: 5400 loss: 0.664417\n",
      "Step: 5410 loss: 0.38624\n",
      "Step: 5420 loss: 0.667116\n",
      "Step: 5430 loss: 0.577456\n",
      "Step: 5440 loss: 0.352484\n",
      "Step: 5450 loss: 0.211872\n",
      "Step: 5460 loss: 0.474249\n",
      "Step: 5470 loss: 0.453246\n",
      "Step: 5480 loss: 0.706434\n",
      "Step: 5490 loss: 0.426729\n",
      "Step: 5500 loss: 0.52425\n",
      "Step: 5510 loss: 0.853899\n",
      "Step: 5520 loss: 0.75828\n",
      "Step: 5530 loss: 2.36887\n",
      "Step: 5540 loss: 1.56508\n",
      "Step: 5550 loss: 0.631172\n",
      "Step: 5560 loss: 0.307049\n",
      "Step: 5570 loss: 1.12535\n",
      "Step: 5580 loss: 1.28942\n",
      "Step: 5590 loss: 0.843004\n",
      "Step: 5600 loss: 1.60441\n",
      "Step: 5610 loss: 0.656596\n",
      "Step: 5620 loss: 1.14915\n",
      "Step: 5630 loss: 1.13599\n",
      "Step: 5640 loss: 0.872554\n",
      "Step: 5650 loss: 1.05353\n",
      "Step: 5660 loss: 0.816986\n",
      "Step: 5670 loss: 0.743855\n",
      "Step: 5680 loss: 0.480969\n",
      "Step: 5690 loss: 0.693893\n",
      "Step: 5700 loss: 1.02392\n",
      "Step: 5710 loss: 0.97554\n",
      "Step: 5720 loss: 1.14121\n",
      "Step: 5730 loss: 0.33332\n",
      "Step: 5740 loss: 0.172379\n",
      "Step: 5750 loss: 0.845975\n",
      "Step: 5760 loss: 0.804938\n",
      "Step: 5770 loss: 0.459435\n",
      "Step: 5780 loss: 1.17011\n",
      "Step: 5790 loss: 0.994597\n",
      "Step: 5800 loss: 0.742494\n",
      "Step: 5810 loss: 0.677014\n",
      "Step: 5820 loss: 0.31159\n",
      "Step: 5830 loss: 0.609288\n",
      "Step: 5840 loss: 0.698825\n",
      "Step: 5850 loss: 1.0766\n",
      "Step: 5860 loss: 1.1423\n",
      "Step: 5870 loss: 0.92005\n",
      "Step: 5880 loss: 0.839544\n",
      "Step: 5890 loss: 0.277548\n",
      "Step: 5900 loss: 0.688242\n",
      "Step: 5910 loss: 0.513663\n",
      "Step: 5920 loss: 1.07939\n",
      "Step: 5930 loss: 0.83374\n",
      "Step: 5940 loss: 1.1681\n",
      "Step: 5950 loss: 1.79118\n",
      "Step: 5960 loss: 1.07221\n",
      "Step: 5970 loss: 0.287278\n",
      "Step: 5980 loss: 0.968502\n",
      "Step: 5990 loss: 0.483481\n",
      "Step: 6000 loss: 1.21118\n",
      "Saved Model\n",
      "Step: 6010 loss: 0.814402\n",
      "Step: 6020 loss: 0.717279\n",
      "Step: 6030 loss: 0.382565\n",
      "Step: 6040 loss: 0.343708\n",
      "Step: 6050 loss: 0.582648\n",
      "Step: 6060 loss: 1.00145\n",
      "Step: 6070 loss: 0.694206\n",
      "Step: 6080 loss: 1.34108\n",
      "Step: 6090 loss: 0.331623\n",
      "Step: 6100 loss: 0.871158\n",
      "Step: 6110 loss: 1.91419\n",
      "Step: 6120 loss: 0.532188\n",
      "Step: 6130 loss: 0.59366\n",
      "Step: 6140 loss: 0.848319\n",
      "Step: 6150 loss: 1.28134\n",
      "Step: 6160 loss: 0.750221\n",
      "Step: 6170 loss: 1.01036\n",
      "Step: 6180 loss: 0.676829\n",
      "Step: 6190 loss: 0.757268\n",
      "Step: 6200 loss: 0.700409\n",
      "Step: 6210 loss: 0.901653\n",
      "Step: 6220 loss: 0.757459\n",
      "Step: 6230 loss: 0.721098\n",
      "Step: 6240 loss: 1.04816\n",
      "Step: 6250 loss: 0.21931\n",
      "Step: 6260 loss: 0.622797\n",
      "Step: 6270 loss: 0.37771\n",
      "Step: 6280 loss: 0.77826\n",
      "Step: 6290 loss: 0.252749\n",
      "Step: 6300 loss: 0.232237\n",
      "Step: 6310 loss: 0.821251\n",
      "Step: 6320 loss: 0.407244\n",
      "Step: 6330 loss: 2.48115\n",
      "Step: 6340 loss: 0.972943\n",
      "Step: 6350 loss: 0.624904\n",
      "Step: 6360 loss: 0.896282\n",
      "Step: 6370 loss: 0.962505\n",
      "Step: 6380 loss: 1.05641\n",
      "Step: 6390 loss: 0.814654\n",
      "Step: 6400 loss: 1.36493\n",
      "Step: 6410 loss: 0.980963\n",
      "Step: 6420 loss: 1.06977\n",
      "Step: 6430 loss: 0.998995\n",
      "Step: 6440 loss: 0.701565\n",
      "Step: 6450 loss: 0.871905\n",
      "Step: 6460 loss: 0.439747\n",
      "Step: 6470 loss: 0.794077\n",
      "Step: 6480 loss: 0.767207\n",
      "Step: 6490 loss: 0.901303\n",
      "Step: 6500 loss: 0.759219\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step: 6510 loss: 0.820532\n",
      "Step: 6520 loss: 1.22425\n",
      "Step: 6530 loss: 0.519022\n",
      "Step: 6540 loss: 0.296085\n",
      "Step: 6550 loss: 0.566166\n",
      "Step: 6560 loss: 0.901533\n",
      "Step: 6570 loss: 0.422791\n",
      "Step: 6580 loss: 1.06561\n",
      "Step: 6590 loss: 0.935549\n",
      "Step: 6600 loss: 0.604364\n",
      "Step: 6610 loss: 0.515405\n",
      "Step: 6620 loss: 0.39008\n",
      "Step: 6630 loss: 0.983397\n",
      "Step: 6640 loss: 0.522433\n",
      "Step: 6650 loss: 0.605176\n",
      "Step: 6660 loss: 0.388548\n",
      "Step: 6670 loss: 0.723514\n",
      "Step: 6680 loss: 0.580454\n",
      "Step: 6690 loss: 0.346255\n",
      "Step: 6700 loss: 0.562923\n",
      "Step: 6710 loss: 0.378275\n",
      "Step: 6720 loss: 0.664454\n",
      "Step: 6730 loss: 0.560324\n",
      "Step: 6740 loss: 1.11171\n",
      "Step: 6750 loss: 1.28284\n",
      "Step: 6760 loss: 0.816377\n",
      "Step: 6770 loss: 0.3546\n",
      "Step: 6780 loss: 0.872111\n",
      "Step: 6790 loss: 0.602099\n",
      "Step: 6800 loss: 1.25797\n",
      "Step: 6810 loss: 0.788008\n",
      "Step: 6820 loss: 0.776907\n",
      "Step: 6830 loss: 0.332655\n",
      "Step: 6840 loss: 0.584151\n",
      "Step: 6850 loss: 0.302892\n",
      "Step: 6860 loss: 0.814614\n",
      "Step: 6870 loss: 0.758526\n",
      "Step: 6880 loss: 1.26902\n",
      "Step: 6890 loss: 0.885789\n",
      "Step: 6900 loss: 0.990651\n",
      "Step: 6910 loss: 0.878724\n",
      "Step: 6920 loss: 0.425199\n",
      "Step: 6930 loss: 0.469521\n",
      "Step: 6940 loss: 0.906546\n",
      "Step: 6950 loss: 1.52324\n",
      "Step: 6960 loss: 1.02792\n",
      "Step: 6970 loss: 0.525311\n",
      "Step: 6980 loss: 0.955596\n",
      "Step: 6990 loss: 1.09761\n",
      "Step: 7000 loss: 0.802379\n",
      "Saved Model\n",
      "Step: 7010 loss: 0.9121\n",
      "Step: 7020 loss: 0.357099\n",
      "Step: 7030 loss: 1.101\n",
      "Step: 7040 loss: 0.357942\n",
      "Step: 7050 loss: 0.409395\n",
      "Step: 7060 loss: 0.573967\n",
      "Step: 7070 loss: 0.463615\n",
      "Step: 7080 loss: 0.94682\n",
      "Step: 7090 loss: 0.619891\n",
      "Step: 7100 loss: 0.855413\n",
      "Step: 7110 loss: 1.42419\n",
      "Step: 7120 loss: 0.681216\n",
      "Step: 7130 loss: 1.9459\n",
      "Step: 7140 loss: 1.93496\n",
      "Step: 7150 loss: 0.640788\n",
      "Step: 7160 loss: 0.248415\n",
      "Step: 7170 loss: 0.791884\n",
      "Step: 7180 loss: 0.94091\n",
      "Step: 7190 loss: 0.567827\n",
      "Step: 7200 loss: 1.24787\n",
      "Step: 7210 loss: 0.575323\n",
      "Step: 7220 loss: 1.42185\n",
      "Step: 7230 loss: 0.869377\n",
      "Step: 7240 loss: 1.21908\n",
      "Step: 7250 loss: 0.980432\n",
      "Step: 7260 loss: 0.411679\n",
      "Step: 7270 loss: 0.785342\n",
      "Step: 7280 loss: 0.402316\n",
      "Step: 7290 loss: 0.452899\n",
      "Step: 7300 loss: 0.520111\n",
      "Step: 7310 loss: 0.797529\n",
      "Step: 7320 loss: 1.08549\n",
      "Step: 7330 loss: 0.281697\n",
      "Step: 7340 loss: 0.306729\n",
      "Step: 7350 loss: 0.691284\n",
      "Step: 7360 loss: 1.38673\n",
      "Step: 7370 loss: 0.755121\n",
      "Step: 7380 loss: 1.08226\n",
      "Step: 7390 loss: 0.750938\n",
      "Step: 7400 loss: 0.64653\n",
      "Step: 7410 loss: 0.47319\n",
      "Step: 7420 loss: 0.293102\n",
      "Step: 7430 loss: 0.635683\n",
      "Step: 7440 loss: 0.490909\n",
      "Step: 7450 loss: 0.695677\n",
      "Step: 7460 loss: 0.548634\n",
      "Step: 7470 loss: 0.787288\n",
      "Step: 7480 loss: 0.613089\n",
      "Step: 7490 loss: 0.404874\n",
      "Step: 7500 loss: 0.562694\n",
      "Step: 7510 loss: 0.951026\n",
      "Step: 7520 loss: 0.707245\n",
      "Step: 7530 loss: 0.423091\n",
      "Step: 7540 loss: 0.941493\n",
      "Step: 7550 loss: 1.29539\n",
      "Step: 7560 loss: 0.824791\n",
      "Step: 7570 loss: 0.526854\n",
      "Step: 7580 loss: 0.463023\n",
      "Step: 7590 loss: 0.954217\n",
      "Step: 7600 loss: 0.757973\n",
      "Step: 7610 loss: 0.685717\n",
      "Step: 7620 loss: 0.435718\n",
      "Step: 7630 loss: 0.427669\n",
      "Step: 7640 loss: 0.33744\n",
      "Step: 7650 loss: 0.424115\n",
      "Step: 7660 loss: 0.6156\n",
      "Step: 7670 loss: 0.780878\n",
      "Step: 7680 loss: 0.955969\n",
      "Step: 7690 loss: 0.538522\n",
      "Step: 7700 loss: 0.882085\n",
      "Step: 7710 loss: 1.1369\n",
      "Step: 7720 loss: 0.658544\n",
      "Step: 7730 loss: 0.752382\n",
      "Step: 7740 loss: 0.558673\n",
      "Step: 7750 loss: 1.08996\n",
      "Step: 7760 loss: 0.789387\n",
      "Step: 7770 loss: 1.30892\n",
      "Step: 7780 loss: 0.469326\n",
      "Step: 7790 loss: 1.63036\n",
      "Step: 7800 loss: 0.686388\n",
      "Step: 7810 loss: 0.781494\n",
      "Step: 7820 loss: 0.598617\n",
      "Step: 7830 loss: 0.790354\n",
      "Step: 7840 loss: 0.728604\n",
      "Step: 7850 loss: 0.371993\n",
      "Step: 7860 loss: 0.863311\n",
      "Step: 7870 loss: 0.328159\n",
      "Step: 7880 loss: 0.814793\n",
      "Step: 7890 loss: 0.553194\n",
      "Step: 7900 loss: 0.476994\n",
      "Step: 7910 loss: 0.826444\n",
      "Step: 7920 loss: 0.451457\n",
      "Step: 7930 loss: 2.07433\n",
      "Step: 7940 loss: 1.2047\n",
      "Step: 7950 loss: 0.442317\n",
      "Step: 7960 loss: 0.368624\n",
      "Step: 7970 loss: 1.25298\n",
      "Step: 7980 loss: 0.79219\n",
      "Step: 7990 loss: 0.878433\n",
      "Step: 8000 loss: 1.06332\n",
      "Saved Model\n",
      "Step: 8010 loss: 0.619728\n",
      "Step: 8020 loss: 1.35029\n",
      "Step: 8030 loss: 1.1786\n",
      "Step: 8040 loss: 0.830503\n",
      "Step: 8050 loss: 1.10155\n",
      "Step: 8060 loss: 0.329566\n",
      "Step: 8070 loss: 0.787416\n",
      "Step: 8080 loss: 0.637005\n",
      "Step: 8090 loss: 0.828562\n",
      "Step: 8100 loss: 0.731026\n",
      "Step: 8110 loss: 0.749044\n",
      "Step: 8120 loss: 1.17442\n",
      "Step: 8130 loss: 0.356548\n",
      "Step: 8140 loss: 0.20176\n",
      "Step: 8150 loss: 0.643085\n",
      "Step: 8160 loss: 1.36542\n",
      "Step: 8170 loss: 0.574774\n",
      "Step: 8180 loss: 0.871129\n",
      "Step: 8190 loss: 0.947338\n",
      "Step: 8200 loss: 0.470516\n",
      "Step: 8210 loss: 0.61855\n",
      "Step: 8220 loss: 0.59069\n",
      "Step: 8230 loss: 0.579407\n",
      "Step: 8240 loss: 0.462727\n",
      "Step: 8250 loss: 0.504803\n",
      "Step: 8260 loss: 0.899854\n",
      "Step: 8270 loss: 0.498773\n",
      "Step: 8280 loss: 0.515931\n",
      "Step: 8290 loss: 0.669014\n",
      "Step: 8300 loss: 0.483752\n",
      "Step: 8310 loss: 0.367542\n",
      "Step: 8320 loss: 0.857474\n",
      "Step: 8330 loss: 0.786036\n",
      "Step: 8340 loss: 1.60193\n",
      "Step: 8350 loss: 1.55164\n",
      "Step: 8360 loss: 0.426921\n",
      "Step: 8370 loss: 0.272792\n",
      "Step: 8380 loss: 1.15722\n",
      "Step: 8390 loss: 0.883714\n",
      "Step: 8400 loss: 0.781825\n",
      "Step: 8410 loss: 1.01623\n",
      "Step: 8420 loss: 0.686535\n",
      "Step: 8430 loss: 0.577403\n",
      "Step: 8440 loss: 0.283688\n",
      "Step: 8450 loss: 0.28914\n",
      "Step: 8460 loss: 1.00796\n",
      "Step: 8470 loss: 0.459863\n",
      "Step: 8480 loss: 0.903881\n",
      "Step: 8490 loss: 0.439129\n",
      "Step: 8500 loss: 0.622026\n",
      "Step: 8510 loss: 1.36481\n",
      "Step: 8520 loss: 0.77694\n",
      "Step: 8530 loss: 0.417748\n",
      "Step: 8540 loss: 0.432273\n",
      "Step: 8550 loss: 0.985087\n",
      "Step: 8560 loss: 0.631641\n",
      "Step: 8570 loss: 0.972092\n",
      "Step: 8580 loss: 0.632967\n",
      "Step: 8590 loss: 1.25179\n",
      "Step: 8600 loss: 0.831507\n",
      "Step: 8610 loss: 0.637352\n",
      "Step: 8620 loss: 0.459515\n",
      "Step: 8630 loss: 0.553187\n",
      "Step: 8640 loss: 0.573032\n",
      "Step: 8650 loss: 0.484561\n",
      "Step: 8660 loss: 0.214201\n",
      "Step: 8670 loss: 0.753728\n",
      "Step: 8680 loss: 0.415732\n",
      "Step: 8690 loss: 0.401174\n",
      "Step: 8700 loss: 0.34534\n",
      "Step: 8710 loss: 1.57033\n",
      "Step: 8720 loss: 0.427931\n",
      "Step: 8730 loss: 1.2192\n",
      "Step: 8740 loss: 1.1741\n",
      "Step: 8750 loss: 0.306966\n",
      "Step: 8760 loss: 0.386583\n",
      "Step: 8770 loss: 0.703321\n",
      "Step: 8780 loss: 0.769787\n",
      "Step: 8790 loss: 0.641434\n",
      "Step: 8800 loss: 1.11665\n",
      "Step: 8810 loss: 0.518078\n",
      "Step: 8820 loss: 1.76734\n",
      "Step: 8830 loss: 1.11695\n",
      "Step: 8840 loss: 0.718668\n",
      "Step: 8850 loss: 0.550956\n",
      "Step: 8860 loss: 0.519402\n",
      "Step: 8870 loss: 0.722428\n",
      "Step: 8880 loss: 0.581462\n",
      "Step: 8890 loss: 0.64684\n",
      "Step: 8900 loss: 0.539876\n",
      "Step: 8910 loss: 0.626239\n",
      "Step: 8920 loss: 1.1107\n",
      "Step: 8930 loss: 0.233395\n",
      "Step: 8940 loss: 0.201726\n",
      "Step: 8950 loss: 0.6325\n",
      "Step: 8960 loss: 0.67523\n",
      "Step: 8970 loss: 0.337512\n",
      "Step: 8980 loss: 0.902774\n",
      "Step: 8990 loss: 0.588273\n",
      "Step: 9000 loss: 0.810342\n",
      "Saved Model\n",
      "Step: 9010 loss: 0.309222\n",
      "Step: 9020 loss: 0.126274\n",
      "Step: 9030 loss: 0.777647\n",
      "Step: 9040 loss: 0.75982\n",
      "Step: 9050 loss: 0.440398\n",
      "Step: 9060 loss: 0.405009\n",
      "Step: 9070 loss: 0.247344\n",
      "Step: 9080 loss: 0.668735\n",
      "Step: 9090 loss: 0.300758\n",
      "Step: 9100 loss: 0.495606\n",
      "Step: 9110 loss: 0.773503\n",
      "Step: 9120 loss: 0.583587\n",
      "Step: 9130 loss: 0.595793\n",
      "Step: 9140 loss: 1.07535\n",
      "Step: 9150 loss: 1.36291\n",
      "Step: 9160 loss: 0.451779\n",
      "Step: 9170 loss: 0.270297\n",
      "Step: 9180 loss: 0.740584\n",
      "Step: 9190 loss: 1.17597\n",
      "Step: 9200 loss: 0.88153\n",
      "Step: 9210 loss: 1.27793\n",
      "Step: 9220 loss: 0.514497\n",
      "Step: 9230 loss: 0.648059\n",
      "Step: 9240 loss: 0.458908\n",
      "Step: 9250 loss: 0.215948\n",
      "Step: 9260 loss: 0.710211\n",
      "Step: 9270 loss: 0.75332\n",
      "Step: 9280 loss: 1.15643\n",
      "Step: 9290 loss: 0.318019\n",
      "Step: 9300 loss: 0.513615\n",
      "Step: 9310 loss: 0.542612\n",
      "Step: 9320 loss: 0.554531\n",
      "Step: 9330 loss: 0.512759\n",
      "Step: 9340 loss: 0.502476\n",
      "Step: 9350 loss: 1.25656\n",
      "Step: 9360 loss: 0.830807\n",
      "Step: 9370 loss: 0.868371\n",
      "Step: 9380 loss: 0.764316\n",
      "Step: 9390 loss: 1.04495\n",
      "Step: 9400 loss: 0.498914\n",
      "Step: 9410 loss: 0.677148\n",
      "Step: 9420 loss: 0.785966\n",
      "Step: 9430 loss: 0.514933\n",
      "Step: 9440 loss: 0.656146\n",
      "Step: 9450 loss: 0.154395\n",
      "Step: 9460 loss: 0.279507\n",
      "Step: 9470 loss: 0.320672\n",
      "Step: 9480 loss: 0.493123\n",
      "Step: 9490 loss: 0.389025\n",
      "Step: 9500 loss: 0.274387\n",
      "Step: 9510 loss: 0.363604\n",
      "Step: 9520 loss: 0.685665\n",
      "Step: 9530 loss: 1.82888\n",
      "Step: 9540 loss: 1.97931\n",
      "Step: 9550 loss: 0.739951\n",
      "Step: 9560 loss: 0.51066\n",
      "Step: 9570 loss: 1.15677\n",
      "Step: 9580 loss: 0.451697\n",
      "Step: 9590 loss: 0.374197\n",
      "Step: 9600 loss: 1.152\n",
      "Step: 9610 loss: 0.539661\n",
      "Step: 9620 loss: 1.08924\n",
      "Step: 9630 loss: 0.91693\n",
      "Step: 9640 loss: 0.657026\n",
      "Step: 9650 loss: 0.490616\n",
      "Step: 9660 loss: 0.213125\n",
      "Step: 9670 loss: 0.3487\n",
      "Step: 9680 loss: 0.316474\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step: 9690 loss: 0.328597\n",
      "Step: 9700 loss: 0.416362\n",
      "Step: 9710 loss: 0.644922\n",
      "Step: 9720 loss: 0.860482\n",
      "Step: 9730 loss: 0.380986\n",
      "Step: 9740 loss: 0.26137\n",
      "Step: 9750 loss: 0.514324\n",
      "Step: 9760 loss: 1.00244\n",
      "Step: 9770 loss: 0.216283\n",
      "Step: 9780 loss: 0.9377\n",
      "Step: 9790 loss: 0.519854\n",
      "Step: 9800 loss: 0.689478\n",
      "Step: 9810 loss: 0.24088\n",
      "Step: 9820 loss: 0.476405\n",
      "Step: 9830 loss: 0.470158\n",
      "Step: 9840 loss: 0.694973\n",
      "Step: 9850 loss: 1.13002\n",
      "Step: 9860 loss: 0.383115\n",
      "Step: 9870 loss: 0.647203\n",
      "Step: 9880 loss: 0.238431\n",
      "Step: 9890 loss: 0.481531\n",
      "Step: 9900 loss: 0.326495\n",
      "Step: 9910 loss: 0.309332\n",
      "Step: 9920 loss: 1.03055\n",
      "Step: 9930 loss: 0.600358\n",
      "Step: 9940 loss: 0.727766\n",
      "Step: 9950 loss: 1.26204\n",
      "Step: 9960 loss: 0.594648\n",
      "Step: 9970 loss: 0.367045\n",
      "Step: 9980 loss: 0.767916\n",
      "Step: 9990 loss: 0.666148\n",
      "Step: 10000 loss: 1.3215\n",
      "Saved Model\n",
      "Step: 10010 loss: 0.419958\n",
      "Step: 10020 loss: 0.606528\n",
      "Step: 10030 loss: 0.231338\n",
      "Step: 10040 loss: 0.440588\n",
      "Step: 10050 loss: 0.500521\n",
      "Step: 10060 loss: 0.439139\n",
      "Step: 10070 loss: 1.15317\n",
      "Step: 10080 loss: 1.00785\n",
      "Step: 10090 loss: 0.228512\n",
      "Step: 10100 loss: 0.610309\n",
      "Step: 10110 loss: 0.775887\n",
      "Step: 10120 loss: 0.464868\n",
      "Step: 10130 loss: 0.458771\n",
      "Step: 10140 loss: 0.513666\n",
      "Step: 10150 loss: 0.873043\n",
      "Step: 10160 loss: 0.988353\n",
      "Step: 10170 loss: 0.764793\n",
      "Step: 10180 loss: 0.650372\n",
      "Step: 10190 loss: 0.949692\n",
      "Step: 10200 loss: 0.513202\n",
      "Step: 10210 loss: 0.564516\n",
      "Step: 10220 loss: 0.285461\n",
      "Step: 10230 loss: 0.239299\n",
      "Step: 10240 loss: 0.579268\n",
      "Step: 10250 loss: 0.273027\n",
      "Step: 10260 loss: 0.38622\n",
      "Step: 10270 loss: 0.285795\n",
      "Step: 10280 loss: 0.397204\n",
      "Step: 10290 loss: 0.567522\n",
      "Step: 10300 loss: 0.385042\n",
      "Step: 10310 loss: 0.382528\n",
      "Step: 10320 loss: 0.567505\n",
      "Step: 10330 loss: 1.74704\n",
      "Step: 10340 loss: 1.2349\n",
      "Step: 10350 loss: 0.263448\n",
      "Step: 10360 loss: 0.713878\n",
      "Step: 10370 loss: 0.913598\n",
      "Step: 10380 loss: 0.543519\n",
      "Step: 10390 loss: 0.563107\n",
      "Step: 10400 loss: 1.00008\n",
      "Step: 10410 loss: 0.447364\n",
      "Step: 10420 loss: 0.89229\n",
      "Step: 10430 loss: 0.537917\n",
      "Step: 10440 loss: 0.931017\n",
      "Step: 10450 loss: 0.535521\n",
      "Step: 10460 loss: 0.179355\n",
      "Step: 10470 loss: 0.751744\n",
      "Step: 10480 loss: 0.31655\n",
      "Step: 10490 loss: 0.335945\n",
      "Step: 10500 loss: 0.531454\n",
      "Step: 10510 loss: 1.00287\n",
      "Step: 10520 loss: 1.1217\n",
      "Step: 10530 loss: 0.56742\n",
      "Step: 10540 loss: 0.247172\n",
      "Step: 10550 loss: 0.479207\n",
      "Step: 10560 loss: 0.961106\n",
      "Step: 10570 loss: 0.506103\n",
      "Step: 10580 loss: 1.19335\n",
      "Step: 10590 loss: 0.334742\n",
      "Step: 10600 loss: 0.489985\n",
      "Step: 10610 loss: 0.592851\n",
      "Step: 10620 loss: 0.322139\n",
      "Step: 10630 loss: 0.612753\n",
      "Step: 10640 loss: 0.315709\n",
      "Step: 10650 loss: 0.714644\n",
      "Step: 10660 loss: 0.413545\n",
      "Step: 10670 loss: 0.452742\n",
      "Step: 10680 loss: 0.6743\n",
      "Step: 10690 loss: 0.26945\n",
      "Step: 10700 loss: 0.573667\n",
      "Step: 10710 loss: 0.605083\n",
      "Step: 10720 loss: 0.409318\n",
      "Step: 10730 loss: 0.663085\n",
      "Step: 10740 loss: 1.42792\n",
      "Step: 10750 loss: 1.32101\n",
      "Step: 10760 loss: 0.51356\n",
      "Step: 10770 loss: 0.25041\n",
      "Step: 10780 loss: 1.13085\n",
      "Step: 10790 loss: 0.66819\n",
      "Step: 10800 loss: 0.783727\n",
      "Step: 10810 loss: 0.403609\n",
      "Step: 10820 loss: 0.448587\n",
      "Step: 10830 loss: 0.243513\n",
      "Step: 10840 loss: 0.371006\n",
      "Step: 10850 loss: 0.41133\n",
      "Step: 10860 loss: 0.610784\n",
      "Step: 10870 loss: 0.707113\n",
      "Step: 10880 loss: 0.804031\n",
      "Step: 10890 loss: 0.43575\n",
      "Step: 10900 loss: 0.366033\n",
      "Step: 10910 loss: 1.48105\n",
      "Step: 10920 loss: 0.626931\n",
      "Step: 10930 loss: 0.585137\n",
      "Step: 10940 loss: 0.558007\n",
      "Step: 10950 loss: 0.486237\n",
      "Step: 10960 loss: 0.629966\n",
      "Step: 10970 loss: 0.410118\n",
      "Step: 10980 loss: 0.754902\n",
      "Step: 10990 loss: 0.686937\n",
      "Step: 11000 loss: 0.234706\n",
      "Saved Model\n",
      "Step: 11010 loss: 0.614863\n",
      "Step: 11020 loss: 0.276781\n",
      "Step: 11030 loss: 0.537251\n",
      "Step: 11040 loss: 0.346715\n",
      "Step: 11050 loss: 0.161915\n",
      "Step: 11060 loss: 0.290979\n",
      "Step: 11070 loss: 0.611249\n",
      "Step: 11080 loss: 0.312884\n",
      "Step: 11090 loss: 0.270268\n",
      "Step: 11100 loss: 0.406968\n",
      "Step: 11110 loss: 0.720036\n",
      "Step: 11120 loss: 0.735995\n",
      "Step: 11130 loss: 1.516\n",
      "Step: 11140 loss: 1.7004\n",
      "Step: 11150 loss: 0.73793\n",
      "Step: 11160 loss: 0.879179\n",
      "Step: 11170 loss: 0.634442\n",
      "Step: 11180 loss: 0.558393\n",
      "Step: 11190 loss: 0.39819\n",
      "Step: 11200 loss: 0.738318\n",
      "Step: 11210 loss: 0.352104\n",
      "Step: 11220 loss: 1.28691\n",
      "Step: 11230 loss: 0.799631\n",
      "Step: 11240 loss: 0.785863\n",
      "Step: 11250 loss: 1.01694\n",
      "Step: 11260 loss: 0.376241\n",
      "Step: 11270 loss: 0.527879\n",
      "Step: 11280 loss: 0.363038\n",
      "Step: 11290 loss: 0.481607\n",
      "Step: 11300 loss: 0.557649\n",
      "Step: 11310 loss: 0.40495\n",
      "Step: 11320 loss: 0.684205\n",
      "Step: 11330 loss: 0.384933\n",
      "Step: 11340 loss: 0.181003\n",
      "Step: 11350 loss: 0.299229\n",
      "Step: 11360 loss: 1.1752\n",
      "Step: 11370 loss: 0.498978\n",
      "Step: 11380 loss: 0.834911\n",
      "Step: 11390 loss: 0.247975\n",
      "Step: 11400 loss: 0.497774\n",
      "Step: 11410 loss: 0.153501\n",
      "Step: 11420 loss: 0.282068\n",
      "Step: 11430 loss: 0.716906\n",
      "Step: 11440 loss: 0.521458\n",
      "Step: 11450 loss: 0.361381\n",
      "Step: 11460 loss: 0.576687\n",
      "Step: 11470 loss: 0.53191\n",
      "Step: 11480 loss: 0.980059\n",
      "Step: 11490 loss: 0.279761\n",
      "Step: 11500 loss: 0.344067\n",
      "Step: 11510 loss: 0.150723\n",
      "Step: 11520 loss: 0.476029\n",
      "Step: 11530 loss: 0.291162\n",
      "Step: 11540 loss: 0.382647\n",
      "Step: 11550 loss: 1.00661\n",
      "Step: 11560 loss: 0.419241\n",
      "Step: 11570 loss: 0.266786\n",
      "Step: 11580 loss: 0.229647\n",
      "Step: 11590 loss: 0.523193\n",
      "Step: 11600 loss: 0.848834\n",
      "Step: 11610 loss: 0.398511\n",
      "Step: 11620 loss: 0.366715\n",
      "Step: 11630 loss: 0.372157\n",
      "Step: 11640 loss: 0.356357\n",
      "Step: 11650 loss: 0.324321\n",
      "Step: 11660 loss: 0.971444\n",
      "Step: 11670 loss: 0.421418\n",
      "Step: 11680 loss: 0.748\n",
      "Step: 11690 loss: 0.376737\n",
      "Step: 11700 loss: 0.481487\n",
      "Step: 11710 loss: 0.662312\n",
      "Step: 11720 loss: 0.327552\n",
      "Step: 11730 loss: 0.515285\n",
      "Step: 11740 loss: 0.473607\n",
      "Step: 11750 loss: 0.645741\n",
      "Step: 11760 loss: 0.602893\n",
      "Step: 11770 loss: 1.26563\n",
      "Step: 11780 loss: 0.649038\n",
      "Step: 11790 loss: 1.32799\n",
      "Step: 11800 loss: 0.513444\n",
      "Step: 11810 loss: 0.46877\n",
      "Step: 11820 loss: 0.529181\n",
      "Step: 11830 loss: 0.570323\n",
      "Step: 11840 loss: 0.548957\n",
      "Step: 11850 loss: 0.203442\n",
      "Step: 11860 loss: 0.157514\n",
      "Step: 11870 loss: 0.1723\n",
      "Step: 11880 loss: 0.49217\n",
      "Step: 11890 loss: 0.204273\n",
      "Step: 11900 loss: 0.21155\n",
      "Step: 11910 loss: 0.851592\n",
      "Step: 11920 loss: 0.502308\n",
      "Step: 11930 loss: 1.15247\n",
      "Step: 11940 loss: 1.14991\n",
      "Step: 11950 loss: 0.663738\n",
      "Step: 11960 loss: 0.25551\n",
      "Step: 11970 loss: 0.628537\n",
      "Step: 11980 loss: 0.795684\n",
      "Step: 11990 loss: 0.35979\n",
      "Step: 12000 loss: 0.999575\n",
      "Saved Model\n",
      "Step: 12010 loss: 0.699989\n",
      "Step: 12020 loss: 0.824629\n",
      "Step: 12030 loss: 0.322874\n",
      "Step: 12040 loss: 1.16507\n",
      "Step: 12050 loss: 0.339066\n",
      "Step: 12060 loss: 0.304294\n",
      "Step: 12070 loss: 0.360035\n",
      "Step: 12080 loss: 0.746037\n",
      "Step: 12090 loss: 0.296051\n",
      "Step: 12100 loss: 0.938215\n",
      "Step: 12110 loss: 0.652547\n",
      "Step: 12120 loss: 1.05826\n",
      "Step: 12130 loss: 0.366756\n",
      "Step: 12140 loss: 0.243255\n",
      "Step: 12150 loss: 0.390322\n",
      "Step: 12160 loss: 0.711321\n",
      "Step: 12170 loss: 0.385548\n",
      "Step: 12180 loss: 0.995513\n",
      "Step: 12190 loss: 0.230834\n",
      "Step: 12200 loss: 0.406231\n",
      "Step: 12210 loss: 0.150246\n",
      "Step: 12220 loss: 0.242306\n",
      "Step: 12230 loss: 0.506879\n",
      "Step: 12240 loss: 0.407975\n",
      "Step: 12250 loss: 0.335448\n",
      "Step: 12260 loss: 0.655165\n",
      "Step: 12270 loss: 0.596931\n",
      "Step: 12280 loss: 0.606598\n",
      "Step: 12290 loss: 0.221118\n",
      "Step: 12300 loss: 0.520816\n",
      "Step: 12310 loss: 0.288107\n",
      "Step: 12320 loss: 0.495613\n",
      "Step: 12330 loss: 0.30802\n",
      "Step: 12340 loss: 0.51707\n",
      "Step: 12350 loss: 0.658506\n",
      "Step: 12360 loss: 0.443431\n",
      "Step: 12370 loss: 0.170928\n",
      "Step: 12380 loss: 0.195983\n",
      "Step: 12390 loss: 0.711616\n",
      "Step: 12400 loss: 0.636535\n",
      "Step: 12410 loss: 1.00676\n",
      "Step: 12420 loss: 0.793383\n",
      "Step: 12430 loss: 0.542812\n",
      "Step: 12440 loss: 0.381795\n",
      "Step: 12450 loss: 0.446964\n",
      "Step: 12460 loss: 0.341213\n",
      "Step: 12470 loss: 0.500199\n",
      "Step: 12480 loss: 1.1395\n",
      "Step: 12490 loss: 0.360624\n",
      "Step: 12500 loss: 0.410429\n",
      "Step: 12510 loss: 0.725649\n",
      "Step: 12520 loss: 0.419425\n",
      "Step: 12530 loss: 0.630995\n",
      "Step: 12540 loss: 0.374175\n",
      "Step: 12550 loss: 0.614909\n",
      "Step: 12560 loss: 0.920866\n",
      "Step: 12570 loss: 0.451195\n",
      "Step: 12580 loss: 0.573534\n",
      "Step: 12590 loss: 0.751956\n",
      "Step: 12600 loss: 0.234828\n",
      "Step: 12610 loss: 0.410741\n",
      "Step: 12620 loss: 0.67071\n",
      "Step: 12630 loss: 0.794175\n",
      "Step: 12640 loss: 0.226317\n",
      "Step: 12650 loss: 0.259998\n",
      "Step: 12660 loss: 0.160666\n",
      "Step: 12670 loss: 0.616671\n",
      "Step: 12680 loss: 0.282624\n",
      "Step: 12690 loss: 0.513389\n",
      "Step: 12700 loss: 0.294694\n",
      "Step: 12710 loss: 0.277397\n",
      "Step: 12720 loss: 0.175376\n",
      "Step: 12730 loss: 1.16857\n",
      "Step: 12740 loss: 1.05146\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step: 12750 loss: 0.355162\n",
      "Step: 12760 loss: 0.434311\n",
      "Step: 12770 loss: 0.854917\n",
      "Step: 12780 loss: 0.847007\n",
      "Step: 12790 loss: 0.661022\n",
      "Step: 12800 loss: 0.766422\n",
      "Step: 12810 loss: 0.597429\n",
      "Step: 12820 loss: 0.873615\n",
      "Step: 12830 loss: 1.00413\n",
      "Step: 12840 loss: 0.735962\n",
      "Step: 12850 loss: 0.564719\n",
      "Step: 12860 loss: 0.452358\n",
      "Step: 12870 loss: 0.189261\n",
      "Step: 12880 loss: 0.619311\n",
      "Step: 12890 loss: 0.337902\n",
      "Step: 12900 loss: 0.485164\n",
      "Step: 12910 loss: 0.395688\n",
      "Step: 12920 loss: 0.668513\n",
      "Step: 12930 loss: 0.480374\n",
      "Step: 12940 loss: 0.206199\n",
      "Step: 12950 loss: 0.500718\n",
      "Step: 12960 loss: 0.509961\n",
      "Step: 12970 loss: 0.419273\n",
      "Step: 12980 loss: 0.55534\n",
      "Step: 12990 loss: 0.768301\n",
      "Step: 13000 loss: 0.948246\n",
      "Saved Model\n",
      "Step: 13010 loss: 0.479753\n",
      "Step: 13020 loss: 0.118635\n",
      "Step: 13030 loss: 0.683778\n",
      "Step: 13040 loss: 0.841048\n",
      "Step: 13050 loss: 0.393523\n",
      "Step: 13060 loss: 0.558165\n",
      "Step: 13070 loss: 0.246032\n",
      "Step: 13080 loss: 0.525319\n",
      "Step: 13090 loss: 0.524355\n",
      "Step: 13100 loss: 0.370541\n",
      "Step: 13110 loss: 0.389814\n",
      "Step: 13120 loss: 0.369781\n",
      "Step: 13130 loss: 0.307027\n",
      "Step: 13140 loss: 0.480864\n",
      "Step: 13150 loss: 0.889559\n",
      "Step: 13160 loss: 0.303721\n",
      "Step: 13170 loss: 0.210728\n",
      "Step: 13180 loss: 0.230629\n",
      "Step: 13190 loss: 0.506184\n",
      "Step: 13200 loss: 1.37063\n",
      "Step: 13210 loss: 0.430646\n",
      "Step: 13220 loss: 0.452145\n",
      "Step: 13230 loss: 0.400909\n",
      "Step: 13240 loss: 0.159671\n",
      "Step: 13250 loss: 0.135948\n",
      "Step: 13260 loss: 0.796148\n",
      "Step: 13270 loss: 0.612184\n",
      "Step: 13280 loss: 1.48128\n",
      "Step: 13290 loss: 0.292554\n",
      "Step: 13300 loss: 0.722101\n",
      "Step: 13310 loss: 0.48346\n",
      "Step: 13320 loss: 0.27947\n",
      "Step: 13330 loss: 0.351129\n",
      "Step: 13340 loss: 0.217346\n",
      "Step: 13350 loss: 0.579228\n",
      "Step: 13360 loss: 0.871303\n",
      "Step: 13370 loss: 0.507417\n",
      "Step: 13380 loss: 0.43428\n",
      "Step: 13390 loss: 0.633749\n",
      "Step: 13400 loss: 0.570689\n",
      "Step: 13410 loss: 0.585066\n",
      "Step: 13420 loss: 0.343918\n",
      "Step: 13430 loss: 0.363055\n",
      "Step: 13440 loss: 0.449264\n",
      "Step: 13450 loss: 0.426851\n",
      "Step: 13460 loss: 0.578403\n",
      "Step: 13470 loss: 0.756435\n",
      "Step: 13480 loss: 0.560618\n",
      "Step: 13490 loss: 0.174432\n",
      "Step: 13500 loss: 0.471616\n",
      "Step: 13510 loss: 0.400404\n",
      "Step: 13520 loss: 0.328294\n",
      "Step: 13530 loss: 1.23514\n",
      "Step: 13540 loss: 1.15415\n",
      "Step: 13550 loss: 0.321109\n",
      "Step: 13560 loss: 0.274446\n",
      "Step: 13570 loss: 0.89852\n",
      "Step: 13580 loss: 0.546908\n",
      "Step: 13590 loss: 0.351044\n",
      "Step: 13600 loss: 0.821982\n",
      "Step: 13610 loss: 0.281406\n",
      "Step: 13620 loss: 1.03986\n",
      "Step: 13630 loss: 0.836391\n",
      "Step: 13640 loss: 0.416924\n",
      "Step: 13650 loss: 0.853339\n",
      "Step: 13660 loss: 0.221114\n",
      "Step: 13670 loss: 0.296628\n",
      "Step: 13680 loss: 0.363671\n",
      "Step: 13690 loss: 0.35437\n",
      "Step: 13700 loss: 0.225467\n",
      "Step: 13710 loss: 0.478754\n",
      "Step: 13720 loss: 0.815328\n",
      "Step: 13730 loss: 0.16859\n",
      "Step: 13740 loss: 0.17085\n",
      "Step: 13750 loss: 0.882966\n",
      "Step: 13760 loss: 0.732259\n",
      "Step: 13770 loss: 0.167062\n",
      "Step: 13780 loss: 0.476654\n",
      "Step: 13790 loss: 0.229873\n",
      "Step: 13800 loss: 0.696802\n",
      "Step: 13810 loss: 0.291313\n",
      "Step: 13820 loss: 0.217458\n",
      "Step: 13830 loss: 0.385865\n",
      "Step: 13840 loss: 0.467121\n",
      "Step: 13850 loss: 0.475554\n",
      "Step: 13860 loss: 0.237382\n",
      "Step: 13870 loss: 0.242841\n",
      "Step: 13880 loss: 0.418565\n",
      "Step: 13890 loss: 0.489503\n",
      "Step: 13900 loss: 0.199143\n",
      "Step: 13910 loss: 0.231037\n",
      "Step: 13920 loss: 0.653356\n",
      "Step: 13930 loss: 0.192973\n",
      "Step: 13940 loss: 0.250624\n",
      "Step: 13950 loss: 0.503154\n",
      "Step: 13960 loss: 0.574747\n",
      "Step: 13970 loss: 0.305848\n",
      "Step: 13980 loss: 0.280306\n",
      "Step: 13990 loss: 0.433789\n",
      "Step: 14000 loss: 0.890627\n",
      "Saved Model\n",
      "Step: 14010 loss: 0.395895\n",
      "Step: 14020 loss: 0.572337\n",
      "Step: 14030 loss: 0.743238\n",
      "Step: 14040 loss: 0.552312\n",
      "Step: 14050 loss: 0.170759\n",
      "Step: 14060 loss: 0.43207\n",
      "Step: 14070 loss: 0.791652\n",
      "Step: 14080 loss: 1.46772\n",
      "Step: 14090 loss: 0.220416\n",
      "Step: 14100 loss: 0.359149\n",
      "Step: 14110 loss: 0.523517\n",
      "Step: 14120 loss: 0.478242\n",
      "Step: 14130 loss: 0.298729\n",
      "Step: 14140 loss: 0.315641\n",
      "Step: 14150 loss: 0.643096\n",
      "Step: 14160 loss: 0.561859\n",
      "Step: 14170 loss: 0.44241\n",
      "Step: 14180 loss: 0.584549\n",
      "Step: 14190 loss: 0.60588\n",
      "Step: 14200 loss: 0.717226\n",
      "Step: 14210 loss: 0.568865\n",
      "Step: 14220 loss: 0.231914\n",
      "Step: 14230 loss: 1.48431\n",
      "Step: 14240 loss: 0.366651\n",
      "Step: 14250 loss: 0.352524\n",
      "Step: 14260 loss: 0.392902\n",
      "Step: 14270 loss: 0.378524\n",
      "Step: 14280 loss: 0.271865\n",
      "Step: 14290 loss: 0.14569\n",
      "Step: 14300 loss: 0.180116\n",
      "Step: 14310 loss: 0.790975\n",
      "Step: 14320 loss: 0.270387\n",
      "Step: 14330 loss: 1.34943\n",
      "Step: 14340 loss: 1.59851\n",
      "Step: 14350 loss: 0.434444\n",
      "Step: 14360 loss: 0.315699\n",
      "Step: 14370 loss: 0.886179\n",
      "Step: 14380 loss: 0.678958\n",
      "Step: 14390 loss: 0.324433\n",
      "Step: 14400 loss: 0.742231\n",
      "Step: 14410 loss: 0.295454\n",
      "Step: 14420 loss: 1.0744\n",
      "Step: 14430 loss: 0.621016\n",
      "Step: 14440 loss: 0.347393\n",
      "Step: 14450 loss: 0.591816\n",
      "Step: 14460 loss: 0.205549\n",
      "Step: 14470 loss: 0.363721\n",
      "Step: 14480 loss: 0.323497\n",
      "Step: 14490 loss: 0.259569\n",
      "Step: 14500 loss: 0.601972\n",
      "Step: 14510 loss: 0.811291\n",
      "Step: 14520 loss: 1.3741\n",
      "Step: 14530 loss: 0.250739\n",
      "Step: 14540 loss: 0.296215\n",
      "Step: 14550 loss: 0.323266\n",
      "Step: 14560 loss: 0.833058\n",
      "Step: 14570 loss: 0.233212\n",
      "Step: 14580 loss: 0.458378\n",
      "Step: 14590 loss: 0.158171\n",
      "Step: 14600 loss: 0.553959\n",
      "Step: 14610 loss: 0.330103\n",
      "Step: 14620 loss: 0.188312\n",
      "Step: 14630 loss: 0.734145\n",
      "Step: 14640 loss: 0.249814\n",
      "Step: 14650 loss: 0.39825\n",
      "Step: 14660 loss: 0.294617\n",
      "Step: 14670 loss: 0.288885\n",
      "Step: 14680 loss: 0.586226\n",
      "Step: 14690 loss: 0.287421\n",
      "Step: 14700 loss: 0.520058\n",
      "Step: 14710 loss: 0.797291\n",
      "Step: 14720 loss: 0.312173\n",
      "Step: 14730 loss: 0.920242\n",
      "Step: 14740 loss: 0.86331\n",
      "Step: 14750 loss: 1.63834\n",
      "Step: 14760 loss: 0.392779\n",
      "Step: 14770 loss: 0.143108\n",
      "Step: 14780 loss: 0.343614\n",
      "Step: 14790 loss: 0.905016\n",
      "Step: 14800 loss: 0.571828\n",
      "Step: 14810 loss: 0.730039\n",
      "Step: 14820 loss: 0.296898\n",
      "Step: 14830 loss: 0.247013\n",
      "Step: 14840 loss: 0.194877\n",
      "Step: 14850 loss: 0.157484\n",
      "Step: 14860 loss: 0.561919\n",
      "Step: 14870 loss: 0.443817\n",
      "Step: 14880 loss: 0.763551\n",
      "Step: 14890 loss: 0.177762\n",
      "Step: 14900 loss: 0.558944\n",
      "Step: 14910 loss: 0.496078\n",
      "Step: 14920 loss: 0.591225\n",
      "Step: 14930 loss: 0.283086\n",
      "Step: 14940 loss: 0.651366\n",
      "Step: 14950 loss: 1.17431\n",
      "Step: 14960 loss: 0.349183\n",
      "Step: 14970 loss: 0.639292\n",
      "Step: 14980 loss: 0.447799\n",
      "Step: 14990 loss: 0.725273\n",
      "Step: 15000 loss: 0.302986\n",
      "Saved Model\n",
      "Step: 15010 loss: 0.296301\n",
      "Step: 15020 loss: 0.268009\n",
      "Step: 15030 loss: 0.689316\n",
      "Step: 15040 loss: 0.459\n",
      "Step: 15050 loss: 0.325568\n",
      "Step: 15060 loss: 0.269643\n",
      "Step: 15070 loss: 0.23354\n",
      "Step: 15080 loss: 0.29648\n",
      "Step: 15090 loss: 0.898724\n",
      "Step: 15100 loss: 0.244206\n",
      "Step: 15110 loss: 0.342625\n",
      "Step: 15120 loss: 0.177682\n",
      "Step: 15130 loss: 0.537046\n",
      "Step: 15140 loss: 1.17288\n",
      "Step: 15150 loss: 0.35734\n",
      "Step: 15160 loss: 0.342657\n",
      "Step: 15170 loss: 0.869368\n",
      "Step: 15180 loss: 0.869892\n",
      "Step: 15190 loss: 0.294338\n",
      "Step: 15200 loss: 0.783484\n",
      "Step: 15210 loss: 0.462689\n",
      "Step: 15220 loss: 0.685338\n",
      "Step: 15230 loss: 0.383195\n",
      "Step: 15240 loss: 0.983189\n",
      "Step: 15250 loss: 0.37734\n",
      "Step: 15260 loss: 0.269836\n",
      "Step: 15270 loss: 0.319959\n",
      "Step: 15280 loss: 0.272234\n",
      "Step: 15290 loss: 0.2\n",
      "Step: 15300 loss: 0.80537\n",
      "Step: 15310 loss: 0.587087\n",
      "Step: 15320 loss: 0.728249\n",
      "Step: 15330 loss: 0.123841\n",
      "Step: 15340 loss: 0.16737\n",
      "Step: 15350 loss: 0.332795\n",
      "Step: 15360 loss: 0.763571\n",
      "Step: 15370 loss: 0.176487\n",
      "Step: 15380 loss: 0.714262\n",
      "Step: 15390 loss: 0.469731\n",
      "Step: 15400 loss: 0.290347\n",
      "Step: 15410 loss: 0.235977\n",
      "Step: 15420 loss: 0.339888\n",
      "Step: 15430 loss: 0.473294\n",
      "Step: 15440 loss: 0.869291\n",
      "Step: 15450 loss: 0.238842\n",
      "Step: 15460 loss: 0.293904\n",
      "Step: 15470 loss: 0.457864\n",
      "Step: 15480 loss: 0.503343\n",
      "Step: 15490 loss: 0.192355\n",
      "Step: 15500 loss: 0.199567\n",
      "Step: 15510 loss: 0.498357\n",
      "Step: 15520 loss: 0.802334\n",
      "Step: 15530 loss: 0.351317\n",
      "Step: 15540 loss: 0.802591\n",
      "Step: 15550 loss: 1.76407\n",
      "Step: 15560 loss: 0.776209\n",
      "Step: 15570 loss: 0.168274\n",
      "Step: 15580 loss: 0.466903\n",
      "Step: 15590 loss: 0.737156\n",
      "Step: 15600 loss: 1.1007\n",
      "Step: 15610 loss: 0.373782\n",
      "Step: 15620 loss: 0.963291\n",
      "Step: 15630 loss: 0.609053\n",
      "Step: 15640 loss: 0.264907\n",
      "Step: 15650 loss: 0.198669\n",
      "Step: 15660 loss: 0.439873\n",
      "Step: 15670 loss: 0.465342\n",
      "Step: 15680 loss: 0.826577\n",
      "Step: 15690 loss: 0.255699\n",
      "Step: 15700 loss: 0.34848\n",
      "Step: 15710 loss: 0.7286\n",
      "Step: 15720 loss: 0.268051\n",
      "Step: 15730 loss: 0.228008\n",
      "Step: 15740 loss: 0.26479\n",
      "Step: 15750 loss: 1.18292\n",
      "Step: 15760 loss: 0.394825\n",
      "Step: 15770 loss: 0.412338\n",
      "Step: 15780 loss: 0.576716\n",
      "Step: 15790 loss: 0.412908\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step: 15800 loss: 0.417705\n",
      "Step: 15810 loss: 0.814612\n",
      "Step: 15820 loss: 0.505755\n",
      "Step: 15830 loss: 0.649637\n",
      "Step: 15840 loss: 0.261104\n",
      "Step: 15850 loss: 0.529636\n",
      "Step: 15860 loss: 0.129469\n",
      "Step: 15870 loss: 0.252445\n",
      "Step: 15880 loss: 0.34366\n",
      "Step: 15890 loss: 0.299679\n",
      "Step: 15900 loss: 0.125859\n",
      "Step: 15910 loss: 0.368362\n",
      "Step: 15920 loss: 0.424977\n",
      "Step: 15930 loss: 1.15646\n",
      "Step: 15940 loss: 0.680904\n",
      "Step: 15950 loss: 0.70376\n",
      "Step: 15960 loss: 0.297565\n",
      "Step: 15970 loss: 0.591049\n",
      "Step: 15980 loss: 0.786074\n",
      "Step: 15990 loss: 0.28476\n",
      "Step: 16000 loss: 0.715381\n",
      "Saved Model\n"
     ]
    }
   ],
   "source": [
     "# Training loop: num_epochs full passes over the pre-batched training data.\n",
     "# Relies on path1_len/path2_len, *_ids arrays and rel_ids built in earlier cells,\n",
     "# and on the graph nodes (optimizer, total_loss, global_step) defined above.\n",
     "num_epochs = 20\n",
     "for i in range(num_epochs):\n",
     "    for j in range(num_batches):\n",
     "        # Slice the j-th mini-batch; each placeholder is fed the two\n",
     "        # dependency-path channels stacked on axis 0 (shape [2, batch_size, ...]).\n",
     "        path_dict = [path1_len[j*batch_size:(j+1)*batch_size], path2_len[j*batch_size:(j+1)*batch_size]]\n",
     "        word_dict = [word_p1_ids[j*batch_size:(j+1)*batch_size], word_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "        pos_dict = [pos_p1_ids[j*batch_size:(j+1)*batch_size], pos_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "        dep_dict = [dep_p1_ids[j*batch_size:(j+1)*batch_size], dep_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "        y_dict = rel_ids[j*batch_size:(j+1)*batch_size]\n",
     "        \n",
     "        feed_dict = {\n",
     "            path_length:path_dict,\n",
     "            word_ids:word_dict,\n",
     "            pos_ids:pos_dict,\n",
     "            dep_ids:dep_dict,\n",
     "            y:y_dict}\n",
     "        # One optimizer step; global_step yields the running step counter.\n",
     "        _, loss, step = sess.run([optimizer, total_loss, global_step], feed_dict)\n",
     "        if step%10==0:\n",
     "            print(\"Step:\", step, \"loss:\",loss)\n",
     "        # Checkpoint every 1000 steps; saver overwrites the same model file.\n",
     "        if step % 1000 == 0:\n",
     "            saver.save(sess, model_dir + '/model')\n",
     "            print(\"Saved Model\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "training accuracy 84.0375\n"
     ]
    }
   ],
   "source": [
     "# training accuracy\n",
     "# Run the trained model over the training batches and compare predicted\n",
     "# relation ids against the gold labels in rel_ids.\n",
     "all_predictions = []\n",
     "for j in range(num_batches):\n",
     "    path_dict = [path1_len[j*batch_size:(j+1)*batch_size], path2_len[j*batch_size:(j+1)*batch_size]]\n",
     "    word_dict = [word_p1_ids[j*batch_size:(j+1)*batch_size], word_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "    pos_dict = [pos_p1_ids[j*batch_size:(j+1)*batch_size], pos_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "    dep_dict = [dep_p1_ids[j*batch_size:(j+1)*batch_size], dep_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "    y_dict = rel_ids[j*batch_size:(j+1)*batch_size]\n",
     "\n",
     "    feed_dict = {\n",
     "        path_length:path_dict,\n",
     "        word_ids:word_dict,\n",
     "        pos_ids:pos_dict,\n",
     "        dep_ids:dep_dict,\n",
     "        y:y_dict}\n",
     "    batch_predictions = sess.run(predictions, feed_dict)\n",
     "    all_predictions.append(batch_predictions)\n",
     "\n",
     "# Flatten the per-batch prediction arrays into one flat list of ids.\n",
     "y_pred = []\n",
     "for i in range(num_batches):\n",
     "    for pred in all_predictions[i]:\n",
     "        y_pred.append(pred)\n",
     "\n",
     "# NOTE(review): only the first num_batches*batch_size examples are scored;\n",
     "# any remainder beyond a full batch is silently ignored.\n",
     "count = 0\n",
     "for i in range(batch_size*num_batches):\n",
     "    count += y_pred[i]==rel_ids[i]\n",
     "accuracy = count/(batch_size*num_batches) * 100\n",
     "\n",
     "print(\"training accuracy\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "f = open(data_dir + '/test_paths', 'rb')\n",
    "word_p1, word_p2, dep_p1, dep_p2, pos_p1, pos_p2 = pickle.load(f)\n",
    "f.close()\n",
    "\n",
    "relations = []\n",
    "for line in open(data_dir + '/test_relations.txt'):\n",
    "    relations.append(line.strip().split()[0])\n",
    "\n",
    "length = len(word_p1)\n",
    "num_batches = int(length/batch_size)\n",
    "\n",
    "for i in range(length):\n",
    "    for j, word in enumerate(word_p1[i]):\n",
    "        word = word.lower()\n",
    "        word_p1[i][j] = word if word in word2id else unknown_token \n",
    "    for k, word in enumerate(word_p2[i]):\n",
    "        word = word.lower()\n",
    "        word_p2[i][k] = word if word in word2id else unknown_token \n",
    "    for l, d in enumerate(dep_p1[i]):\n",
    "        dep_p1[i][l] = d if d in dep2id else 'OTH'\n",
    "    for m, d in enumerate(dep_p2[i]):\n",
    "        dep_p2[i][m] = d if d in dep2id else 'OTH'\n",
    "\n",
    "word_p1_ids = np.ones([length, max_len_path],dtype=int)\n",
    "word_p2_ids = np.ones([length, max_len_path],dtype=int)\n",
    "pos_p1_ids = np.ones([length, max_len_path],dtype=int)\n",
    "pos_p2_ids = np.ones([length, max_len_path],dtype=int)\n",
    "dep_p1_ids = np.ones([length, max_len_path],dtype=int)\n",
    "dep_p2_ids = np.ones([length, max_len_path],dtype=int)\n",
    "rel_ids = np.array([rel2id[rel] for rel in relations])\n",
    "path1_len = np.array([len(w) for w in word_p1], dtype=int)\n",
    "path2_len = np.array([len(w) for w in word_p2])\n",
    "\n",
    "for i in range(length):\n",
    "    for j, w in enumerate(word_p1[i]):\n",
    "        word_p1_ids[i][j] = word2id[w]\n",
    "    for j, w in enumerate(word_p2[i]):\n",
    "        word_p2_ids[i][j] = word2id[w]\n",
    "    for j, w in enumerate(pos_p1[i]):\n",
    "        pos_p1_ids[i][j] = pos_tag(w)\n",
    "    for j, w in enumerate(pos_p2[i]):\n",
    "        pos_p2_ids[i][j] = pos_tag(w)\n",
    "    for j, w in enumerate(dep_p1[i]):\n",
    "        dep_p1_ids[i][j] = dep2id[w]\n",
    "    for j, w in enumerate(dep_p2[i]):\n",
    "        dep_p2_ids[i][j] = dep2id[w]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test accuracy 60.405904059\n"
     ]
    }
   ],
   "source": [
     "# test \n",
     "# Evaluate the model on the test batches prepared in the previous cell and\n",
     "# report plain accuracy against rel_ids (now holding the test labels).\n",
     "all_predictions = []\n",
     "for j in range(num_batches):\n",
     "    path_dict = [path1_len[j*batch_size:(j+1)*batch_size], path2_len[j*batch_size:(j+1)*batch_size]]\n",
     "    word_dict = [word_p1_ids[j*batch_size:(j+1)*batch_size], word_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "    pos_dict = [pos_p1_ids[j*batch_size:(j+1)*batch_size], pos_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "    dep_dict = [dep_p1_ids[j*batch_size:(j+1)*batch_size], dep_p2_ids[j*batch_size:(j+1)*batch_size]]\n",
     "    y_dict = rel_ids[j*batch_size:(j+1)*batch_size]\n",
     "\n",
     "    feed_dict = {\n",
     "        path_length:path_dict,\n",
     "        word_ids:word_dict,\n",
     "        pos_ids:pos_dict,\n",
     "        dep_ids:dep_dict,\n",
     "        y:y_dict}\n",
     "    batch_predictions = sess.run(predictions, feed_dict)\n",
     "    all_predictions.append(batch_predictions)\n",
     "\n",
     "# Flatten the per-batch prediction arrays into one flat list of ids.\n",
     "y_pred = []\n",
     "for i in range(num_batches):\n",
     "    for pred in all_predictions[i]:\n",
     "        y_pred.append(pred)\n",
     "\n",
     "# NOTE(review): only num_batches*batch_size examples are scored; the final\n",
     "# partial batch of the test set (if any) is excluded from this accuracy.\n",
     "count = 0\n",
     "for i in range(batch_size*num_batches):\n",
     "    count += y_pred[i]==rel_ids[i]\n",
     "accuracy = count/(batch_size*num_batches) * 100\n",
     "\n",
     "print(\"test accuracy\", accuracy)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
