{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#  1. Prepare data\n",
    "* read from redis\n",
    "* parse the title and abstract\n",
    "* calculate the term frequency and document frequency\n",
     "* build the character-to-index and index-to-character dictionaries"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "import redis\n",
    "import json\n",
    "import h5py\n",
    "import pickle\n",
    "import numpy as np\n",
    "import random\n",
    "import tensorflow as tf\n",
    "from tensorflow.contrib import rnn\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "class Char:\n",
     "    \"\"\"A single character together with its corpus statistics.\n",
     "\n",
     "    val -- the character itself\n",
     "    tf  -- term frequency: total occurrences across the whole corpus\n",
     "    df  -- document frequency: number of documents containing the char\n",
     "    \"\"\"\n",
     "    def __init__(self,val,tf,df):\n",
     "        self.val = val\n",
     "        self.tf = tf\n",
     "        self.df = df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "def parse_all_crawled_data():\n",
     "    \"\"\"Read every crawled document from redis and return a list of\n",
     "    (group_id, title, abstract) tuples.\n",
     "\n",
     "    Relies on the global redis client `r` being connected.\n",
     "    NOTE(review): assumes every stored value is a JSON object carrying\n",
     "    group_id/title/abstract keys; a missing title or abstract would make\n",
     "    .replace raise AttributeError since .get() returns None -- confirm.\n",
     "    \"\"\"\n",
     "    res = []\n",
     "    keys =  r.keys()\n",
     "    print \"Get [%s] Docs\"%(len(keys))\n",
     "    for data in r.mget(keys):\n",
     "        data = json.loads(data)\n",
     "        key = data.get(\"group_id\")\n",
     "        # tabs are replaced by spaces in both text fields\n",
     "        title = data.get(\"title\").replace('\\t',' ')\n",
     "        abstract = data.get(\"abstract\").replace('\\t',' ')\n",
     "        res.append((key,title,abstract))\n",
     "    return res    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "def cal_char_tf_df(corpus):\n",
     "    \"\"\"Count term frequency (tf) and document frequency (df) per character.\n",
     "\n",
     "    corpus: list of (key, title, abstract) tuples.\n",
     "    Returns a dict mapping character -> Char(val, tf, df); text is\n",
     "    lowercased before counting.\n",
     "    \"\"\"\n",
     "    chars = {}\n",
     "    for doc in corpus:    \n",
     "        title, abstract = doc[1],doc[2]\n",
     "        # traverse every char in the text\n",
     "        text = (title + abstract).lower()\n",
     "        # accumulate the term frequency\n",
     "        for char in text:\n",
     "            if not chars.get(char):\n",
     "                chars[char] = Char(val = char,tf = 1,df = 0)\n",
     "            else:\n",
     "                chars[char].tf += 1\n",
     "        # accumulate the doc frequency (each char counted at most once per doc)\n",
     "        for char in set(text):\n",
     "            chars[char].df += 1\n",
     "    return chars"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "def build_idx_for_chars_tf_df(chars,tf_thres = 12,df_thres = 6):\n",
    "    id_beg = 0\n",
    "    id_eos = 1\n",
    "    id_emp = 2\n",
    "    id_unk = 3\n",
    "    \n",
    "    start_idx = id_unk + 1\n",
    "\n",
    "    char2idx = {}\n",
    "    idx2char = {}\n",
    "\n",
    "    char2idx['<eos>'] = id_eos\n",
    "    char2idx['<unk>'] = id_unk\n",
    "    char2idx['<emp>'] = id_emp\n",
    "    char2idx['<beg>'] = id_beg\n",
    "    #filter out tf>20 and df > 10 terms\n",
    "    chars = filter(lambda char:char.tf > tf_thres and char.df > df_thres,chars)\n",
    "    char2idx.update(dict([(char.val,start_idx + idx) for idx,char in enumerate(chars)]))\n",
    "    idx2char = dict([(idx,char) for char,idx in char2idx.items()])\n",
    "    return char2idx, idx2char\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "def prt(label, x):\n",
     "    # Pretty-print a sequence of char indices: prints the label, then each\n",
     "    # character looked up in the global idx2char dict, skipping <emp> padding.\n",
     "    # Relies on globals id_emp and idx2char being defined.\n",
     "    print label+':',\n",
     "    for w in x:\n",
     "        if w == id_emp:\n",
     "            continue\n",
     "        print idx2char[w],\n",
     "    print"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "# 1. Prepare Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Get [26658] Docs\n",
      "Got [5692] Uniq charaters\n",
      "the Top 10 are:\n",
      "，\t156756\t25825\n",
      "的\t103125\t24579\n",
      "。\t52691\t21783\n",
      "一\t44808\t19812\n",
      "是\t44700\t18907\n",
      "了\t34809\t16933\n",
      "不\t32371\t16205\n",
      "人\t27265\t13617\n",
      "有\t27182\t15050\n",
      "在\t25308\t15057\n",
      "vocabsize is :[3525]\n",
      "H: 手 机 曲 面 屏 到 底 有 什 么 用 ？\n",
      "D: 对 于 手 机 的 曲 面 屏 ， 其 实 并 不 是 一 定 要 有 什 么 作 用 ， 首 先 它 和 现 在 大 部 分 同 质 化 手 机 完 全 不 同 ， 而 且 带 来 了 更 好 的 手 感 和 视 觉 效 果 ， 现 在 的 曲 面 屏 都 是 高 端 手 机 代 表 ， 因 为 它 对 工 艺 技 术 的 有 更 高 的 要 求 ， 所 以 并 不 是 随 便 一 家 手 机 厂 商 都 可 以 做 曲 面 屏 的 。\n",
      "1000\n",
      "25000\n"
     ]
    }
   ],
   "source": [
    "id_beg = 0\n",
    "id_eos = 1\n",
    "id_emp = 2\n",
    "id_unk = 3\n",
    "\n",
    "total_samples = 26000\n",
    "val_samples = 1000\n",
    "train_samples = total_samples - val_samples\n",
    "\n",
    "\n",
    "DataFile = \"data/basic_data_tf.pkl\"\n",
    "UseStoredData = False\n",
    "\n",
    "if UseStoredData:\n",
    "    print \"use the stored data\"\n",
    "    char2idx, idx2char,X_train, X_test, Y_train, Y_test = pickle.load(open(DataFile))\n",
    "else:\n",
    "    r = redis.StrictRedis(host='localhost', port=6379, db=0)\n",
    "    corpus = parse_all_crawled_data()\n",
    "    chars_dict = cal_char_tf_df(corpus)\n",
    "\n",
    "    print \"Got [%s] Uniq charaters\"%len(chars_dict)\n",
    "    chars_tf_reverse = sorted(chars_dict.values(),key = lambda x:x.tf,reverse = True)\n",
    "    print \"the Top 10 are:\"\n",
    "    print \"\\n\".join([\"%s\\t%s\\t%s\" %(char.val,char.tf,char.df) for char in chars_tf_reverse[:10]])\n",
    "\n",
    "    char2idx, idx2char = build_idx_for_chars_tf_df(chars_dict.values())\n",
    "    titles = [[char2idx.get(char,id_unk) for char in doc[1]] for doc in corpus][:total_samples]\n",
    "    abstracts = [[char2idx.get(char,id_unk) for char in doc[2]] for doc in corpus][:total_samples]\n",
    "        \n",
    "    from sklearn.model_selection import train_test_split\n",
    "    X_train, X_test, Y_train, Y_test = train_test_split(abstracts, titles, test_size=val_samples, random_state=10)\n",
    "    len(X_train), len(Y_train), len(X_test), len(Y_test)\n",
    "\n",
    "    pickle.dump((char2idx, idx2char,X_train, X_test, Y_train, Y_test),open(DataFile,\"wb\"),-1)\n",
    "    \n",
    "\n",
    "\n",
    "vocab_size = len(char2idx)\n",
    "print \"vocabsize is :[%d]\"%vocab_size\n",
    "i = random.randint(0,len(X_train))\n",
    "prt('H',Y_train[i])\n",
    "prt('D',X_train[i])\n",
    "\n",
    "print len(X_test)\n",
    "print len(X_train)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 2. Model"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "## 1. Parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# training hyper-parameters\n",
     "learning_rate = 0.001\n",
     "batch_size = 32\n",
     "display_step = 10  # log loss every N batches (see training output below)\n",
     "dropout_keep_prob = 1.0  # 1.0 disables dropout in the CNN encoder\n",
     "\n",
     "\n",
     "# sequence length limits, in characters\n",
     "maxlena=150 # 0 - if we dont want to use description at all\n",
     "maxlent=40\n",
     "maxlen = maxlena + maxlent\n",
     "maxlenh = maxlent  # h = headline/title (decoder side)\n",
     "maxlend = maxlena  # d = description/abstract (encoder side)\n",
     "\n",
     "vocab_size = len(char2idx)\n",
     "embedding_size = 100\n",
     "\n",
     "# short aliases for the special token ids\n",
     "empty = id_emp\n",
     "eos = id_eos\n",
     "unk = id_unk\n",
     "beg = id_beg\n",
     "\n",
     "\n",
     "# for cnn encoder use\n",
     "filter_sizes = [2,3,4,5,6,8,10,13]\n",
     "num_filters = 16  # per filter size; total features = 16 * 8 = 128\n",
     "\n",
     "# for rnn decoder use: GRU cell memory size, same width as the encoder output\n",
     "memory_dim = 128"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# graph inputs: the encoder consumes abstract char ids (maxlend long),\n",
     "# the decoder consumes and predicts title char ids (maxlenh long)\n",
     "encoder_inputs = tf.placeholder(tf.int32, shape=[None,maxlend], name='encoder_inputs')\n",
     "decoder_targets = tf.placeholder(tf.int32,shape=(None, maxlenh), name='decoder_targets')\n",
     "decoder_inputs = tf.placeholder(tf.int32, [None, maxlenh], name = \"decoder_inputs\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# character embedding matrix, shared by encoder and decoder lookups\n",
     "embeddings = tf.Variable(\n",
     "    tf.random_uniform([vocab_size, embedding_size], -1.0, 1.0))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# cnn as encoder\n",
     "def CNNEncoder(encoder_inputs):\n",
     "    \"\"\"Encode a batch of char-id sequences with parallel conv + max-pool branches.\n",
     "\n",
     "    Returns a [batch, num_filters * len(filter_sizes)] tensor, later used as\n",
     "    the initial state of the GRU decoder -- so its width must equal\n",
     "    memory_dim (here 16 filters * 8 filter sizes = 128).\n",
     "    \"\"\"\n",
     "    #train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])\n",
     "    encoder_inputs_embedded = tf.nn.embedding_lookup(embeddings, encoder_inputs)\n",
     "    # expand one dim so conv2d sees a single-channel image: [batch, len, emb, 1]\n",
     "    embed_expanded = tf.expand_dims(encoder_inputs_embedded,-1)\n",
     "\n",
     "    pooled_outputs = []\n",
     "    for i, filter_size in enumerate(filter_sizes):\n",
     "        with tf.name_scope(\"conv-maxpool-%s\" % filter_size):\n",
     "            # Convolution Layer: filter spans the full embedding width\n",
     "            filter_shape = [filter_size, embedding_size, 1, num_filters]\n",
     "            W = tf.Variable(tf.truncated_normal(filter_shape, stddev=0.1), name=\"W\")\n",
     "            b = tf.Variable(tf.constant(0.1, shape=[num_filters]), name=\"b\")\n",
     "            conv = tf.nn.conv2d(\n",
     "                embed_expanded,\n",
     "                W,  \n",
     "                strides=[1, 1, 1, 1], \n",
     "                padding=\"VALID\",\n",
     "                name=\"conv\")\n",
     "            # Apply nonlinearity\n",
     "            h = tf.nn.relu(tf.nn.bias_add(conv, b), name=\"relu\")\n",
     "            #print h.shape\n",
     "            # Max-pooling over the outputs: the ksize covers every valid conv\n",
     "            # position, so each branch reduces to one value per filter\n",
     "            pooled = tf.nn.max_pool(\n",
     "                h,  \n",
     "                ksize=[1, maxlend - filter_size + 1, 1, 1], \n",
     "                strides=[1, 1, 1, 1], \n",
     "                padding='VALID',\n",
     "                name=\"pool\")          \n",
     "            pooled_outputs.append(pooled)\n",
     "    # Combine all the pooled features into [batch, num_filters_total]\n",
     "    num_filters_total = num_filters * len(filter_sizes)\n",
     "    h_pool = tf.concat(pooled_outputs,3)\n",
     "    #print h_pool.shape\n",
     "    h_pool_flat = tf.reshape(h_pool, [-1, num_filters_total])\n",
     "    #print h_pool_flat.shape\n",
     "\n",
     "    # dropout is a no-op while dropout_keep_prob == 1.0\n",
     "    with tf.name_scope(\"dropout\"):\n",
     "        h_drop = tf.nn.dropout(h_pool_flat, dropout_keep_prob)\n",
     "    return h_drop"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "## 2. RNN as Decoder"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "def RNNDecoder(encoder_state,decoder_inputs):\n",
     "    \"\"\"Run a single-layer GRU over the embedded decoder inputs.\n",
     "\n",
     "    encoder_state is used as the initial GRU state, so its width must\n",
     "    equal memory_dim. Returns (outputs, final_state) from dynamic_rnn.\n",
     "    NOTE(review): building this subgraph twice in the same default graph\n",
     "    raises 'Variable plain_decoder1/... already exists' (see the error\n",
     "    output of the cell below) -- reuse handling on the variable scope\n",
     "    would be needed to make the build cell safely re-runnable.\n",
     "    \"\"\"\n",
     "    decoder_inputs_embedded = tf.nn.embedding_lookup(embeddings, decoder_inputs)\n",
     "    #from tensorflow.models.rnn import rnn_cell, seq2seq\n",
     "    cell = rnn.GRUCell(memory_dim)\n",
     "    decoder_outputs, decoder_final_state = tf.nn.dynamic_rnn(\n",
     "        cell, decoder_inputs_embedded,\n",
     "        initial_state=encoder_state,\n",
     "        dtype=tf.float32,scope=\"plain_decoder1\")\n",
     "    return decoder_outputs, decoder_final_state \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "ename": "ValueError",
     "evalue": "Variable plain_decoder1/gru_cell/gates/weights already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:\n\n  File \"/home/hewei/.local/lib/python2.7/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py\", line 1044, in _linear\n    _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype)\n  File \"/home/hewei/.local/lib/python2.7/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py\", line 150, in __call__\n    [inputs, state], 2 * self._num_units, True, 1.0))\n  File \"<ipython-input-14-d003c7c96826>\", line 8, in RNNDecoder\n    dtype=tf.float32,scope=\"plain_decoder1\")\n",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m\u001b[0m",
      "\u001b[0;31mValueError\u001b[0mTraceback (most recent call last)",
      "\u001b[0;32m<ipython-input-32-bd4f53fa0c1c>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0mencoder_state\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mCNNEncoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mencoder_inputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mdecoder_outputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mRNNDecoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mencoder_state\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mdecoder_inputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0mdecoder_logits\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcontrib\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlinear\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdecoder_outputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvocab_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m \u001b[0mlabels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mone_hot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdecoder_targets\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdepth\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvocab_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfloat32\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-14-d003c7c96826>\u001b[0m in \u001b[0;36mRNNDecoder\u001b[0;34m(encoder_state, decoder_inputs)\u001b[0m\n\u001b[1;32m      6\u001b[0m         \u001b[0mcell\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdecoder_inputs_embedded\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      7\u001b[0m         \u001b[0minitial_state\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mencoder_state\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m         dtype=tf.float32,scope=\"plain_decoder1\")\n\u001b[0m\u001b[1;32m      9\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mdecoder_outputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdecoder_final_state\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc\u001b[0m in \u001b[0;36mdynamic_rnn\u001b[0;34m(cell, inputs, sequence_length, initial_state, dtype, parallel_iterations, swap_memory, time_major, scope)\u001b[0m\n\u001b[1;32m    551\u001b[0m         \u001b[0mswap_memory\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mswap_memory\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    552\u001b[0m         \u001b[0msequence_length\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msequence_length\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 553\u001b[0;31m         dtype=dtype)\n\u001b[0m\u001b[1;32m    554\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    555\u001b[0m     \u001b[0;31m# Outputs of _dynamic_rnn_loop are always shaped [time, batch, depth].\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc\u001b[0m in \u001b[0;36m_dynamic_rnn_loop\u001b[0;34m(cell, inputs, initial_state, parallel_iterations, swap_memory, sequence_length, dtype)\u001b[0m\n\u001b[1;32m    718\u001b[0m       \u001b[0mloop_vars\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moutput_ta\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    719\u001b[0m       \u001b[0mparallel_iterations\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mparallel_iterations\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 720\u001b[0;31m       swap_memory=swap_memory)\n\u001b[0m\u001b[1;32m    721\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    722\u001b[0m   \u001b[0;31m# Unpack final output if not using output tuples.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/control_flow_ops.pyc\u001b[0m in \u001b[0;36mwhile_loop\u001b[0;34m(cond, body, loop_vars, shape_invariants, parallel_iterations, back_prop, swap_memory, name)\u001b[0m\n\u001b[1;32m   2621\u001b[0m     \u001b[0mcontext\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mWhileContext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparallel_iterations\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mback_prop\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mswap_memory\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2622\u001b[0m     \u001b[0mops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_to_collection\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mGraphKeys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mWHILE_CONTEXT\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2623\u001b[0;31m     \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mBuildLoop\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcond\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbody\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mloop_vars\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshape_invariants\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   2624\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2625\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/control_flow_ops.pyc\u001b[0m in \u001b[0;36mBuildLoop\u001b[0;34m(self, pred, body, loop_vars, shape_invariants)\u001b[0m\n\u001b[1;32m   2454\u001b[0m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mEnter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2455\u001b[0m       original_body_result, exit_vars = self._BuildLoop(\n\u001b[0;32m-> 2456\u001b[0;31m           pred, body, original_loop_vars, loop_vars, shape_invariants)\n\u001b[0m\u001b[1;32m   2457\u001b[0m     \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2458\u001b[0m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mExit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/control_flow_ops.pyc\u001b[0m in \u001b[0;36m_BuildLoop\u001b[0;34m(self, pred, body, original_loop_vars, loop_vars, shape_invariants)\u001b[0m\n\u001b[1;32m   2404\u001b[0m         \u001b[0mstructure\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0moriginal_loop_vars\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2405\u001b[0m         flat_sequence=vars_for_body_with_tensor_arrays)\n\u001b[0;32m-> 2406\u001b[0;31m     \u001b[0mbody_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbody\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mpacked_vars_for_body\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   2407\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mnest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_sequence\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbody_result\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2408\u001b[0m       \u001b[0mbody_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mbody_result\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc\u001b[0m in \u001b[0;36m_time_step\u001b[0;34m(time, output_ta_t, state)\u001b[0m\n\u001b[1;32m    703\u001b[0m           skip_conditionals=True)\n\u001b[1;32m    704\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 705\u001b[0;31m       \u001b[0;34m(\u001b[0m\u001b[0moutput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnew_state\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcall_cell\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    706\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    707\u001b[0m     \u001b[0;31m# Pack state if using state tuples\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc\u001b[0m in \u001b[0;36m<lambda>\u001b[0;34m()\u001b[0m\n\u001b[1;32m    689\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    690\u001b[0m     \u001b[0minput_t\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpack_sequence_as\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstructure\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mflat_sequence\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minput_t\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 691\u001b[0;31m     \u001b[0mcall_cell\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mlambda\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mcell\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_t\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    692\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    693\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0msequence_length\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.pyc\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs, state, scope)\u001b[0m\n\u001b[1;32m    148\u001b[0m         \u001b[0;31m# We start with bias of 1.0 to not reset and not update.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    149\u001b[0m         value = sigmoid(_linear(\n\u001b[0;32m--> 150\u001b[0;31m           [inputs, state], 2 * self._num_units, True, 1.0))\n\u001b[0m\u001b[1;32m    151\u001b[0m         r, u = array_ops.split(\n\u001b[1;32m    152\u001b[0m             \u001b[0mvalue\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.pyc\u001b[0m in \u001b[0;36m_linear\u001b[0;34m(args, output_size, bias, bias_start)\u001b[0m\n\u001b[1;32m   1042\u001b[0m   \u001b[0;32mwith\u001b[0m \u001b[0mvs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvariable_scope\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mscope\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mouter_scope\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1043\u001b[0m     weights = vs.get_variable(\n\u001b[0;32m-> 1044\u001b[0;31m         _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype)\n\u001b[0m\u001b[1;32m   1045\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1046\u001b[0m       \u001b[0mres\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmath_ops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmatmul\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweights\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/variable_scope.pyc\u001b[0m in \u001b[0;36mget_variable\u001b[0;34m(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)\u001b[0m\n\u001b[1;32m   1047\u001b[0m       \u001b[0mcollections\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcollections\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcaching_device\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcaching_device\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1048\u001b[0m       \u001b[0mpartitioner\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpartitioner\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidate_shape\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalidate_shape\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1049\u001b[0;31m       use_resource=use_resource, custom_getter=custom_getter)\n\u001b[0m\u001b[1;32m   1050\u001b[0m get_variable_or_local_docstring = (\n\u001b[1;32m   1051\u001b[0m     \"\"\"%s\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/variable_scope.pyc\u001b[0m in \u001b[0;36mget_variable\u001b[0;34m(self, var_store, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)\u001b[0m\n\u001b[1;32m    946\u001b[0m           \u001b[0mcollections\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcollections\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcaching_device\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcaching_device\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    947\u001b[0m           \u001b[0mpartitioner\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpartitioner\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidate_shape\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalidate_shape\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 948\u001b[0;31m           use_resource=use_resource, custom_getter=custom_getter)\n\u001b[0m\u001b[1;32m    949\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    950\u001b[0m   def _get_partitioned_variable(self,\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/variable_scope.pyc\u001b[0m in \u001b[0;36mget_variable\u001b[0;34m(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)\u001b[0m\n\u001b[1;32m    354\u001b[0m           \u001b[0mreuse\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mreuse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrainable\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtrainable\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcollections\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcollections\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    355\u001b[0m           \u001b[0mcaching_device\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcaching_device\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpartitioner\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpartitioner\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 356\u001b[0;31m           validate_shape=validate_shape, use_resource=use_resource)\n\u001b[0m\u001b[1;32m    357\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    358\u001b[0m   def _get_partitioned_variable(\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/variable_scope.pyc\u001b[0m in \u001b[0;36m_true_getter\u001b[0;34m(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource)\u001b[0m\n\u001b[1;32m    339\u001b[0m           \u001b[0mtrainable\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtrainable\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcollections\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcollections\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    340\u001b[0m           \u001b[0mcaching_device\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcaching_device\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidate_shape\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvalidate_shape\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 341\u001b[0;31m           use_resource=use_resource)\n\u001b[0m\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    343\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mcustom_getter\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/ops/variable_scope.pyc\u001b[0m in \u001b[0;36m_get_single_variable\u001b[0;34m(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource)\u001b[0m\n\u001b[1;32m    651\u001b[0m                          \u001b[0;34m\" Did you mean to set reuse=True in VarScope? \"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    652\u001b[0m                          \"Originally defined at:\\n\\n%s\" % (\n\u001b[0;32m--> 653\u001b[0;31m                              name, \"\".join(traceback.format_list(tb))))\n\u001b[0m\u001b[1;32m    654\u001b[0m       \u001b[0mfound_var\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_vars\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    655\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mshape\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_compatible_with\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfound_var\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_shape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mValueError\u001b[0m: Variable plain_decoder1/gru_cell/gates/weights already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:\n\n  File \"/home/hewei/.local/lib/python2.7/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py\", line 1044, in _linear\n    _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], dtype=dtype)\n  File \"/home/hewei/.local/lib/python2.7/site-packages/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py\", line 150, in __call__\n    [inputs, state], 2 * self._num_units, True, 1.0))\n  File \"<ipython-input-14-d003c7c96826>\", line 8, in RNNDecoder\n    dtype=tf.float32,scope=\"plain_decoder1\")\n"
     ]
    }
   ],
   "source": [
     "# assemble the full graph: CNN encoder -> GRU decoder -> per-step softmax\n",
     "encoder_state = CNNEncoder(encoder_inputs)\n",
     "decoder_outputs, _ = RNNDecoder(encoder_state,decoder_inputs)\n",
     "\n",
     "# project GRU outputs to vocabulary logits, one distribution per time step\n",
     "decoder_logits = tf.contrib.layers.linear(decoder_outputs, vocab_size)\n",
     "labels = tf.one_hot(decoder_targets, depth=vocab_size, dtype=tf.float32)\n",
     "stepwise_cross_entropy = tf.nn.softmax_cross_entropy_with_logits(\n",
     "    labels = labels,\n",
     "    logits=decoder_logits,\n",
     ")\n",
     "\n",
     "# mean cross-entropy over all batch elements and time steps\n",
     "loss = tf.reduce_mean(stepwise_cross_entropy)\n",
     "\n",
     "decoder_prediction = tf.argmax(decoder_logits, 2)\n",
     "\n",
     "# NOTE(review): the learning_rate variable defined above is not used here;\n",
     "# the optimizer hardcodes 0.001 -- keep the two in sync when tuning\n",
     "train_op = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Recover integer target ids from the one-hot labels (vocab axis = 2).\n",
    "labels_ = tf.argmax(labels, axis=2)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 3. Training:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def rpadd(x, maxlen=maxlenh, eos=eos, lpad=True, prefix=None):\n",
    "    # Pad/truncate the id list `x` to exactly `maxlen` entries:\n",
    "    # optionally prepend `prefix`, truncate to at most maxlen - 1 ids,\n",
    "    # append the mandatory `eos` id, then fill the tail with `empty` ids.\n",
    "    # Returns a new list of length `maxlen`; `x` is not mutated.\n",
    "    # NOTE(review): `lpad` is accepted but never used -- confirm whether\n",
    "    # left-padding support was intended.\n",
    "    # maxlen must leave room for the eos marker; maxlen == 0 previously\n",
    "    # slipped past `>= 0` only to fail the final length assert.\n",
    "    assert maxlen >= 1\n",
    "\n",
    "    # `is not None` rather than `!= None`: a falsy prefix id such as 0\n",
    "    # must still be prepended.\n",
    "    if prefix is not None:\n",
    "        x = [prefix] + x\n",
    "    n = len(x)\n",
    "    if n > maxlen - 1:\n",
    "        # Reserve the last usable slot for eos.\n",
    "        x = x[:maxlen - 1]\n",
    "        n = maxlen - 1\n",
    "    res = x + [eos] + [empty] * (maxlen - n - 1)\n",
    "    assert len(res) == maxlen\n",
    "    return res"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Start an interactive session and initialise every variable in the graph\n",
    "# built above; must run before any train/eval sess.run calls.\n",
    "sess = tf.InteractiveSession()\n",
    "sess.run(tf.global_variables_initializer())"
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Runing in EPOC[0] Batch [10]\n",
      "loss 2.55794\n",
      "Runing in EPOC[0] Batch [20]\n",
      "loss 2.51331\n",
      "Runing in EPOC[0] Batch [30]\n",
      "loss 2.6242\n",
      "Runing in EPOC[0] Batch [40]\n",
      "loss 2.57741\n",
      "Runing in EPOC[0] Batch [50]\n",
      "loss 2.71939\n",
      "Runing in EPOC[0] Batch [60]\n",
      "loss 2.76793\n",
      "Runing in EPOC[0] Batch [70]\n",
      "loss 2.66793\n",
      "Runing in EPOC[0] Batch [80]\n",
      "loss 2.76089\n",
      "Runing in EPOC[0] Batch [90]\n",
      "loss 2.82169\n",
      "Runing in EPOC[0] Batch [100]\n",
      "loss 2.75839\n",
      "Runing in EPOC[0] Batch [110]\n",
      "loss 2.83365\n",
      "Runing in EPOC[0] Batch [120]\n",
      "loss 2.7559\n",
      "Runing in EPOC[0] Batch [130]\n",
      "loss 2.89953\n",
      "Runing in EPOC[0] Batch [140]\n",
      "loss 2.67833\n",
      "Runing in EPOC[0] Batch [150]\n",
      "loss 2.74658\n",
      "Runing in EPOC[0] Batch [160]\n",
      "loss 2.58725\n",
      "Runing in EPOC[0] Batch [170]\n",
      "loss 2.64656\n",
      "Runing in EPOC[0] Batch [180]\n",
      "loss 2.67401\n",
      "Runing in EPOC[0] Batch [190]\n",
      "loss 2.75071\n",
      "Runing in EPOC[1] Batch [10]\n",
      "loss 2.53219\n",
      "Runing in EPOC[1] Batch [20]\n",
      "loss 2.49097\n",
      "Runing in EPOC[1] Batch [30]\n",
      "loss 2.60765\n",
      "Runing in EPOC[1] Batch [40]\n",
      "loss 2.55826\n",
      "Runing in EPOC[1] Batch [50]\n",
      "loss 2.67064\n",
      "Runing in EPOC[1] Batch [60]\n",
      "loss 2.72047\n",
      "Runing in EPOC[1] Batch [70]\n",
      "loss 2.62489\n",
      "Runing in EPOC[1] Batch [80]\n",
      "loss 2.71597\n",
      "Runing in EPOC[1] Batch [90]\n",
      "loss 2.78098\n",
      "Runing in EPOC[1] Batch [100]\n",
      "loss 2.71815\n",
      "Runing in EPOC[1] Batch [110]\n",
      "loss 2.79154\n",
      "Runing in EPOC[1] Batch [120]\n",
      "loss 2.71693\n",
      "Runing in EPOC[1] Batch [130]\n",
      "loss 2.85554\n",
      "Runing in EPOC[1] Batch [140]\n",
      "loss 2.63407\n",
      "Runing in EPOC[1] Batch [150]\n",
      "loss 2.7079\n",
      "Runing in EPOC[1] Batch [160]\n",
      "loss 2.54971\n",
      "Runing in EPOC[1] Batch [170]\n",
      "loss 2.61186\n",
      "Runing in EPOC[1] Batch [180]\n",
      "loss 2.63493\n",
      "Runing in EPOC[1] Batch [190]\n",
      "loss 2.71497\n",
      "Runing in EPOC[2] Batch [10]\n",
      "loss 2.50089\n",
      "Runing in EPOC[2] Batch [20]\n",
      "loss 2.46388\n",
      "Runing in EPOC[2] Batch [30]\n",
      "loss 2.58075\n",
      "Runing in EPOC[2] Batch [40]\n",
      "loss 2.52983\n",
      "Runing in EPOC[2] Batch [50]\n",
      "loss 2.63292\n",
      "Runing in EPOC[2] Batch [60]\n",
      "loss 2.67747\n",
      "Runing in EPOC[2] Batch [70]\n",
      "loss 2.59074\n",
      "Runing in EPOC[2] Batch [80]\n",
      "loss 2.68275\n",
      "Runing in EPOC[2] Batch [90]\n",
      "loss 2.74498\n",
      "Runing in EPOC[2] Batch [100]\n",
      "loss 2.68051\n",
      "Runing in EPOC[2] Batch [110]\n",
      "loss 2.75894\n",
      "Runing in EPOC[2] Batch [120]\n",
      "loss 2.68072\n",
      "Runing in EPOC[2] Batch [130]\n",
      "loss 2.81696\n",
      "Runing in EPOC[2] Batch [140]\n",
      "loss 2.59898\n",
      "Runing in EPOC[2] Batch [150]\n",
      "loss 2.67433\n",
      "Runing in EPOC[2] Batch [160]\n",
      "loss 2.51877\n",
      "Runing in EPOC[2] Batch [170]\n",
      "loss 2.58557\n",
      "Runing in EPOC[2] Batch [180]\n",
      "loss 2.60235\n",
      "Runing in EPOC[2] Batch [190]\n",
      "loss 2.68541\n",
      "Runing in EPOC[3] Batch [10]\n",
      "loss 2.47417\n",
      "Runing in EPOC[3] Batch [20]\n",
      "loss 2.43589\n",
      "Runing in EPOC[3] Batch [30]\n",
      "loss 2.55375\n",
      "Runing in EPOC[3] Batch [40]\n",
      "loss 2.50348\n",
      "Runing in EPOC[3] Batch [50]\n",
      "loss 2.59817\n",
      "Runing in EPOC[3] Batch [60]\n",
      "loss 2.64065\n",
      "Runing in EPOC[3] Batch [70]\n",
      "loss 2.56573\n",
      "Runing in EPOC[3] Batch [80]\n",
      "loss 2.65149\n",
      "Runing in EPOC[3] Batch [90]\n",
      "loss 2.71333\n",
      "Runing in EPOC[3] Batch [100]\n",
      "loss 2.64406\n",
      "Runing in EPOC[3] Batch [110]\n",
      "loss 2.73298\n",
      "Runing in EPOC[3] Batch [120]\n",
      "loss 2.64969\n",
      "Runing in EPOC[3] Batch [130]\n",
      "loss 2.7847\n",
      "Runing in EPOC[3] Batch [140]\n",
      "loss 2.56708\n",
      "Runing in EPOC[3] Batch [150]\n",
      "loss 2.64599\n",
      "Runing in EPOC[3] Batch [160]\n",
      "loss 2.49196\n",
      "Runing in EPOC[3] Batch [170]\n",
      "loss 2.55463\n",
      "Runing in EPOC[3] Batch [180]\n",
      "loss 2.56793\n",
      "Runing in EPOC[3] Batch [190]\n",
      "loss 2.65489\n",
      "Runing in EPOC[4] Batch [10]\n",
      "loss 2.44789\n",
      "Runing in EPOC[4] Batch [20]\n",
      "loss 2.41169\n",
      "Runing in EPOC[4] Batch [30]\n",
      "loss 2.5311\n",
      "Runing in EPOC[4] Batch [40]\n",
      "loss 2.48874\n",
      "Runing in EPOC[4] Batch [50]\n",
      "loss 2.57428\n",
      "Runing in EPOC[4] Batch [60]\n",
      "loss 2.61062\n",
      "Runing in EPOC[4] Batch [70]\n",
      "loss 2.53652\n",
      "Runing in EPOC[4] Batch [80]\n",
      "loss 2.62108\n",
      "Runing in EPOC[4] Batch [90]\n",
      "loss 2.68922\n",
      "Runing in EPOC[4] Batch [100]\n",
      "loss 2.61585\n",
      "Runing in EPOC[4] Batch [110]\n",
      "loss 2.70348\n",
      "Runing in EPOC[4] Batch [120]\n",
      "loss 2.62344\n",
      "Runing in EPOC[4] Batch [130]\n",
      "loss 2.75725\n",
      "Runing in EPOC[4] Batch [140]\n",
      "loss 2.53657\n",
      "Runing in EPOC[4] Batch [150]\n",
      "loss 2.62056\n",
      "Runing in EPOC[4] Batch [160]\n",
      "loss 2.46678\n",
      "Runing in EPOC[4] Batch [170]\n",
      "loss 2.52423\n",
      "Runing in EPOC[4] Batch [180]\n",
      "loss 2.54186\n",
      "Runing in EPOC[4] Batch [190]\n",
      "loss 2.62706\n",
      "Runing in EPOC[5] Batch [10]\n",
      "loss 2.42474\n",
      "Runing in EPOC[5] Batch [20]\n",
      "loss 2.39027\n",
      "Runing in EPOC[5] Batch [30]\n",
      "loss 2.50637\n",
      "Runing in EPOC[5] Batch [40]\n",
      "loss 2.46151\n",
      "Runing in EPOC[5] Batch [50]\n",
      "loss 2.54593\n",
      "Runing in EPOC[5] Batch [60]\n",
      "loss 2.57884\n",
      "Runing in EPOC[5] Batch [70]\n",
      "loss 2.51155\n",
      "Runing in EPOC[5] Batch [80]\n",
      "loss 2.59383\n",
      "Runing in EPOC[5] Batch [90]\n",
      "loss 2.66311\n",
      "Runing in EPOC[5] Batch [100]\n",
      "loss 2.59186\n",
      "Runing in EPOC[5] Batch [110]\n",
      "loss 2.68071\n",
      "Runing in EPOC[5] Batch [120]\n",
      "loss 2.59571\n",
      "Runing in EPOC[5] Batch [130]\n",
      "loss 2.72845\n",
      "Runing in EPOC[5] Batch [140]\n",
      "loss 2.51128\n",
      "Runing in EPOC[5] Batch [150]\n",
      "loss 2.59611\n",
      "Runing in EPOC[5] Batch [160]\n",
      "loss 2.44322\n",
      "Runing in EPOC[5] Batch [170]\n",
      "loss 2.50413\n",
      "Runing in EPOC[5] Batch [180]\n",
      "loss 2.51669\n",
      "Runing in EPOC[5] Batch [190]\n",
      "loss 2.60145\n",
      "Runing in EPOC[6] Batch [10]\n",
      "loss 2.40477\n",
      "Runing in EPOC[6] Batch [20]\n",
      "loss 2.37145\n",
      "Runing in EPOC[6] Batch [30]\n",
      "loss 2.48525\n",
      "Runing in EPOC[6] Batch [40]\n",
      "loss 2.43678\n",
      "Runing in EPOC[6] Batch [50]\n",
      "loss 2.51946\n",
      "Runing in EPOC[6] Batch [60]\n",
      "loss 2.55066\n",
      "Runing in EPOC[6] Batch [70]\n",
      "loss 2.49178\n",
      "Runing in EPOC[6] Batch [80]\n",
      "loss 2.56726\n",
      "Runing in EPOC[6] Batch [90]\n",
      "loss 2.63517\n",
      "Runing in EPOC[6] Batch [100]\n",
      "loss 2.57083\n",
      "Runing in EPOC[6] Batch [110]\n",
      "loss 2.66161\n",
      "Runing in EPOC[6] Batch [120]\n",
      "loss 2.57359\n",
      "Runing in EPOC[6] Batch [130]\n",
      "loss 2.70136\n",
      "Runing in EPOC[6] Batch [140]\n",
      "loss 2.48792\n",
      "Runing in EPOC[6] Batch [150]\n",
      "loss 2.56939\n",
      "Runing in EPOC[6] Batch [160]\n",
      "loss 2.42516\n",
      "Runing in EPOC[6] Batch [170]\n",
      "loss 2.48324\n",
      "Runing in EPOC[6] Batch [180]\n",
      "loss 2.49166\n",
      "Runing in EPOC[6] Batch [190]\n",
      "loss 2.58775\n",
      "Runing in EPOC[7] Batch [10]\n",
      "loss 2.38334\n",
      "Runing in EPOC[7] Batch [20]\n",
      "loss 2.35357\n",
      "Runing in EPOC[7] Batch [30]\n",
      "loss 2.46614\n",
      "Runing in EPOC[7] Batch [40]\n",
      "loss 2.41769\n",
      "Runing in EPOC[7] Batch [50]\n",
      "loss 2.49878\n",
      "Runing in EPOC[7] Batch [60]\n",
      "loss 2.52891\n",
      "Runing in EPOC[7] Batch [70]\n",
      "loss 2.47288\n",
      "Runing in EPOC[7] Batch [80]\n",
      "loss 2.54647\n",
      "Runing in EPOC[7] Batch [90]\n",
      "loss 2.61108\n",
      "Runing in EPOC[7] Batch [100]\n",
      "loss 2.55275\n",
      "Runing in EPOC[7] Batch [110]\n",
      "loss 2.64002\n",
      "Runing in EPOC[7] Batch [120]\n",
      "loss 2.55784\n",
      "Runing in EPOC[7] Batch [130]\n",
      "loss 2.67644\n",
      "Runing in EPOC[7] Batch [140]\n",
      "loss 2.46737\n",
      "Runing in EPOC[7] Batch [150]\n",
      "loss 2.54688\n",
      "Runing in EPOC[7] Batch [160]\n",
      "loss 2.40984\n",
      "Runing in EPOC[7] Batch [170]\n",
      "loss 2.46267\n",
      "Runing in EPOC[7] Batch [180]\n",
      "loss 2.47395\n",
      "Runing in EPOC[7] Batch [190]\n",
      "loss 2.56735\n",
      "Runing in EPOC[8] Batch [10]\n",
      "loss 2.3663\n",
      "Runing in EPOC[8] Batch [20]\n",
      "loss 2.33818\n",
      "Runing in EPOC[8] Batch [30]\n",
      "loss 2.44215\n",
      "Runing in EPOC[8] Batch [40]\n",
      "loss 2.40061\n",
      "Runing in EPOC[8] Batch [50]\n",
      "loss 2.47835\n",
      "Runing in EPOC[8] Batch [60]\n",
      "loss 2.51009\n",
      "Runing in EPOC[8] Batch [70]\n",
      "loss 2.45465\n",
      "Runing in EPOC[8] Batch [80]\n",
      "loss 2.52825\n",
      "Runing in EPOC[8] Batch [90]\n",
      "loss 2.58712\n",
      "Runing in EPOC[8] Batch [100]\n",
      "loss 2.53158\n",
      "Runing in EPOC[8] Batch [110]\n",
      "loss 2.61961\n",
      "Runing in EPOC[8] Batch [120]\n",
      "loss 2.54059\n",
      "Runing in EPOC[8] Batch [130]\n",
      "loss 2.6563\n",
      "Runing in EPOC[8] Batch [140]\n",
      "loss 2.44249\n",
      "Runing in EPOC[8] Batch [150]\n",
      "loss 2.52939\n",
      "Runing in EPOC[8] Batch [160]\n",
      "loss 2.38667\n",
      "Runing in EPOC[8] Batch [170]\n",
      "loss 2.44492\n",
      "Runing in EPOC[8] Batch [180]\n",
      "loss 2.45621\n",
      "Runing in EPOC[8] Batch [190]\n",
      "loss 2.5372\n",
      "Runing in EPOC[9] Batch [10]\n",
      "loss 2.35271\n",
      "Runing in EPOC[9] Batch [20]\n",
      "loss 2.31537\n",
      "Runing in EPOC[9] Batch [30]\n",
      "loss 2.42347\n",
      "Runing in EPOC[9] Batch [40]\n",
      "loss 2.38402\n",
      "Runing in EPOC[9] Batch [50]\n",
      "loss 2.45686\n",
      "Runing in EPOC[9] Batch [60]\n",
      "loss 2.48824\n",
      "Runing in EPOC[9] Batch [70]\n",
      "loss 2.44025\n",
      "Runing in EPOC[9] Batch [80]\n",
      "loss 2.50535\n",
      "Runing in EPOC[9] Batch [90]\n",
      "loss 2.56631\n",
      "Runing in EPOC[9] Batch [100]\n",
      "loss 2.51256\n",
      "Runing in EPOC[9] Batch [110]\n",
      "loss 2.60329\n",
      "Runing in EPOC[9] Batch [120]\n",
      "loss 2.51737\n",
      "Runing in EPOC[9] Batch [130]\n",
      "loss 2.63938\n",
      "Runing in EPOC[9] Batch [140]\n",
      "loss 2.41913\n",
      "Runing in EPOC[9] Batch [150]\n",
      "loss 2.51044\n",
      "Runing in EPOC[9] Batch [160]\n",
      "loss 2.36787\n",
      "Runing in EPOC[9] Batch [170]\n",
      "loss 2.42371\n",
      "Runing in EPOC[9] Batch [180]\n",
      "loss 2.43903\n",
      "Runing in EPOC[9] Batch [190]\n",
      "loss 2.51324\n",
      "Runing in EPOC[10] Batch [10]\n",
      "loss 2.33539\n",
      "Runing in EPOC[10] Batch [20]\n",
      "loss 2.29606\n",
      "Runing in EPOC[10] Batch [30]\n",
      "loss 2.40773\n",
      "Runing in EPOC[10] Batch [40]\n",
      "loss 2.36825\n",
      "Runing in EPOC[10] Batch [50]\n",
      "loss 2.43825\n",
      "Runing in EPOC[10] Batch [60]\n",
      "loss 2.46621\n",
      "Runing in EPOC[10] Batch [70]\n",
      "loss 2.42379\n",
      "Runing in EPOC[10] Batch [80]\n",
      "loss 2.48604\n",
      "Runing in EPOC[10] Batch [90]\n",
      "loss 2.54551\n",
      "Runing in EPOC[10] Batch [100]\n",
      "loss 2.49598\n",
      "Runing in EPOC[10] Batch [110]\n",
      "loss 2.58694\n",
      "Runing in EPOC[10] Batch [120]\n",
      "loss 2.49724\n",
      "Runing in EPOC[10] Batch [130]\n",
      "loss 2.62217\n",
      "Runing in EPOC[10] Batch [140]\n",
      "loss 2.40095\n",
      "Runing in EPOC[10] Batch [150]\n",
      "loss 2.49048\n",
      "Runing in EPOC[10] Batch [160]\n",
      "loss 2.35164\n",
      "Runing in EPOC[10] Batch [170]\n",
      "loss 2.40182\n",
      "Runing in EPOC[10] Batch [180]\n",
      "loss 2.42146\n",
      "Runing in EPOC[10] Batch [190]\n",
      "loss 2.49317\n",
      "Runing in EPOC[11] Batch [10]\n",
      "loss 2.31679\n",
      "Runing in EPOC[11] Batch [20]\n",
      "loss 2.28111\n",
      "Runing in EPOC[11] Batch [30]\n",
      "loss 2.39142\n",
      "Runing in EPOC[11] Batch [40]\n",
      "loss 2.35642\n",
      "Runing in EPOC[11] Batch [50]\n",
      "loss 2.41813\n",
      "Runing in EPOC[11] Batch [60]\n",
      "loss 2.44822\n",
      "Runing in EPOC[11] Batch [70]\n",
      "loss 2.40521\n",
      "Runing in EPOC[11] Batch [80]\n",
      "loss 2.4689\n",
      "Runing in EPOC[11] Batch [90]\n",
      "loss 2.52447\n",
      "Runing in EPOC[11] Batch [100]\n",
      "loss 2.48001\n",
      "Runing in EPOC[11] Batch [110]\n",
      "loss 2.57045\n",
      "Runing in EPOC[11] Batch [120]\n",
      "loss 2.48105\n",
      "Runing in EPOC[11] Batch [130]\n",
      "loss 2.60479\n",
      "Runing in EPOC[11] Batch [140]\n",
      "loss 2.38388\n",
      "Runing in EPOC[11] Batch [150]\n",
      "loss 2.47309\n",
      "Runing in EPOC[11] Batch [160]\n",
      "loss 2.33654\n",
      "Runing in EPOC[11] Batch [170]\n",
      "loss 2.38488\n",
      "Runing in EPOC[11] Batch [180]\n",
      "loss 2.40539\n",
      "Runing in EPOC[11] Batch [190]\n",
      "loss 2.47628\n",
      "Runing in EPOC[12] Batch [10]\n",
      "loss 2.29996\n",
      "Runing in EPOC[12] Batch [20]\n",
      "loss 2.27084\n",
      "Runing in EPOC[12] Batch [30]\n",
      "loss 2.37546\n",
      "Runing in EPOC[12] Batch [40]\n",
      "loss 2.34989\n",
      "Runing in EPOC[12] Batch [50]\n",
      "loss 2.40326\n",
      "Runing in EPOC[12] Batch [60]\n",
      "loss 2.4355\n",
      "Runing in EPOC[12] Batch [70]\n",
      "loss 2.38863\n",
      "Runing in EPOC[12] Batch [80]\n",
      "loss 2.45376\n",
      "Runing in EPOC[12] Batch [90]\n",
      "loss 2.50754\n",
      "Runing in EPOC[12] Batch [100]\n",
      "loss 2.46478\n",
      "Runing in EPOC[12] Batch [110]\n",
      "loss 2.55706\n",
      "Runing in EPOC[12] Batch [120]\n",
      "loss 2.46211\n",
      "Runing in EPOC[12] Batch [130]\n",
      "loss 2.59245\n",
      "Runing in EPOC[12] Batch [140]\n",
      "loss 2.36894\n",
      "Runing in EPOC[12] Batch [150]\n",
      "loss 2.45757\n",
      "Runing in EPOC[12] Batch [160]\n",
      "loss 2.32149\n",
      "Runing in EPOC[12] Batch [170]\n",
      "loss 2.37068\n",
      "Runing in EPOC[12] Batch [180]\n",
      "loss 2.39202\n",
      "Runing in EPOC[12] Batch [190]\n",
      "loss 2.46218\n",
      "Runing in EPOC[13] Batch [10]\n",
      "loss 2.28991\n",
      "Runing in EPOC[13] Batch [20]\n",
      "loss 2.25914\n",
      "Runing in EPOC[13] Batch [30]\n",
      "loss 2.36093\n",
      "Runing in EPOC[13] Batch [40]\n",
      "loss 2.3364\n",
      "Runing in EPOC[13] Batch [50]\n",
      "loss 2.40364\n",
      "Runing in EPOC[13] Batch [60]\n",
      "loss 2.42398\n",
      "Runing in EPOC[13] Batch [70]\n",
      "loss 2.38162\n",
      "Runing in EPOC[13] Batch [80]\n",
      "loss 2.44359\n",
      "Runing in EPOC[13] Batch [90]\n",
      "loss 2.49503\n",
      "Runing in EPOC[13] Batch [100]\n",
      "loss 2.4492\n",
      "Runing in EPOC[13] Batch [110]\n",
      "loss 2.54428\n",
      "Runing in EPOC[13] Batch [120]\n",
      "loss 2.44555\n",
      "Runing in EPOC[13] Batch [130]\n",
      "loss 2.58129\n",
      "Runing in EPOC[13] Batch [140]\n",
      "loss 2.35987\n",
      "Runing in EPOC[13] Batch [150]\n",
      "loss 2.44456\n",
      "Runing in EPOC[13] Batch [160]\n",
      "loss 2.30878\n",
      "Runing in EPOC[13] Batch [170]\n",
      "loss 2.35611\n",
      "Runing in EPOC[13] Batch [180]\n",
      "loss 2.37666\n",
      "Runing in EPOC[13] Batch [190]\n",
      "loss 2.449\n",
      "Runing in EPOC[14] Batch [10]\n",
      "loss 2.2813\n",
      "Runing in EPOC[14] Batch [20]\n",
      "loss 2.24432\n",
      "Runing in EPOC[14] Batch [30]\n",
      "loss 2.35528\n",
      "Runing in EPOC[14] Batch [40]\n",
      "loss 2.31921\n",
      "Runing in EPOC[14] Batch [50]\n",
      "loss 2.38397\n",
      "Runing in EPOC[14] Batch [60]\n",
      "loss 2.41752\n",
      "Runing in EPOC[14] Batch [70]\n",
      "loss 2.3712\n",
      "Runing in EPOC[14] Batch [80]\n",
      "loss 2.4317\n",
      "Runing in EPOC[14] Batch [90]\n",
      "loss 2.48401\n",
      "Runing in EPOC[14] Batch [100]\n",
      "loss 2.43286\n",
      "Runing in EPOC[14] Batch [110]\n",
      "loss 2.52996\n",
      "Runing in EPOC[14] Batch [120]\n",
      "loss 2.43082\n",
      "Runing in EPOC[14] Batch [130]\n",
      "loss 2.56196\n",
      "Runing in EPOC[14] Batch [140]\n",
      "loss 2.35012\n",
      "Runing in EPOC[14] Batch [150]\n",
      "loss 2.43868\n",
      "Runing in EPOC[14] Batch [160]\n",
      "loss 2.29564\n",
      "Runing in EPOC[14] Batch [170]\n",
      "loss 2.34229\n",
      "Runing in EPOC[14] Batch [180]\n",
      "loss 2.36141\n",
      "Runing in EPOC[14] Batch [190]\n",
      "loss 2.43858\n",
      "Runing in EPOC[15] Batch [10]\n",
      "loss 2.26834\n",
      "Runing in EPOC[15] Batch [20]\n",
      "loss 2.23364\n",
      "Runing in EPOC[15] Batch [30]\n",
      "loss 2.35028\n",
      "Runing in EPOC[15] Batch [40]\n",
      "loss 2.30778\n",
      "Runing in EPOC[15] Batch [50]\n",
      "loss 2.36036\n",
      "Runing in EPOC[15] Batch [60]\n",
      "loss 2.39829\n",
      "Runing in EPOC[15] Batch [70]\n",
      "loss 2.35907\n",
      "Runing in EPOC[15] Batch [80]\n",
      "loss 2.41324\n",
      "Runing in EPOC[15] Batch [90]\n",
      "loss 2.47353\n",
      "Runing in EPOC[15] Batch [100]\n",
      "loss 2.41598\n",
      "Runing in EPOC[15] Batch [110]\n",
      "loss 2.51207\n",
      "Runing in EPOC[15] Batch [120]\n",
      "loss 2.41803\n",
      "Runing in EPOC[15] Batch [130]\n",
      "loss 2.54393\n",
      "Runing in EPOC[15] Batch [140]\n",
      "loss 2.33033\n",
      "Runing in EPOC[15] Batch [150]\n",
      "loss 2.42772\n",
      "Runing in EPOC[15] Batch [160]\n",
      "loss 2.2842\n",
      "Runing in EPOC[15] Batch [170]\n",
      "loss 2.32719\n",
      "Runing in EPOC[15] Batch [180]\n",
      "loss 2.3464\n",
      "Runing in EPOC[15] Batch [190]\n",
      "loss 2.42845\n",
      "Runing in EPOC[16] Batch [10]\n",
      "loss 2.25076\n",
      "Runing in EPOC[16] Batch [20]\n",
      "loss 2.22111\n",
      "Runing in EPOC[16] Batch [30]\n",
      "loss 2.34048\n",
      "Runing in EPOC[16] Batch [40]\n",
      "loss 2.29649\n",
      "Runing in EPOC[16] Batch [50]\n",
      "loss 2.34595\n",
      "Runing in EPOC[16] Batch [60]\n",
      "loss 2.38139\n",
      "Runing in EPOC[16] Batch [70]\n",
      "loss 2.34432\n",
      "Runing in EPOC[16] Batch [80]\n",
      "loss 2.39586\n",
      "Runing in EPOC[16] Batch [90]\n",
      "loss 2.45847\n",
      "Runing in EPOC[16] Batch [100]\n",
      "loss 2.40151\n",
      "Runing in EPOC[16] Batch [110]\n",
      "loss 2.49794\n",
      "Runing in EPOC[16] Batch [120]\n",
      "loss 2.40629\n",
      "Runing in EPOC[16] Batch [130]\n",
      "loss 2.52918\n",
      "Runing in EPOC[16] Batch [140]\n",
      "loss 2.31048\n",
      "Runing in EPOC[16] Batch [150]\n",
      "loss 2.41326\n",
      "Runing in EPOC[16] Batch [160]\n",
      "loss 2.27245\n",
      "Runing in EPOC[16] Batch [170]\n",
      "loss 2.31576\n",
      "Runing in EPOC[16] Batch [180]\n",
      "loss 2.33473\n",
      "Runing in EPOC[16] Batch [190]\n",
      "loss 2.41709\n",
      "Runing in EPOC[17] Batch [10]\n",
      "loss 2.23562\n",
      "Runing in EPOC[17] Batch [20]\n",
      "loss 2.20788\n",
      "Runing in EPOC[17] Batch [30]\n",
      "loss 2.3266\n",
      "Runing in EPOC[17] Batch [40]\n",
      "loss 2.28277\n",
      "Runing in EPOC[17] Batch [50]\n",
      "loss 2.3319\n",
      "Runing in EPOC[17] Batch [60]\n",
      "loss 2.36917\n",
      "Runing in EPOC[17] Batch [70]\n",
      "loss 2.32927\n",
      "Runing in EPOC[17] Batch [80]\n",
      "loss 2.38035\n",
      "Runing in EPOC[17] Batch [90]\n",
      "loss 2.44225\n",
      "Runing in EPOC[17] Batch [100]\n",
      "loss 2.38769\n",
      "Runing in EPOC[17] Batch [110]\n",
      "loss 2.48333\n",
      "Runing in EPOC[17] Batch [120]\n",
      "loss 2.39317\n",
      "Runing in EPOC[17] Batch [130]\n",
      "loss 2.51623\n",
      "Runing in EPOC[17] Batch [140]\n",
      "loss 2.29584\n",
      "Runing in EPOC[17] Batch [150]\n",
      "loss 2.39878\n",
      "Runing in EPOC[17] Batch [160]\n",
      "loss 2.26004\n",
      "Runing in EPOC[17] Batch [170]\n",
      "loss 2.30613\n",
      "Runing in EPOC[17] Batch [180]\n",
      "loss 2.32249\n",
      "Runing in EPOC[17] Batch [190]\n",
      "loss 2.40411\n",
      "Runing in EPOC[18] Batch [10]\n",
      "loss 2.22165\n",
      "Runing in EPOC[18] Batch [20]\n",
      "loss 2.19578\n",
      "Runing in EPOC[18] Batch [30]\n",
      "loss 2.31046\n",
      "Runing in EPOC[18] Batch [40]\n",
      "loss 2.26959\n",
      "Runing in EPOC[18] Batch [50]\n",
      "loss 2.32068\n",
      "Runing in EPOC[18] Batch [60]\n",
      "loss 2.35829\n",
      "Runing in EPOC[18] Batch [70]\n",
      "loss 2.31663\n",
      "Runing in EPOC[18] Batch [80]\n",
      "loss 2.36862\n",
      "Runing in EPOC[18] Batch [90]\n",
      "loss 2.42912\n",
      "Runing in EPOC[18] Batch [100]\n",
      "loss 2.3741\n",
      "Runing in EPOC[18] Batch [110]\n",
      "loss 2.46897\n",
      "Runing in EPOC[18] Batch [120]\n",
      "loss 2.37878\n",
      "Runing in EPOC[18] Batch [130]\n",
      "loss 2.50394\n",
      "Runing in EPOC[18] Batch [140]\n",
      "loss 2.28272\n",
      "Runing in EPOC[18] Batch [150]\n",
      "loss 2.38575\n",
      "Runing in EPOC[18] Batch [160]\n",
      "loss 2.24749\n",
      "Runing in EPOC[18] Batch [170]\n",
      "loss 2.2961\n",
      "Runing in EPOC[18] Batch [180]\n",
      "loss 2.31261\n",
      "Runing in EPOC[18] Batch [190]\n",
      "loss 2.39095\n",
      "Runing in EPOC[19] Batch [10]\n",
      "loss 2.20781\n",
      "Runing in EPOC[19] Batch [20]\n",
      "loss 2.18305\n",
      "Runing in EPOC[19] Batch [30]\n",
      "loss 2.29499\n",
      "Runing in EPOC[19] Batch [40]\n",
      "loss 2.25748\n",
      "Runing in EPOC[19] Batch [50]\n",
      "loss 2.31145\n",
      "Runing in EPOC[19] Batch [60]\n",
      "loss 2.34995\n",
      "Runing in EPOC[19] Batch [70]\n",
      "loss 2.30589\n",
      "Runing in EPOC[19] Batch [80]\n",
      "loss 2.36075\n",
      "Runing in EPOC[19] Batch [90]\n",
      "loss 2.41617\n",
      "Runing in EPOC[19] Batch [100]\n",
      "loss 2.3623\n",
      "Runing in EPOC[19] Batch [110]\n",
      "loss 2.45549\n",
      "Runing in EPOC[19] Batch [120]\n",
      "loss 2.36413\n",
      "Runing in EPOC[19] Batch [130]\n",
      "loss 2.49088\n",
      "Runing in EPOC[19] Batch [140]\n",
      "loss 2.2705\n",
      "Runing in EPOC[19] Batch [150]\n",
      "loss 2.37367\n",
      "Runing in EPOC[19] Batch [160]\n",
      "loss 2.23501\n",
      "Runing in EPOC[19] Batch [170]\n",
      "loss 2.28577\n",
      "Runing in EPOC[19] Batch [180]\n",
      "loss 2.30172\n",
      "Runing in EPOC[19] Batch [190]\n",
      "loss 2.37809\n",
      "Runing in EPOC[20] Batch [10]\n",
      "loss 2.19556\n",
      "Runing in EPOC[20] Batch [20]\n",
      "loss 2.1702\n",
      "Runing in EPOC[20] Batch [30]\n",
      "loss 2.27881\n",
      "Runing in EPOC[20] Batch [40]\n",
      "loss 2.24489\n",
      "Runing in EPOC[20] Batch [50]\n",
      "loss 2.30116\n",
      "Runing in EPOC[20] Batch [60]\n",
      "loss 2.33946\n",
      "Runing in EPOC[20] Batch [70]\n",
      "loss 2.2992\n",
      "Runing in EPOC[20] Batch [80]\n",
      "loss 2.35335\n",
      "Runing in EPOC[20] Batch [90]\n",
      "loss 2.40543\n",
      "Runing in EPOC[20] Batch [100]\n",
      "loss 2.35284\n",
      "Runing in EPOC[20] Batch [110]\n",
      "loss 2.44334\n",
      "Runing in EPOC[20] Batch [120]\n",
      "loss 2.34971\n",
      "Runing in EPOC[20] Batch [130]\n",
      "loss 2.478\n",
      "Runing in EPOC[20] Batch [140]\n",
      "loss 2.25956\n",
      "Runing in EPOC[20] Batch [150]\n",
      "loss 2.36346\n",
      "Runing in EPOC[20] Batch [160]\n",
      "loss 2.22495\n",
      "Runing in EPOC[20] Batch [170]\n",
      "loss 2.27578\n",
      "Runing in EPOC[20] Batch [180]\n",
      "loss 2.29352\n",
      "Runing in EPOC[20] Batch [190]\n",
      "loss 2.36488\n",
      "Runing in EPOC[21] Batch [10]\n",
      "loss 2.1863\n",
      "Runing in EPOC[21] Batch [20]\n",
      "loss 2.15752\n",
      "Runing in EPOC[21] Batch [30]\n",
      "loss 2.26572\n",
      "Runing in EPOC[21] Batch [40]\n",
      "loss 2.23099\n",
      "Runing in EPOC[21] Batch [50]\n",
      "loss 2.28855\n",
      "Runing in EPOC[21] Batch [60]\n",
      "loss 2.32594\n",
      "Runing in EPOC[21] Batch [70]\n",
      "loss 2.29275\n",
      "Runing in EPOC[21] Batch [80]\n",
      "loss 2.34539\n",
      "Runing in EPOC[21] Batch [90]\n",
      "loss 2.39619\n",
      "Runing in EPOC[21] Batch [100]\n",
      "loss 2.34605\n",
      "Runing in EPOC[21] Batch [110]\n",
      "loss 2.43178\n",
      "Runing in EPOC[21] Batch [120]\n",
      "loss 2.33729\n",
      "Runing in EPOC[21] Batch [130]\n",
      "loss 2.46524\n",
      "Runing in EPOC[21] Batch [140]\n",
      "loss 2.24932\n",
      "Runing in EPOC[21] Batch [150]\n",
      "loss 2.35474\n",
      "Runing in EPOC[21] Batch [160]\n",
      "loss 2.21832\n",
      "Runing in EPOC[21] Batch [170]\n",
      "loss 2.26879\n",
      "Runing in EPOC[21] Batch [180]\n",
      "loss 2.28871\n",
      "Runing in EPOC[21] Batch [190]\n",
      "loss 2.35591\n",
      "Runing in EPOC[22] Batch [10]\n",
      "loss 2.17847\n",
      "Runing in EPOC[22] Batch [20]\n",
      "loss 2.15067\n",
      "Runing in EPOC[22] Batch [30]\n",
      "loss 2.25887\n",
      "Runing in EPOC[22] Batch [40]\n",
      "loss 2.21638\n",
      "Runing in EPOC[22] Batch [50]\n",
      "loss 2.27687\n",
      "Runing in EPOC[22] Batch [60]\n",
      "loss 2.31427\n",
      "Runing in EPOC[22] Batch [70]\n",
      "loss 2.28105\n",
      "Runing in EPOC[22] Batch [80]\n",
      "loss 2.33537\n",
      "Runing in EPOC[22] Batch [90]\n",
      "loss 2.38723\n",
      "Runing in EPOC[22] Batch [100]\n",
      "loss 2.33679\n",
      "Runing in EPOC[22] Batch [110]\n",
      "loss 2.42251\n",
      "Runing in EPOC[22] Batch [120]\n",
      "loss 2.3302\n",
      "Runing in EPOC[22] Batch [130]\n",
      "loss 2.44961\n",
      "Runing in EPOC[22] Batch [140]\n",
      "loss 2.2427\n",
      "Runing in EPOC[22] Batch [150]\n",
      "loss 2.34395\n",
      "Runing in EPOC[22] Batch [160]\n",
      "loss 2.21552\n",
      "Runing in EPOC[22] Batch [170]\n",
      "loss 2.26236\n",
      "Runing in EPOC[22] Batch [180]\n",
      "loss 2.29224\n",
      "Runing in EPOC[22] Batch [190]\n",
      "loss 2.35541\n",
      "Runing in EPOC[23] Batch [10]\n",
      "loss 2.17194\n",
      "Runing in EPOC[23] Batch [20]\n",
      "loss 2.1466\n",
      "Runing in EPOC[23] Batch [30]\n",
      "loss 2.24944\n",
      "Runing in EPOC[23] Batch [40]\n",
      "loss 2.21332\n",
      "Runing in EPOC[23] Batch [50]\n",
      "loss 2.27085\n",
      "Runing in EPOC[23] Batch [60]\n",
      "loss 2.29585\n",
      "Runing in EPOC[23] Batch [70]\n",
      "loss 2.2719\n",
      "Runing in EPOC[23] Batch [80]\n",
      "loss 2.32625\n",
      "Runing in EPOC[23] Batch [90]\n",
      "loss 2.3778\n",
      "Runing in EPOC[23] Batch [100]\n",
      "loss 2.32551\n",
      "Runing in EPOC[23] Batch [110]\n",
      "loss 2.41848\n",
      "Runing in EPOC[23] Batch [120]\n",
      "loss 2.32291\n",
      "Runing in EPOC[23] Batch [130]\n",
      "loss 2.43586\n",
      "Runing in EPOC[23] Batch [140]\n",
      "loss 2.23556\n",
      "Runing in EPOC[23] Batch [150]\n",
      "loss 2.33106\n",
      "Runing in EPOC[23] Batch [160]\n",
      "loss 2.20726\n",
      "Runing in EPOC[23] Batch [170]\n",
      "loss 2.25431\n",
      "Runing in EPOC[23] Batch [180]\n",
      "loss 2.29483\n",
      "Runing in EPOC[23] Batch [190]\n",
      "loss 2.36704\n",
      "Runing in EPOC[24] Batch [10]\n",
      "loss 2.16796\n",
      "Runing in EPOC[24] Batch [20]\n",
      "loss 2.13147\n",
      "Runing in EPOC[24] Batch [30]\n",
      "loss 2.23981\n",
      "Runing in EPOC[24] Batch [40]\n",
      "loss 2.21456\n",
      "Runing in EPOC[24] Batch [50]\n",
      "loss 2.25843\n",
      "Runing in EPOC[24] Batch [60]\n",
      "loss 2.28292\n",
      "Runing in EPOC[24] Batch [70]\n",
      "loss 2.2686\n",
      "Runing in EPOC[24] Batch [80]\n",
      "loss 2.31276\n",
      "Runing in EPOC[24] Batch [90]\n",
      "loss 2.3693\n",
      "Runing in EPOC[24] Batch [100]\n",
      "loss 2.31698\n",
      "Runing in EPOC[24] Batch [110]\n",
      "loss 2.41168\n",
      "Runing in EPOC[24] Batch [120]\n",
      "loss 2.30903\n",
      "Runing in EPOC[24] Batch [130]\n",
      "loss 2.42808\n",
      "Runing in EPOC[24] Batch [140]\n",
      "loss 2.22263\n",
      "Runing in EPOC[24] Batch [150]\n",
      "loss 2.32898\n",
      "Runing in EPOC[24] Batch [160]\n",
      "loss 2.19145\n",
      "Runing in EPOC[24] Batch [170]\n",
      "loss 2.24859\n",
      "Runing in EPOC[24] Batch [180]\n",
      "loss 2.28238\n",
      "Runing in EPOC[24] Batch [190]\n",
      "loss 2.36924\n",
      "Runing in EPOC[25] Batch [10]\n",
      "loss 2.17503\n",
      "Runing in EPOC[25] Batch [20]\n",
      "loss 2.12771\n",
      "Runing in EPOC[25] Batch [30]\n",
      "loss 2.22354\n",
      "Runing in EPOC[25] Batch [40]\n",
      "loss 2.2002\n",
      "Runing in EPOC[25] Batch [50]\n",
      "loss 2.24931\n",
      "Runing in EPOC[25] Batch [60]\n",
      "loss 2.27518\n",
      "Runing in EPOC[25] Batch [70]\n",
      "loss 2.25419\n",
      "Runing in EPOC[25] Batch [80]\n",
      "loss 2.30913\n",
      "Runing in EPOC[25] Batch [90]\n",
      "loss 2.36404\n",
      "Runing in EPOC[25] Batch [100]\n",
      "loss 2.31404\n",
      "Runing in EPOC[25] Batch [110]\n",
      "loss 2.4046\n",
      "Runing in EPOC[25] Batch [120]\n",
      "loss 2.3013\n",
      "Runing in EPOC[25] Batch [130]\n",
      "loss 2.41806\n",
      "Runing in EPOC[25] Batch [140]\n",
      "loss 2.20887\n",
      "Runing in EPOC[25] Batch [150]\n",
      "loss 2.31984\n",
      "Runing in EPOC[25] Batch [160]\n",
      "loss 2.17509\n",
      "Runing in EPOC[25] Batch [170]\n",
      "loss 2.23716\n",
      "Runing in EPOC[25] Batch [180]\n",
      "loss 2.26936\n",
      "Runing in EPOC[25] Batch [190]\n",
      "loss 2.35118\n",
      "Runing in EPOC[26] Batch [10]\n",
      "loss 2.16878\n",
      "Runing in EPOC[26] Batch [20]\n",
      "loss 2.12688\n",
      "Runing in EPOC[26] Batch [30]\n",
      "loss 2.21895\n",
      "Runing in EPOC[26] Batch [40]\n",
      "loss 2.1832\n",
      "Runing in EPOC[26] Batch [50]\n",
      "loss 2.23672\n",
      "Runing in EPOC[26] Batch [60]\n",
      "loss 2.26432\n",
      "Runing in EPOC[26] Batch [70]\n",
      "loss 2.23951\n",
      "Runing in EPOC[26] Batch [80]\n",
      "loss 2.29977\n",
      "Runing in EPOC[26] Batch [90]\n",
      "loss 2.35348\n",
      "Runing in EPOC[26] Batch [100]\n",
      "loss 2.31088\n",
      "Runing in EPOC[26] Batch [110]\n",
      "loss 2.39931\n",
      "Runing in EPOC[26] Batch [120]\n",
      "loss 2.29606\n",
      "Runing in EPOC[26] Batch [130]\n",
      "loss 2.40704\n",
      "Runing in EPOC[26] Batch [140]\n",
      "loss 2.1987\n",
      "Runing in EPOC[26] Batch [150]\n",
      "loss 2.30549\n",
      "Runing in EPOC[26] Batch [160]\n",
      "loss 2.16482\n",
      "Runing in EPOC[26] Batch [170]\n",
      "loss 2.22054\n",
      "Runing in EPOC[26] Batch [180]\n",
      "loss 2.25725\n",
      "Runing in EPOC[26] Batch [190]\n",
      "loss 2.33001\n",
      "Runing in EPOC[27] Batch [10]\n",
      "loss 2.15303\n",
      "Runing in EPOC[27] Batch [20]\n",
      "loss 2.11536\n",
      "Runing in EPOC[27] Batch [30]\n",
      "loss 2.21158\n",
      "Runing in EPOC[27] Batch [40]\n",
      "loss 2.17255\n",
      "Runing in EPOC[27] Batch [50]\n",
      "loss 2.22465\n",
      "Runing in EPOC[27] Batch [60]\n",
      "loss 2.25049\n",
      "Runing in EPOC[27] Batch [70]\n",
      "loss 2.22594\n",
      "Runing in EPOC[27] Batch [80]\n",
      "loss 2.28454\n",
      "Runing in EPOC[27] Batch [90]\n",
      "loss 2.33785\n",
      "Runing in EPOC[27] Batch [100]\n",
      "loss 2.29823\n",
      "Runing in EPOC[27] Batch [110]\n",
      "loss 2.38963\n",
      "Runing in EPOC[27] Batch [120]\n",
      "loss 2.28938\n",
      "Runing in EPOC[27] Batch [130]\n",
      "loss 2.39905\n",
      "Runing in EPOC[27] Batch [140]\n",
      "loss 2.19095\n",
      "Runing in EPOC[27] Batch [150]\n",
      "loss 2.29384\n",
      "Runing in EPOC[27] Batch [160]\n",
      "loss 2.15364\n",
      "Runing in EPOC[27] Batch [170]\n",
      "loss 2.20657\n",
      "Runing in EPOC[27] Batch [180]\n",
      "loss 2.24603\n",
      "Runing in EPOC[27] Batch [190]\n",
      "loss 2.31355\n",
      "Runing in EPOC[28] Batch [10]\n",
      "loss 2.14124\n",
      "Runing in EPOC[28] Batch [20]\n",
      "loss 2.10198\n",
      "Runing in EPOC[28] Batch [30]\n",
      "loss 2.20087\n",
      "Runing in EPOC[28] Batch [40]\n",
      "loss 2.16179\n",
      "Runing in EPOC[28] Batch [50]\n",
      "loss 2.21501\n",
      "Runing in EPOC[28] Batch [60]\n",
      "loss 2.23924\n",
      "Runing in EPOC[28] Batch [70]\n",
      "loss 2.21428\n",
      "Runing in EPOC[28] Batch [80]\n",
      "loss 2.2728\n",
      "Runing in EPOC[28] Batch [90]\n",
      "loss 2.32601\n",
      "Runing in EPOC[28] Batch [100]\n",
      "loss 2.28634\n",
      "Runing in EPOC[28] Batch [110]\n",
      "loss 2.37532\n",
      "Runing in EPOC[28] Batch [120]\n",
      "loss 2.28086\n",
      "Runing in EPOC[28] Batch [130]\n",
      "loss 2.39315\n",
      "Runing in EPOC[28] Batch [140]\n",
      "loss 2.18613\n",
      "Runing in EPOC[28] Batch [150]\n",
      "loss 2.28546\n",
      "Runing in EPOC[28] Batch [160]\n",
      "loss 2.14331\n",
      "Runing in EPOC[28] Batch [170]\n",
      "loss 2.19423\n",
      "Runing in EPOC[28] Batch [180]\n",
      "loss 2.23128\n",
      "Runing in EPOC[28] Batch [190]\n",
      "loss 2.29897\n",
      "Runing in EPOC[29] Batch [10]\n",
      "loss 2.13236\n",
      "Runing in EPOC[29] Batch [20]\n",
      "loss 2.09158\n",
      "Runing in EPOC[29] Batch [30]\n",
      "loss 2.19158\n",
      "Runing in EPOC[29] Batch [40]\n",
      "loss 2.15321\n",
      "Runing in EPOC[29] Batch [50]\n",
      "loss 2.20678\n",
      "Runing in EPOC[29] Batch [60]\n",
      "loss 2.22867\n",
      "Runing in EPOC[29] Batch [70]\n",
      "loss 2.20592\n",
      "Runing in EPOC[29] Batch [80]\n",
      "loss 2.26304\n",
      "Runing in EPOC[29] Batch [90]\n",
      "loss 2.31712\n",
      "Runing in EPOC[29] Batch [100]\n",
      "loss 2.27476\n",
      "Runing in EPOC[29] Batch [110]\n",
      "loss 2.3632\n",
      "Runing in EPOC[29] Batch [120]\n",
      "loss 2.26969\n",
      "Runing in EPOC[29] Batch [130]\n",
      "loss 2.38362\n",
      "Runing in EPOC[29] Batch [140]\n",
      "loss 2.18103\n",
      "Runing in EPOC[29] Batch [150]\n",
      "loss 2.27695\n",
      "Runing in EPOC[29] Batch [160]\n",
      "loss 2.13432\n",
      "Runing in EPOC[29] Batch [170]\n",
      "loss 2.1844\n",
      "Runing in EPOC[29] Batch [180]\n",
      "loss 2.21771\n",
      "Runing in EPOC[29] Batch [190]\n",
      "loss 2.28739\n",
      "Runing in EPOC[30] Batch [10]\n",
      "loss 2.1231\n",
      "Runing in EPOC[30] Batch [20]\n",
      "loss 2.08298\n",
      "Runing in EPOC[30] Batch [30]\n",
      "loss 2.18301\n",
      "Runing in EPOC[30] Batch [40]\n",
      "loss 2.14611\n",
      "Runing in EPOC[30] Batch [50]\n",
      "loss 2.20082\n",
      "Runing in EPOC[30] Batch [60]\n",
      "loss 2.21882\n",
      "Runing in EPOC[30] Batch [70]\n",
      "loss 2.199\n",
      "Runing in EPOC[30] Batch [80]\n",
      "loss 2.25262\n",
      "Runing in EPOC[30] Batch [90]\n",
      "loss 2.31272\n",
      "Runing in EPOC[30] Batch [100]\n",
      "loss 2.2649\n",
      "Runing in EPOC[30] Batch [110]\n",
      "loss 2.3532\n",
      "Runing in EPOC[30] Batch [120]\n",
      "loss 2.25986\n",
      "Runing in EPOC[30] Batch [130]\n",
      "loss 2.37446\n",
      "Runing in EPOC[30] Batch [140]\n",
      "loss 2.17461\n",
      "Runing in EPOC[30] Batch [150]\n",
      "loss 2.2702\n",
      "Runing in EPOC[30] Batch [160]\n",
      "loss 2.12895\n",
      "Runing in EPOC[30] Batch [170]\n",
      "loss 2.17559\n",
      "Runing in EPOC[30] Batch [180]\n",
      "loss 2.20745\n",
      "Runing in EPOC[30] Batch [190]\n",
      "loss 2.27747\n",
      "Runing in EPOC[31] Batch [10]\n",
      "loss 2.11079\n",
      "Runing in EPOC[31] Batch [20]\n",
      "loss 2.07614\n",
      "Runing in EPOC[31] Batch [30]\n",
      "loss 2.17316\n",
      "Runing in EPOC[31] Batch [40]\n",
      "loss 2.14239\n",
      "Runing in EPOC[31] Batch [50]\n",
      "loss 2.19782\n",
      "Runing in EPOC[31] Batch [60]\n",
      "loss 2.21368\n",
      "Runing in EPOC[31] Batch [70]\n",
      "loss 2.19375\n",
      "Runing in EPOC[31] Batch [80]\n",
      "loss 2.24328\n",
      "Runing in EPOC[31] Batch [90]\n",
      "loss 2.30551\n",
      "Runing in EPOC[31] Batch [100]\n",
      "loss 2.25637\n",
      "Runing in EPOC[31] Batch [110]\n",
      "loss 2.34807\n",
      "Runing in EPOC[31] Batch [120]\n",
      "loss 2.25258\n",
      "Runing in EPOC[31] Batch [130]\n",
      "loss 2.37004\n",
      "Runing in EPOC[31] Batch [140]\n",
      "loss 2.17167\n",
      "Runing in EPOC[31] Batch [150]\n",
      "loss 2.26639\n",
      "Runing in EPOC[31] Batch [160]\n",
      "loss 2.12504\n",
      "Runing in EPOC[31] Batch [170]\n",
      "loss 2.16849\n",
      "Runing in EPOC[31] Batch [180]\n",
      "loss 2.20149\n",
      "Runing in EPOC[31] Batch [190]\n",
      "loss 2.26812\n",
      "Runing in EPOC[32] Batch [10]\n",
      "loss 2.10314\n",
      "Runing in EPOC[32] Batch [20]\n",
      "loss 2.06896\n",
      "Runing in EPOC[32] Batch [30]\n",
      "loss 2.16398\n",
      "Runing in EPOC[32] Batch [40]\n",
      "loss 2.13758\n",
      "Runing in EPOC[32] Batch [50]\n",
      "loss 2.19384\n",
      "Runing in EPOC[32] Batch [60]\n",
      "loss 2.21076\n",
      "Runing in EPOC[32] Batch [70]\n",
      "loss 2.19085\n",
      "Runing in EPOC[32] Batch [80]\n",
      "loss 2.24009\n",
      "Runing in EPOC[32] Batch [90]\n",
      "loss 2.29579\n",
      "Runing in EPOC[32] Batch [100]\n",
      "loss 2.2478\n",
      "Runing in EPOC[32] Batch [110]\n",
      "loss 2.34499\n",
      "Runing in EPOC[32] Batch [120]\n",
      "loss 2.24919\n",
      "Runing in EPOC[32] Batch [130]\n",
      "loss 2.37226\n",
      "Runing in EPOC[32] Batch [140]\n",
      "loss 2.17068\n",
      "Runing in EPOC[32] Batch [150]\n",
      "loss 2.26797\n",
      "Runing in EPOC[32] Batch [160]\n",
      "loss 2.12402\n",
      "Runing in EPOC[32] Batch [170]\n",
      "loss 2.16491\n",
      "Runing in EPOC[32] Batch [180]\n",
      "loss 2.19831\n",
      "Runing in EPOC[32] Batch [190]\n",
      "loss 2.25794\n",
      "Runing in EPOC[33] Batch [10]\n",
      "loss 2.10025\n",
      "Runing in EPOC[33] Batch [20]\n",
      "loss 2.06536\n",
      "Runing in EPOC[33] Batch [30]\n",
      "loss 2.15922\n",
      "Runing in EPOC[33] Batch [40]\n",
      "loss 2.13378\n",
      "Runing in EPOC[33] Batch [50]\n",
      "loss 2.19114\n",
      "Runing in EPOC[33] Batch [60]\n",
      "loss 2.20954\n",
      "Runing in EPOC[33] Batch [70]\n",
      "loss 2.19159\n",
      "Runing in EPOC[33] Batch [80]\n",
      "loss 2.24004\n",
      "Runing in EPOC[33] Batch [90]\n",
      "loss 2.28994\n",
      "Runing in EPOC[33] Batch [100]\n",
      "loss 2.24434\n",
      "Runing in EPOC[33] Batch [110]\n",
      "loss 2.3379\n",
      "Runing in EPOC[33] Batch [120]\n",
      "loss 2.24969\n",
      "Runing in EPOC[33] Batch [130]\n",
      "loss 2.37036\n",
      "Runing in EPOC[33] Batch [140]\n",
      "loss 2.16893\n",
      "Runing in EPOC[33] Batch [150]\n",
      "loss 2.26501\n",
      "Runing in EPOC[33] Batch [160]\n",
      "loss 2.12025\n",
      "Runing in EPOC[33] Batch [170]\n",
      "loss 2.16238\n",
      "Runing in EPOC[33] Batch [180]\n",
      "loss 2.19178\n",
      "Runing in EPOC[33] Batch [190]\n",
      "loss 2.25296\n",
      "Runing in EPOC[34] Batch [10]\n",
      "loss 2.10173\n",
      "Runing in EPOC[34] Batch [20]\n",
      "loss 2.0671\n",
      "Runing in EPOC[34] Batch [30]\n",
      "loss 2.15647\n",
      "Runing in EPOC[34] Batch [40]\n",
      "loss 2.12736\n",
      "Runing in EPOC[34] Batch [50]\n",
      "loss 2.18627\n",
      "Runing in EPOC[34] Batch [60]\n",
      "loss 2.21142\n",
      "Runing in EPOC[34] Batch [70]\n",
      "loss 2.1991\n",
      "Runing in EPOC[34] Batch [80]\n",
      "loss 2.245\n",
      "Runing in EPOC[34] Batch [90]\n",
      "loss 2.29536\n",
      "Runing in EPOC[34] Batch [100]\n",
      "loss 2.24927\n",
      "Runing in EPOC[34] Batch [110]\n",
      "loss 2.33639\n",
      "Runing in EPOC[34] Batch [120]\n",
      "loss 2.25216\n",
      "Runing in EPOC[34] Batch [130]\n",
      "loss 2.35692\n",
      "Runing in EPOC[34] Batch [140]\n",
      "loss 2.16639\n",
      "Runing in EPOC[34] Batch [150]\n",
      "loss 2.2584\n",
      "Runing in EPOC[34] Batch [160]\n",
      "loss 2.11622\n",
      "Runing in EPOC[34] Batch [170]\n",
      "loss 2.15586\n",
      "Runing in EPOC[34] Batch [180]\n",
      "loss 2.18418\n",
      "Runing in EPOC[34] Batch [190]\n",
      "loss 2.24634\n",
      "Runing in EPOC[35] Batch [10]\n",
      "loss 2.09549\n",
      "Runing in EPOC[35] Batch [20]\n",
      "loss 2.06599\n",
      "Runing in EPOC[35] Batch [30]\n",
      "loss 2.15177\n",
      "Runing in EPOC[35] Batch [40]\n",
      "loss 2.12197\n",
      "Runing in EPOC[35] Batch [50]\n",
      "loss 2.18445\n",
      "Runing in EPOC[35] Batch [60]\n",
      "loss 2.20526\n",
      "Runing in EPOC[35] Batch [70]\n",
      "loss 2.20719\n",
      "Runing in EPOC[35] Batch [80]\n",
      "loss 2.23458\n",
      "Runing in EPOC[35] Batch [90]\n",
      "loss 2.29575\n",
      "Runing in EPOC[35] Batch [100]\n",
      "loss 2.25028\n",
      "Runing in EPOC[35] Batch [110]\n",
      "loss 2.33383\n",
      "Runing in EPOC[35] Batch [120]\n",
      "loss 2.24797\n",
      "Runing in EPOC[35] Batch [130]\n",
      "loss 2.34848\n",
      "Runing in EPOC[35] Batch [140]\n",
      "loss 2.1577\n",
      "Runing in EPOC[35] Batch [150]\n",
      "loss 2.25015\n",
      "Runing in EPOC[35] Batch [160]\n",
      "loss 2.10504\n",
      "Runing in EPOC[35] Batch [170]\n",
      "loss 2.14913\n",
      "Runing in EPOC[35] Batch [180]\n",
      "loss 2.17766\n",
      "Runing in EPOC[35] Batch [190]\n",
      "loss 2.2365\n",
      "Runing in EPOC[36] Batch [10]\n",
      "loss 2.08927\n",
      "Runing in EPOC[36] Batch [20]\n",
      "loss 2.05621\n",
      "Runing in EPOC[36] Batch [30]\n",
      "loss 2.14283\n",
      "Runing in EPOC[36] Batch [40]\n",
      "loss 2.12182\n",
      "Runing in EPOC[36] Batch [50]\n",
      "loss 2.17593\n",
      "Runing in EPOC[36] Batch [60]\n",
      "loss 2.19455\n",
      "Runing in EPOC[36] Batch [70]\n",
      "loss 2.1954\n",
      "Runing in EPOC[36] Batch [80]\n",
      "loss 2.21948\n",
      "Runing in EPOC[36] Batch [90]\n",
      "loss 2.27571\n",
      "Runing in EPOC[36] Batch [100]\n",
      "loss 2.23634\n",
      "Runing in EPOC[36] Batch [110]\n",
      "loss 2.31743\n",
      "Runing in EPOC[36] Batch [120]\n",
      "loss 2.23588\n",
      "Runing in EPOC[36] Batch [130]\n",
      "loss 2.33628\n",
      "Runing in EPOC[36] Batch [140]\n",
      "loss 2.14524\n",
      "Runing in EPOC[36] Batch [150]\n",
      "loss 2.23871\n",
      "Runing in EPOC[36] Batch [160]\n",
      "loss 2.09433\n",
      "Runing in EPOC[36] Batch [170]\n",
      "loss 2.1411\n",
      "Runing in EPOC[36] Batch [180]\n",
      "loss 2.17008\n",
      "Runing in EPOC[36] Batch [190]\n",
      "loss 2.23299\n",
      "Runing in EPOC[37] Batch [10]\n",
      "loss 2.0775\n",
      "Runing in EPOC[37] Batch [20]\n",
      "loss 2.0452\n",
      "Runing in EPOC[37] Batch [30]\n",
      "loss 2.13031\n",
      "Runing in EPOC[37] Batch [40]\n",
      "loss 2.11027\n",
      "Runing in EPOC[37] Batch [50]\n",
      "loss 2.16204\n",
      "Runing in EPOC[37] Batch [60]\n",
      "loss 2.18476\n",
      "Runing in EPOC[37] Batch [70]\n",
      "loss 2.17512\n",
      "Runing in EPOC[37] Batch [80]\n",
      "loss 2.20886\n",
      "Runing in EPOC[37] Batch [90]\n",
      "loss 2.25902\n",
      "Runing in EPOC[37] Batch [100]\n",
      "loss 2.22445\n",
      "Runing in EPOC[37] Batch [110]\n",
      "loss 2.30096\n",
      "Runing in EPOC[37] Batch [120]\n",
      "loss 2.22045\n",
      "Runing in EPOC[37] Batch [130]\n",
      "loss 2.32079\n",
      "Runing in EPOC[37] Batch [140]\n",
      "loss 2.13215\n",
      "Runing in EPOC[37] Batch [150]\n",
      "loss 2.22595\n",
      "Runing in EPOC[37] Batch [160]\n",
      "loss 2.08294\n",
      "Runing in EPOC[37] Batch [170]\n",
      "loss 2.13344\n",
      "Runing in EPOC[37] Batch [180]\n",
      "loss 2.16252\n",
      "Runing in EPOC[37] Batch [190]\n",
      "loss 2.22849\n",
      "Runing in EPOC[38] Batch [10]\n",
      "loss 2.06838\n",
      "Runing in EPOC[38] Batch [20]\n",
      "loss 2.03628\n",
      "Runing in EPOC[38] Batch [30]\n",
      "loss 2.11857\n",
      "Runing in EPOC[38] Batch [40]\n",
      "loss 2.0987\n",
      "Runing in EPOC[38] Batch [50]\n",
      "loss 2.15234\n",
      "Runing in EPOC[38] Batch [60]\n",
      "loss 2.17103\n",
      "Runing in EPOC[38] Batch [70]\n",
      "loss 2.159\n",
      "Runing in EPOC[38] Batch [80]\n",
      "loss 2.19788\n",
      "Runing in EPOC[38] Batch [90]\n",
      "loss 2.24869\n",
      "Runing in EPOC[38] Batch [100]\n",
      "loss 2.2128\n",
      "Runing in EPOC[38] Batch [110]\n",
      "loss 2.29137\n",
      "Runing in EPOC[38] Batch [120]\n",
      "loss 2.20949\n",
      "Runing in EPOC[38] Batch [130]\n",
      "loss 2.30873\n",
      "Runing in EPOC[38] Batch [140]\n",
      "loss 2.11844\n",
      "Runing in EPOC[38] Batch [150]\n",
      "loss 2.21361\n",
      "Runing in EPOC[38] Batch [160]\n",
      "loss 2.06996\n",
      "Runing in EPOC[38] Batch [170]\n",
      "loss 2.1245\n",
      "Runing in EPOC[38] Batch [180]\n",
      "loss 2.15354\n",
      "Runing in EPOC[38] Batch [190]\n",
      "loss 2.22072\n",
      "Runing in EPOC[39] Batch [10]\n",
      "loss 2.0616\n",
      "Runing in EPOC[39] Batch [20]\n",
      "loss 2.03005\n",
      "Runing in EPOC[39] Batch [30]\n",
      "loss 2.1111\n",
      "Runing in EPOC[39] Batch [40]\n",
      "loss 2.08961\n",
      "Runing in EPOC[39] Batch [50]\n",
      "loss 2.14467\n",
      "Runing in EPOC[39] Batch [60]\n",
      "loss 2.16017\n",
      "Runing in EPOC[39] Batch [70]\n",
      "loss 2.14822\n",
      "Runing in EPOC[39] Batch [80]\n",
      "loss 2.18603\n",
      "Runing in EPOC[39] Batch [90]\n",
      "loss 2.24082\n",
      "Runing in EPOC[39] Batch [100]\n",
      "loss 2.20323\n",
      "Runing in EPOC[39] Batch [110]\n",
      "loss 2.28389\n",
      "Runing in EPOC[39] Batch [120]\n",
      "loss 2.20097\n",
      "Runing in EPOC[39] Batch [130]\n",
      "loss 2.30118\n",
      "Runing in EPOC[39] Batch [140]\n",
      "loss 2.10968\n",
      "Runing in EPOC[39] Batch [150]\n",
      "loss 2.20391\n",
      "Runing in EPOC[39] Batch [160]\n",
      "loss 2.0595\n",
      "Runing in EPOC[39] Batch [170]\n",
      "loss 2.11659\n",
      "Runing in EPOC[39] Batch [180]\n",
      "loss 2.14469\n",
      "Runing in EPOC[39] Batch [190]\n",
      "loss 2.20954\n",
      "Runing in EPOC[40] Batch [10]\n",
      "loss 2.05696\n",
      "Runing in EPOC[40] Batch [20]\n",
      "loss 2.02608\n",
      "Runing in EPOC[40] Batch [30]\n",
      "loss 2.10531\n",
      "Runing in EPOC[40] Batch [40]\n",
      "loss 2.08325\n",
      "Runing in EPOC[40] Batch [50]\n",
      "loss 2.13779\n",
      "Runing in EPOC[40] Batch [60]\n",
      "loss 2.15248\n",
      "Runing in EPOC[40] Batch [70]\n",
      "loss 2.13755\n",
      "Runing in EPOC[40] Batch [80]\n",
      "loss 2.17666\n",
      "Runing in EPOC[40] Batch [90]\n",
      "loss 2.23313\n",
      "Runing in EPOC[40] Batch [100]\n",
      "loss 2.19367\n",
      "Runing in EPOC[40] Batch [110]\n",
      "loss 2.2787\n",
      "Runing in EPOC[40] Batch [120]\n",
      "loss 2.19339\n",
      "Runing in EPOC[40] Batch [130]\n",
      "loss 2.29489\n",
      "Runing in EPOC[40] Batch [140]\n",
      "loss 2.10307\n",
      "Runing in EPOC[40] Batch [150]\n",
      "loss 2.19632\n",
      "Runing in EPOC[40] Batch [160]\n",
      "loss 2.04985\n",
      "Runing in EPOC[40] Batch [170]\n",
      "loss 2.10954\n",
      "Runing in EPOC[40] Batch [180]\n",
      "loss 2.13749\n",
      "Runing in EPOC[40] Batch [190]\n",
      "loss 2.19933\n",
      "Runing in EPOC[41] Batch [10]\n",
      "loss 2.05419\n",
      "Runing in EPOC[41] Batch [20]\n",
      "loss 2.02203\n",
      "Runing in EPOC[41] Batch [30]\n",
      "loss 2.10027\n",
      "Runing in EPOC[41] Batch [40]\n",
      "loss 2.0766\n",
      "Runing in EPOC[41] Batch [50]\n",
      "loss 2.13198\n",
      "Runing in EPOC[41] Batch [60]\n",
      "loss 2.14575\n",
      "Runing in EPOC[41] Batch [70]\n",
      "loss 2.12995\n",
      "Runing in EPOC[41] Batch [80]\n",
      "loss 2.17028\n",
      "Runing in EPOC[41] Batch [90]\n",
      "loss 2.22647\n",
      "Runing in EPOC[41] Batch [100]\n",
      "loss 2.18542\n",
      "Runing in EPOC[41] Batch [110]\n",
      "loss 2.27405\n",
      "Runing in EPOC[41] Batch [120]\n",
      "loss 2.18644\n",
      "Runing in EPOC[41] Batch [130]\n",
      "loss 2.29106\n",
      "Runing in EPOC[41] Batch [140]\n",
      "loss 2.09585\n",
      "Runing in EPOC[41] Batch [150]\n",
      "loss 2.19017\n",
      "Runing in EPOC[41] Batch [160]\n",
      "loss 2.04301\n",
      "Runing in EPOC[41] Batch [170]\n",
      "loss 2.10278\n",
      "Runing in EPOC[41] Batch [180]\n",
      "loss 2.13295\n",
      "Runing in EPOC[41] Batch [190]\n",
      "loss 2.19017\n",
      "Runing in EPOC[42] Batch [10]\n",
      "loss 2.04858\n",
      "Runing in EPOC[42] Batch [20]\n",
      "loss 2.01528\n",
      "Runing in EPOC[42] Batch [30]\n",
      "loss 2.09443\n",
      "Runing in EPOC[42] Batch [40]\n",
      "loss 2.07049\n",
      "Runing in EPOC[42] Batch [50]\n",
      "loss 2.12635\n",
      "Runing in EPOC[42] Batch [60]\n",
      "loss 2.14232\n",
      "Runing in EPOC[42] Batch [70]\n",
      "loss 2.12527\n",
      "Runing in EPOC[42] Batch [80]\n",
      "loss 2.16469\n",
      "Runing in EPOC[42] Batch [90]\n",
      "loss 2.21995\n",
      "Runing in EPOC[42] Batch [100]\n",
      "loss 2.1806\n",
      "Runing in EPOC[42] Batch [110]\n",
      "loss 2.27201\n",
      "Runing in EPOC[42] Batch [120]\n",
      "loss 2.18318\n",
      "Runing in EPOC[42] Batch [130]\n",
      "loss 2.28867\n",
      "Runing in EPOC[42] Batch [140]\n",
      "loss 2.09407\n",
      "Runing in EPOC[42] Batch [150]\n",
      "loss 2.18675\n",
      "Runing in EPOC[42] Batch [160]\n",
      "loss 2.04184\n",
      "Runing in EPOC[42] Batch [170]\n",
      "loss 2.09678\n",
      "Runing in EPOC[42] Batch [180]\n",
      "loss 2.1291\n",
      "Runing in EPOC[42] Batch [190]\n",
      "loss 2.18269\n",
      "Runing in EPOC[43] Batch [10]\n",
      "loss 2.04199\n",
      "Runing in EPOC[43] Batch [20]\n",
      "loss 2.00971\n",
      "Runing in EPOC[43] Batch [30]\n",
      "loss 2.08813\n",
      "Runing in EPOC[43] Batch [40]\n",
      "loss 2.06705\n",
      "Runing in EPOC[43] Batch [50]\n",
      "loss 2.12502\n",
      "Runing in EPOC[43] Batch [60]\n",
      "loss 2.14173\n",
      "Runing in EPOC[43] Batch [70]\n",
      "loss 2.12383\n",
      "Runing in EPOC[43] Batch [80]\n",
      "loss 2.16285\n",
      "Runing in EPOC[43] Batch [90]\n",
      "loss 2.21758\n",
      "Runing in EPOC[43] Batch [100]\n",
      "loss 2.18344\n",
      "Runing in EPOC[43] Batch [110]\n",
      "loss 2.27075\n",
      "Runing in EPOC[43] Batch [120]\n",
      "loss 2.18841\n",
      "Runing in EPOC[43] Batch [130]\n",
      "loss 2.29445\n",
      "Runing in EPOC[43] Batch [140]\n",
      "loss 2.10125\n",
      "Runing in EPOC[43] Batch [150]\n",
      "loss 2.18861\n",
      "Runing in EPOC[43] Batch [160]\n",
      "loss 2.04433\n",
      "Runing in EPOC[43] Batch [170]\n",
      "loss 2.09418\n",
      "Runing in EPOC[43] Batch [180]\n",
      "loss 2.12647\n",
      "Runing in EPOC[43] Batch [190]\n",
      "loss 2.18083\n",
      "Runing in EPOC[44] Batch [10]\n",
      "loss 2.03772\n",
      "Runing in EPOC[44] Batch [20]\n",
      "loss 2.01175\n",
      "Runing in EPOC[44] Batch [30]\n",
      "loss 2.0913\n",
      "Runing in EPOC[44] Batch [40]\n",
      "loss 2.07472\n",
      "Runing in EPOC[44] Batch [50]\n",
      "loss 2.1335\n",
      "Runing in EPOC[44] Batch [60]\n",
      "loss 2.14513\n",
      "Runing in EPOC[44] Batch [70]\n",
      "loss 2.12393\n",
      "Runing in EPOC[44] Batch [80]\n",
      "loss 2.16026\n",
      "Runing in EPOC[44] Batch [90]\n",
      "loss 2.21863\n",
      "Runing in EPOC[44] Batch [100]\n",
      "loss 2.19186\n",
      "Runing in EPOC[44] Batch [110]\n",
      "loss 2.26911\n",
      "Runing in EPOC[44] Batch [120]\n",
      "loss 2.19574\n",
      "Runing in EPOC[44] Batch [130]\n",
      "loss 2.29425\n",
      "Runing in EPOC[44] Batch [140]\n",
      "loss 2.11476\n",
      "Runing in EPOC[44] Batch [150]\n",
      "loss 2.19027\n",
      "Runing in EPOC[44] Batch [160]\n",
      "loss 2.05032\n",
      "Runing in EPOC[44] Batch [170]\n",
      "loss 2.09949\n",
      "Runing in EPOC[44] Batch [180]\n",
      "loss 2.12589\n",
      "Runing in EPOC[44] Batch [190]\n",
      "loss 2.18822\n",
      "Runing in EPOC[45] Batch [10]\n",
      "loss 2.04231\n",
      "Runing in EPOC[45] Batch [20]\n",
      "loss 2.01903\n",
      "Runing in EPOC[45] Batch [30]\n",
      "loss 2.10921\n",
      "Runing in EPOC[45] Batch [40]\n",
      "loss 2.07921\n",
      "Runing in EPOC[45] Batch [50]\n",
      "loss 2.13856\n",
      "Runing in EPOC[45] Batch [60]\n",
      "loss 2.14191\n",
      "Runing in EPOC[45] Batch [70]\n",
      "loss 2.12674\n",
      "Runing in EPOC[45] Batch [80]\n",
      "loss 2.16234\n",
      "Runing in EPOC[45] Batch [90]\n",
      "loss 2.22054\n",
      "Runing in EPOC[45] Batch [100]\n",
      "loss 2.19487\n",
      "Runing in EPOC[45] Batch [110]\n",
      "loss 2.26752\n",
      "Runing in EPOC[45] Batch [120]\n",
      "loss 2.19396\n",
      "Runing in EPOC[45] Batch [130]\n",
      "loss 2.29293\n",
      "Runing in EPOC[45] Batch [140]\n",
      "loss 2.10296\n",
      "Runing in EPOC[45] Batch [150]\n",
      "loss 2.19286\n",
      "Runing in EPOC[45] Batch [160]\n",
      "loss 2.04783\n",
      "Runing in EPOC[45] Batch [170]\n",
      "loss 2.10436\n",
      "Runing in EPOC[45] Batch [180]\n",
      "loss 2.1266\n",
      "Runing in EPOC[45] Batch [190]\n",
      "loss 2.18846\n",
      "Runing in EPOC[46] Batch [10]\n",
      "loss 2.04215\n",
      "Runing in EPOC[46] Batch [20]\n",
      "loss 2.02182\n",
      "Runing in EPOC[46] Batch [30]\n",
      "loss 2.11027\n",
      "Runing in EPOC[46] Batch [40]\n",
      "loss 2.08348\n",
      "Runing in EPOC[46] Batch [50]\n",
      "loss 2.13879\n",
      "Runing in EPOC[46] Batch [60]\n",
      "loss 2.14153\n",
      "Runing in EPOC[46] Batch [70]\n",
      "loss 2.13443\n",
      "Runing in EPOC[46] Batch [80]\n",
      "loss 2.161\n",
      "Runing in EPOC[46] Batch [90]\n",
      "loss 2.21056\n",
      "Runing in EPOC[46] Batch [100]\n",
      "loss 2.18338\n",
      "Runing in EPOC[46] Batch [110]\n",
      "loss 2.25765\n",
      "Runing in EPOC[46] Batch [120]\n",
      "loss 2.17714\n",
      "Runing in EPOC[46] Batch [130]\n",
      "loss 2.28182\n",
      "Runing in EPOC[46] Batch [140]\n",
      "loss 2.08366\n",
      "Runing in EPOC[46] Batch [150]\n",
      "loss 2.18266\n",
      "Runing in EPOC[46] Batch [160]\n",
      "loss 2.03326\n",
      "Runing in EPOC[46] Batch [170]\n",
      "loss 2.08949\n",
      "Runing in EPOC[46] Batch [180]\n",
      "loss 2.11559\n",
      "Runing in EPOC[46] Batch [190]\n",
      "loss 2.16479\n",
      "Runing in EPOC[47] Batch [10]\n",
      "loss 2.02901\n",
      "Runing in EPOC[47] Batch [20]\n",
      "loss 2.00836\n",
      "Runing in EPOC[47] Batch [30]\n",
      "loss 2.09405\n",
      "Runing in EPOC[47] Batch [40]\n",
      "loss 2.0639\n",
      "Runing in EPOC[47] Batch [50]\n",
      "loss 2.12489\n",
      "Runing in EPOC[47] Batch [60]\n",
      "loss 2.12202\n",
      "Runing in EPOC[47] Batch [70]\n",
      "loss 2.11621\n",
      "Runing in EPOC[47] Batch [80]\n",
      "loss 2.14788\n",
      "Runing in EPOC[47] Batch [90]\n",
      "loss 2.19575\n",
      "Runing in EPOC[47] Batch [100]\n",
      "loss 2.17062\n",
      "Runing in EPOC[47] Batch [110]\n",
      "loss 2.24363\n",
      "Runing in EPOC[47] Batch [120]\n",
      "loss 2.16115\n",
      "Runing in EPOC[47] Batch [130]\n",
      "loss 2.26367\n",
      "Runing in EPOC[47] Batch [140]\n",
      "loss 2.07438\n",
      "Runing in EPOC[47] Batch [150]\n",
      "loss 2.16661\n",
      "Runing in EPOC[47] Batch [160]\n",
      "loss 2.02496\n",
      "Runing in EPOC[47] Batch [170]\n",
      "loss 2.06994\n",
      "Runing in EPOC[47] Batch [180]\n",
      "loss 2.10556\n",
      "Runing in EPOC[47] Batch [190]\n",
      "loss 2.15323\n",
      "Runing in EPOC[48] Batch [10]\n",
      "loss 2.02059\n",
      "Runing in EPOC[48] Batch [20]\n",
      "loss 1.99436\n",
      "Runing in EPOC[48] Batch [30]\n",
      "loss 2.07901\n",
      "Runing in EPOC[48] Batch [40]\n",
      "loss 2.04708\n",
      "Runing in EPOC[48] Batch [50]\n",
      "loss 2.11214\n",
      "Runing in EPOC[48] Batch [60]\n",
      "loss 2.11141\n",
      "Runing in EPOC[48] Batch [70]\n",
      "loss 2.1021\n",
      "Runing in EPOC[48] Batch [80]\n",
      "loss 2.13071\n",
      "Runing in EPOC[48] Batch [90]\n",
      "loss 2.18611\n",
      "Runing in EPOC[48] Batch [100]\n",
      "loss 2.16173\n",
      "Runing in EPOC[48] Batch [110]\n",
      "loss 2.23579\n",
      "Runing in EPOC[48] Batch [120]\n",
      "loss 2.15254\n",
      "Runing in EPOC[48] Batch [130]\n",
      "loss 2.25179\n",
      "Runing in EPOC[48] Batch [140]\n",
      "loss 2.06609\n",
      "Runing in EPOC[48] Batch [150]\n",
      "loss 2.15512\n",
      "Runing in EPOC[48] Batch [160]\n",
      "loss 2.01683\n",
      "Runing in EPOC[48] Batch [170]\n",
      "loss 2.06007\n",
      "Runing in EPOC[48] Batch [180]\n",
      "loss 2.09759\n",
      "Runing in EPOC[48] Batch [190]\n",
      "loss 2.15084\n",
      "Runing in EPOC[49] Batch [10]\n",
      "loss 2.00821\n",
      "Runing in EPOC[49] Batch [20]\n",
      "loss 1.98272\n",
      "Runing in EPOC[49] Batch [30]\n",
      "loss 2.06609\n",
      "Runing in EPOC[49] Batch [40]\n",
      "loss 2.03904\n",
      "Runing in EPOC[49] Batch [50]\n",
      "loss 2.09984\n",
      "Runing in EPOC[49] Batch [60]\n",
      "loss 2.10554\n",
      "Runing in EPOC[49] Batch [70]\n",
      "loss 2.09479\n",
      "Runing in EPOC[49] Batch [80]\n",
      "loss 2.12367\n",
      "Runing in EPOC[49] Batch [90]\n",
      "loss 2.17763\n",
      "Runing in EPOC[49] Batch [100]\n",
      "loss 2.15205\n",
      "Runing in EPOC[49] Batch [110]\n",
      "loss 2.2263\n",
      "Runing in EPOC[49] Batch [120]\n",
      "loss 2.14599\n",
      "Runing in EPOC[49] Batch [130]\n",
      "loss 2.24405\n",
      "Runing in EPOC[49] Batch [140]\n",
      "loss 2.05655\n",
      "Runing in EPOC[49] Batch [150]\n",
      "loss 2.14629\n",
      "Runing in EPOC[49] Batch [160]\n",
      "loss 2.00477\n",
      "Runing in EPOC[49] Batch [170]\n",
      "loss 2.0538\n",
      "Runing in EPOC[49] Batch [180]\n",
      "loss 2.08644\n",
      "Runing in EPOC[49] Batch [190]\n",
      "loss 2.14758\n",
      "Runing in EPOC[50] Batch [10]\n",
      "loss 1.99885\n",
      "Runing in EPOC[50] Batch [20]\n",
      "loss 1.97599\n",
      "Runing in EPOC[50] Batch [30]\n",
      "loss 2.05304\n",
      "Runing in EPOC[50] Batch [40]\n",
      "loss 2.03438\n",
      "Runing in EPOC[50] Batch [50]\n",
      "loss 2.0895\n",
      "Runing in EPOC[50] Batch [60]\n",
      "loss 2.09883\n",
      "Runing in EPOC[50] Batch [70]\n",
      "loss 2.09003\n",
      "Runing in EPOC[50] Batch [80]\n",
      "loss 2.11965\n",
      "Runing in EPOC[50] Batch [90]\n",
      "loss 2.17028\n",
      "Runing in EPOC[50] Batch [100]\n",
      "loss 2.14911\n",
      "Runing in EPOC[50] Batch [110]\n",
      "loss 2.21794\n",
      "Runing in EPOC[50] Batch [120]\n",
      "loss 2.14079\n",
      "Runing in EPOC[50] Batch [130]\n",
      "loss 2.23792\n",
      "Runing in EPOC[50] Batch [140]\n",
      "loss 2.0496\n",
      "Runing in EPOC[50] Batch [150]\n",
      "loss 2.13847\n",
      "Runing in EPOC[50] Batch [160]\n",
      "loss 1.99719\n",
      "Runing in EPOC[50] Batch [170]\n",
      "loss 2.04804\n",
      "Runing in EPOC[50] Batch [180]\n",
      "loss 2.07977\n",
      "Runing in EPOC[50] Batch [190]\n",
      "loss 2.14173\n",
      "Runing in EPOC[51] Batch [10]\n",
      "loss 1.9916\n",
      "Runing in EPOC[51] Batch [20]\n",
      "loss 1.968\n",
      "Runing in EPOC[51] Batch [30]\n",
      "loss 2.04276\n",
      "Runing in EPOC[51] Batch [40]\n",
      "loss 2.02803\n",
      "Runing in EPOC[51] Batch [50]\n",
      "loss 2.08612\n",
      "Runing in EPOC[51] Batch [60]\n",
      "loss 2.09255\n",
      "Runing in EPOC[51] Batch [70]\n",
      "loss 2.08679\n",
      "Runing in EPOC[51] Batch [80]\n",
      "loss 2.1198\n",
      "Runing in EPOC[51] Batch [90]\n",
      "loss 2.16679\n",
      "Runing in EPOC[51] Batch [100]\n",
      "loss 2.15033\n",
      "Runing in EPOC[51] Batch [110]\n",
      "loss 2.21287\n",
      "Runing in EPOC[51] Batch [120]\n",
      "loss 2.13596\n",
      "Runing in EPOC[51] Batch [130]\n",
      "loss 2.23576\n",
      "Runing in EPOC[51] Batch [140]\n",
      "loss 2.04761\n",
      "Runing in EPOC[51] Batch [150]\n",
      "loss 2.13563\n",
      "Runing in EPOC[51] Batch [160]\n",
      "loss 1.99437\n",
      "Runing in EPOC[51] Batch [170]\n",
      "loss 2.04196\n",
      "Runing in EPOC[51] Batch [180]\n",
      "loss 2.0759\n",
      "Runing in EPOC[51] Batch [190]\n",
      "loss 2.1314\n",
      "Runing in EPOC[52] Batch [10]\n",
      "loss 1.99085\n",
      "Runing in EPOC[52] Batch [20]\n",
      "loss 1.96097\n",
      "Runing in EPOC[52] Batch [30]\n",
      "loss 2.04019\n",
      "Runing in EPOC[52] Batch [40]\n",
      "loss 2.01946\n",
      "Runing in EPOC[52] Batch [50]\n",
      "loss 2.0894\n",
      "Runing in EPOC[52] Batch [60]\n",
      "loss 2.09044\n",
      "Runing in EPOC[52] Batch [70]\n",
      "loss 2.09182\n",
      "Runing in EPOC[52] Batch [80]\n",
      "loss 2.12508\n",
      "Runing in EPOC[52] Batch [90]\n",
      "loss 2.17439\n",
      "Runing in EPOC[52] Batch [100]\n",
      "loss 2.15317\n",
      "Runing in EPOC[52] Batch [110]\n",
      "loss 2.22346\n",
      "Runing in EPOC[52] Batch [120]\n",
      "loss 2.13865\n",
      "Runing in EPOC[52] Batch [130]\n",
      "loss 2.24721\n",
      "Runing in EPOC[52] Batch [140]\n",
      "loss 2.05623\n",
      "Runing in EPOC[52] Batch [150]\n",
      "loss 2.14453\n",
      "Runing in EPOC[52] Batch [160]\n",
      "loss 2.00017\n",
      "Runing in EPOC[52] Batch [170]\n",
      "loss 2.04744\n",
      "Runing in EPOC[52] Batch [180]\n",
      "loss 2.08129\n",
      "Runing in EPOC[52] Batch [190]\n",
      "loss 2.12419\n",
      "Runing in EPOC[53] Batch [10]\n",
      "loss 1.99021\n",
      "Runing in EPOC[53] Batch [20]\n",
      "loss 1.97789\n",
      "Runing in EPOC[53] Batch [30]\n",
      "loss 2.02955\n",
      "Runing in EPOC[53] Batch [40]\n",
      "loss 2.02532\n",
      "Runing in EPOC[53] Batch [50]\n",
      "loss 2.07987\n",
      "Runing in EPOC[53] Batch [60]\n",
      "loss 2.10449\n",
      "Runing in EPOC[53] Batch [70]\n",
      "loss 2.07689\n",
      "Runing in EPOC[53] Batch [80]\n",
      "loss 2.1385\n",
      "Runing in EPOC[53] Batch [90]\n",
      "loss 2.18049\n",
      "Runing in EPOC[53] Batch [100]\n",
      "loss 2.1586\n",
      "Runing in EPOC[53] Batch [110]\n",
      "loss 2.21343\n",
      "Runing in EPOC[53] Batch [120]\n",
      "loss 2.13483\n",
      "Runing in EPOC[53] Batch [130]\n",
      "loss 2.23625\n",
      "Runing in EPOC[53] Batch [140]\n",
      "loss 2.07086\n",
      "Runing in EPOC[53] Batch [150]\n",
      "loss 2.1537\n",
      "Runing in EPOC[53] Batch [160]\n",
      "loss 2.01005\n",
      "Runing in EPOC[53] Batch [170]\n",
      "loss 2.05084\n",
      "Runing in EPOC[53] Batch [180]\n",
      "loss 2.08212\n",
      "Runing in EPOC[53] Batch [190]\n",
      "loss 2.12928\n",
      "Runing in EPOC[54] Batch [10]\n",
      "loss 1.99924\n",
      "Runing in EPOC[54] Batch [20]\n",
      "loss 1.98899\n",
      "Runing in EPOC[54] Batch [30]\n",
      "loss 2.06905\n",
      "Runing in EPOC[54] Batch [40]\n",
      "loss 2.02632\n",
      "Runing in EPOC[54] Batch [50]\n",
      "loss 2.08467\n",
      "Runing in EPOC[54] Batch [60]\n",
      "loss 2.08429\n",
      "Runing in EPOC[54] Batch [70]\n",
      "loss 2.08992\n",
      "Runing in EPOC[54] Batch [80]\n",
      "loss 2.12502\n",
      "Runing in EPOC[54] Batch [90]\n",
      "loss 2.1748\n",
      "Runing in EPOC[54] Batch [100]\n",
      "loss 2.16207\n",
      "Runing in EPOC[54] Batch [110]\n",
      "loss 2.21075\n",
      "Runing in EPOC[54] Batch [120]\n",
      "loss 2.11792\n",
      "Runing in EPOC[54] Batch [130]\n",
      "loss 2.21756\n",
      "Runing in EPOC[54] Batch [140]\n",
      "loss 2.05398\n",
      "Runing in EPOC[54] Batch [150]\n",
      "loss 2.14481\n",
      "Runing in EPOC[54] Batch [160]\n",
      "loss 2.01107\n",
      "Runing in EPOC[54] Batch [170]\n",
      "loss 2.04397\n",
      "Runing in EPOC[54] Batch [180]\n",
      "loss 2.07513\n",
      "Runing in EPOC[54] Batch [190]\n",
      "loss 2.12326\n",
      "Runing in EPOC[55] Batch [10]\n",
      "loss 2.00334\n",
      "Runing in EPOC[55] Batch [20]\n",
      "loss 1.97082\n",
      "Runing in EPOC[55] Batch [30]\n",
      "loss 2.06152\n",
      "Runing in EPOC[55] Batch [40]\n",
      "loss 2.03238\n",
      "Runing in EPOC[55] Batch [50]\n",
      "loss 2.07402\n",
      "Runing in EPOC[55] Batch [60]\n",
      "loss 2.07826\n",
      "Runing in EPOC[55] Batch [70]\n",
      "loss 2.07365\n",
      "Runing in EPOC[55] Batch [80]\n",
      "loss 2.11905\n",
      "Runing in EPOC[55] Batch [90]\n",
      "loss 2.1679\n",
      "Runing in EPOC[55] Batch [100]\n",
      "loss 2.1493\n",
      "Runing in EPOC[55] Batch [110]\n",
      "loss 2.21661\n",
      "Runing in EPOC[55] Batch [120]\n",
      "loss 2.12637\n",
      "Runing in EPOC[55] Batch [130]\n",
      "loss 2.21546\n",
      "Runing in EPOC[55] Batch [140]\n",
      "loss 2.04329\n",
      "Runing in EPOC[55] Batch [150]\n",
      "loss 2.13369\n",
      "Runing in EPOC[55] Batch [160]\n",
      "loss 2.00227\n",
      "Runing in EPOC[55] Batch [170]\n",
      "loss 2.03975\n",
      "Runing in EPOC[55] Batch [180]\n",
      "loss 2.06556\n",
      "Runing in EPOC[55] Batch [190]\n",
      "loss 2.11832\n",
      "Runing in EPOC[56] Batch [10]\n",
      "loss 2.00508\n",
      "Runing in EPOC[56] Batch [20]\n",
      "loss 1.96578\n",
      "Runing in EPOC[56] Batch [30]\n",
      "loss 2.04082\n",
      "Runing in EPOC[56] Batch [40]\n",
      "loss 2.0205\n",
      "Runing in EPOC[56] Batch [50]\n",
      "loss 2.06957\n",
      "Runing in EPOC[56] Batch [60]\n",
      "loss 2.07025\n",
      "Runing in EPOC[56] Batch [70]\n",
      "loss 2.05755\n",
      "Runing in EPOC[56] Batch [80]\n",
      "loss 2.10205\n",
      "Runing in EPOC[56] Batch [90]\n",
      "loss 2.15638\n",
      "Runing in EPOC[56] Batch [100]\n",
      "loss 2.13225\n",
      "Runing in EPOC[56] Batch [110]\n",
      "loss 2.19899\n",
      "Runing in EPOC[56] Batch [120]\n",
      "loss 2.13265\n",
      "Runing in EPOC[56] Batch [130]\n",
      "loss 2.23001\n",
      "Runing in EPOC[56] Batch [140]\n",
      "loss 2.05324\n",
      "Runing in EPOC[56] Batch [150]\n",
      "loss 2.1339\n",
      "Runing in EPOC[56] Batch [160]\n",
      "loss 1.99683\n",
      "Runing in EPOC[56] Batch [170]\n",
      "loss 2.03492\n",
      "Runing in EPOC[56] Batch [180]\n",
      "loss 2.06633\n",
      "Runing in EPOC[56] Batch [190]\n",
      "loss 2.11806\n",
      "Runing in EPOC[57] Batch [10]\n",
      "loss 1.99324\n",
      "Runing in EPOC[57] Batch [20]\n",
      "loss 1.9591\n",
      "Runing in EPOC[57] Batch [30]\n",
      "loss 2.03392\n",
      "Runing in EPOC[57] Batch [40]\n",
      "loss 2.01277\n",
      "Runing in EPOC[57] Batch [50]\n",
      "loss 2.06727\n",
      "Runing in EPOC[57] Batch [60]\n",
      "loss 2.07649\n",
      "Runing in EPOC[57] Batch [70]\n",
      "loss 2.0511\n",
      "Runing in EPOC[57] Batch [80]\n",
      "loss 2.09004\n",
      "Runing in EPOC[57] Batch [90]\n",
      "loss 2.14135\n",
      "Runing in EPOC[57] Batch [100]\n",
      "loss 2.12053\n",
      "Runing in EPOC[57] Batch [110]\n",
      "loss 2.18505\n",
      "Runing in EPOC[57] Batch [120]\n",
      "loss 2.11423\n",
      "Runing in EPOC[57] Batch [130]\n",
      "loss 2.22153\n",
      "Runing in EPOC[57] Batch [140]\n",
      "loss 2.05762\n",
      "Runing in EPOC[57] Batch [150]\n",
      "loss 2.13257\n",
      "Runing in EPOC[57] Batch [160]\n",
      "loss 1.99362\n",
      "Runing in EPOC[57] Batch [170]\n",
      "loss 2.02963\n",
      "Runing in EPOC[57] Batch [180]\n",
      "loss 2.06247\n",
      "Runing in EPOC[57] Batch [190]\n",
      "loss 2.115\n",
      "Runing in EPOC[58] Batch [10]\n",
      "loss 1.98966\n",
      "Runing in EPOC[58] Batch [20]\n",
      "loss 1.96399\n",
      "Runing in EPOC[58] Batch [30]\n",
      "loss 2.03549\n",
      "Runing in EPOC[58] Batch [40]\n",
      "loss 2.01145\n",
      "Runing in EPOC[58] Batch [50]\n",
      "loss 2.05764\n",
      "Runing in EPOC[58] Batch [60]\n",
      "loss 2.07282\n",
      "Runing in EPOC[58] Batch [70]\n",
      "loss 2.05179\n",
      "Runing in EPOC[58] Batch [80]\n",
      "loss 2.08556\n",
      "Runing in EPOC[58] Batch [90]\n",
      "loss 2.13571\n",
      "Runing in EPOC[58] Batch [100]\n",
      "loss 2.11617\n",
      "Runing in EPOC[58] Batch [110]\n",
      "loss 2.18152\n",
      "Runing in EPOC[58] Batch [120]\n",
      "loss 2.10212\n",
      "Runing in EPOC[58] Batch [130]\n",
      "loss 2.19802\n",
      "Runing in EPOC[58] Batch [140]\n",
      "loss 2.0413\n",
      "Runing in EPOC[58] Batch [150]\n",
      "loss 2.12357\n",
      "Runing in EPOC[58] Batch [160]\n",
      "loss 1.98682\n",
      "Runing in EPOC[58] Batch [170]\n",
      "loss 2.02815\n",
      "Runing in EPOC[58] Batch [180]\n",
      "loss 2.05678\n",
      "Runing in EPOC[58] Batch [190]\n",
      "loss 2.11282\n",
      "Runing in EPOC[59] Batch [10]\n",
      "loss 1.98116\n",
      "Runing in EPOC[59] Batch [20]\n",
      "loss 1.96354\n",
      "Runing in EPOC[59] Batch [30]\n",
      "loss 2.03422\n",
      "Runing in EPOC[59] Batch [40]\n",
      "loss 2.01885\n",
      "Runing in EPOC[59] Batch [50]\n",
      "loss 2.05783\n",
      "Runing in EPOC[59] Batch [60]\n",
      "loss 2.0649\n",
      "Runing in EPOC[59] Batch [70]\n",
      "loss 2.05137\n",
      "Runing in EPOC[59] Batch [80]\n",
      "loss 2.08908\n",
      "Runing in EPOC[59] Batch [90]\n",
      "loss 2.13225\n",
      "Runing in EPOC[59] Batch [100]\n",
      "loss 2.11326\n",
      "Runing in EPOC[59] Batch [110]\n",
      "loss 2.18342\n",
      "Runing in EPOC[59] Batch [120]\n",
      "loss 2.09581\n",
      "Runing in EPOC[59] Batch [130]\n",
      "loss 2.18516\n",
      "Runing in EPOC[59] Batch [140]\n",
      "loss 2.02857\n",
      "Runing in EPOC[59] Batch [150]\n",
      "loss 2.11377\n",
      "Runing in EPOC[59] Batch [160]\n",
      "loss 1.98637\n",
      "Runing in EPOC[59] Batch [170]\n",
      "loss 2.03162\n",
      "Runing in EPOC[59] Batch [180]\n",
      "loss 2.0577\n",
      "Runing in EPOC[59] Batch [190]\n",
      "loss 2.10729\n",
      "Runing in EPOC[60] Batch [10]\n",
      "loss 1.96636\n",
      "Runing in EPOC[60] Batch [20]\n",
      "loss 1.95058\n",
      "Runing in EPOC[60] Batch [30]\n",
      "loss 2.01705\n",
      "Runing in EPOC[60] Batch [40]\n",
      "loss 2.01167\n",
      "Runing in EPOC[60] Batch [50]\n",
      "loss 2.06072\n",
      "Runing in EPOC[60] Batch [60]\n",
      "loss 2.07448\n",
      "Runing in EPOC[60] Batch [70]\n",
      "loss 2.05653\n",
      "Runing in EPOC[60] Batch [80]\n",
      "loss 2.08612\n",
      "Runing in EPOC[60] Batch [90]\n",
      "loss 2.13579\n",
      "Runing in EPOC[60] Batch [100]\n",
      "loss 2.10863\n",
      "Runing in EPOC[60] Batch [110]\n",
      "loss 2.1829\n",
      "Runing in EPOC[60] Batch [120]\n",
      "loss 2.10113\n",
      "Runing in EPOC[60] Batch [130]\n",
      "loss 2.18635\n",
      "Runing in EPOC[60] Batch [140]\n",
      "loss 2.02233\n",
      "Runing in EPOC[60] Batch [150]\n",
      "loss 2.10686\n",
      "Runing in EPOC[60] Batch [160]\n",
      "loss 1.98802\n",
      "Runing in EPOC[60] Batch [170]\n",
      "loss 2.03706\n",
      "Runing in EPOC[60] Batch [180]\n",
      "loss 2.06371\n",
      "Runing in EPOC[60] Batch [190]\n",
      "loss 2.11798\n",
      "Runing in EPOC[61] Batch [10]\n",
      "loss 1.95996\n",
      "Runing in EPOC[61] Batch [20]\n",
      "loss 1.94104\n",
      "Runing in EPOC[61] Batch [30]\n",
      "loss 2.01847\n",
      "Runing in EPOC[61] Batch [40]\n",
      "loss 2.00719\n",
      "Runing in EPOC[61] Batch [50]\n",
      "loss 2.06203\n",
      "Runing in EPOC[61] Batch [60]\n",
      "loss 2.08675\n",
      "Runing in EPOC[61] Batch [70]\n",
      "loss 2.07067\n",
      "Runing in EPOC[61] Batch [80]\n",
      "loss 2.09231\n",
      "Runing in EPOC[61] Batch [90]\n",
      "loss 2.14824\n",
      "Runing in EPOC[61] Batch [100]\n",
      "loss 2.11792\n",
      "Runing in EPOC[61] Batch [110]\n",
      "loss 2.18223\n",
      "Runing in EPOC[61] Batch [120]\n",
      "loss 2.0988\n",
      "Runing in EPOC[61] Batch [130]\n",
      "loss 2.19532\n",
      "Runing in EPOC[61] Batch [140]\n",
      "loss 2.02115\n",
      "Runing in EPOC[61] Batch [150]\n",
      "loss 2.10581\n",
      "Runing in EPOC[61] Batch [160]\n",
      "loss 1.9782\n",
      "Runing in EPOC[61] Batch [170]\n",
      "loss 2.02666\n",
      "Runing in EPOC[61] Batch [180]\n",
      "loss 2.04818\n",
      "Runing in EPOC[61] Batch [190]\n",
      "loss 2.09922\n",
      "Runing in EPOC[62] Batch [10]\n",
      "loss 1.95674\n",
      "Runing in EPOC[62] Batch [20]\n",
      "loss 1.93951\n",
      "Runing in EPOC[62] Batch [30]\n",
      "loss 2.01238\n",
      "Runing in EPOC[62] Batch [40]\n",
      "loss 2.00959\n",
      "Runing in EPOC[62] Batch [50]\n",
      "loss 2.06037\n",
      "Runing in EPOC[62] Batch [60]\n",
      "loss 2.07988\n",
      "Runing in EPOC[62] Batch [70]\n",
      "loss 2.05994\n",
      "Runing in EPOC[62] Batch [80]\n",
      "loss 2.09383\n",
      "Runing in EPOC[62] Batch [90]\n",
      "loss 2.15022\n",
      "Runing in EPOC[62] Batch [100]\n",
      "loss 2.11663\n",
      "Runing in EPOC[62] Batch [110]\n",
      "loss 2.18446\n",
      "Runing in EPOC[62] Batch [120]\n",
      "loss 2.10208\n",
      "Runing in EPOC[62] Batch [130]\n",
      "loss 2.18895\n",
      "Runing in EPOC[62] Batch [140]\n",
      "loss 2.01916\n",
      "Runing in EPOC[62] Batch [150]\n",
      "loss 2.10828\n",
      "Runing in EPOC[62] Batch [160]\n",
      "loss 1.97938\n",
      "Runing in EPOC[62] Batch [170]\n",
      "loss 2.02078\n",
      "Runing in EPOC[62] Batch [180]\n",
      "loss 2.0447\n",
      "Runing in EPOC[62] Batch [190]\n",
      "loss 2.08996\n",
      "Runing in EPOC[63] Batch [10]\n",
      "loss 1.96615\n",
      "Runing in EPOC[63] Batch [20]\n",
      "loss 1.93473\n",
      "Runing in EPOC[63] Batch [30]\n",
      "loss 1.99565\n",
      "Runing in EPOC[63] Batch [40]\n",
      "loss 1.99039\n",
      "Runing in EPOC[63] Batch [50]\n",
      "loss 2.05427\n",
      "Runing in EPOC[63] Batch [60]\n",
      "loss 2.07207\n",
      "Runing in EPOC[63] Batch [70]\n",
      "loss 2.05564\n",
      "Runing in EPOC[63] Batch [80]\n",
      "loss 2.08653\n",
      "Runing in EPOC[63] Batch [90]\n",
      "loss 2.13476\n",
      "Runing in EPOC[63] Batch [100]\n",
      "loss 2.10371\n",
      "Runing in EPOC[63] Batch [110]\n",
      "loss 2.1662\n",
      "Runing in EPOC[63] Batch [120]\n",
      "loss 2.0949\n",
      "Runing in EPOC[63] Batch [130]\n",
      "loss 2.18537\n",
      "Runing in EPOC[63] Batch [140]\n",
      "loss 2.02076\n",
      "Runing in EPOC[63] Batch [150]\n",
      "loss 2.09745\n",
      "Runing in EPOC[63] Batch [160]\n",
      "loss 1.96953\n",
      "Runing in EPOC[63] Batch [170]\n",
      "loss 2.01617\n",
      "Runing in EPOC[63] Batch [180]\n",
      "loss 2.04437\n",
      "Runing in EPOC[63] Batch [190]\n",
      "loss 2.0908\n",
      "Runing in EPOC[64] Batch [10]\n",
      "loss 1.96259\n",
      "Runing in EPOC[64] Batch [20]\n",
      "loss 1.93665\n",
      "Runing in EPOC[64] Batch [30]\n",
      "loss 1.99148\n",
      "Runing in EPOC[64] Batch [40]\n",
      "loss 1.9797\n",
      "Runing in EPOC[64] Batch [50]\n",
      "loss 2.03896\n",
      "Runing in EPOC[64] Batch [60]\n",
      "loss 2.05863\n",
      "Runing in EPOC[64] Batch [70]\n",
      "loss 2.05481\n",
      "Runing in EPOC[64] Batch [80]\n",
      "loss 2.08362\n",
      "Runing in EPOC[64] Batch [90]\n",
      "loss 2.13246\n",
      "Runing in EPOC[64] Batch [100]\n",
      "loss 2.09894\n",
      "Runing in EPOC[64] Batch [110]\n",
      "loss 2.15951\n",
      "Runing in EPOC[64] Batch [120]\n",
      "loss 2.08729\n",
      "Runing in EPOC[64] Batch [130]\n",
      "loss 2.18128\n",
      "Runing in EPOC[64] Batch [140]\n",
      "loss 2.02292\n",
      "Runing in EPOC[64] Batch [150]\n",
      "loss 2.09713\n",
      "Runing in EPOC[64] Batch [160]\n",
      "loss 1.95457\n",
      "Runing in EPOC[64] Batch [170]\n",
      "loss 2.00043\n",
      "Runing in EPOC[64] Batch [180]\n",
      "loss 2.03634\n",
      "Runing in EPOC[64] Batch [190]\n",
      "loss 2.08587\n",
      "Runing in EPOC[65] Batch [10]\n",
      "loss 1.94785\n",
      "Runing in EPOC[65] Batch [20]\n",
      "loss 1.92496\n",
      "Runing in EPOC[65] Batch [30]\n",
      "loss 1.98455\n",
      "Runing in EPOC[65] Batch [40]\n",
      "loss 1.97679\n",
      "Runing in EPOC[65] Batch [50]\n",
      "loss 2.03076\n",
      "Runing in EPOC[65] Batch [60]\n",
      "loss 2.04666\n",
      "Runing in EPOC[65] Batch [70]\n",
      "loss 2.05243\n",
      "Runing in EPOC[65] Batch [80]\n",
      "loss 2.08507\n",
      "Runing in EPOC[65] Batch [90]\n",
      "loss 2.1319\n",
      "Runing in EPOC[65] Batch [100]\n",
      "loss 2.10144\n",
      "Runing in EPOC[65] Batch [110]\n",
      "loss 2.15448\n",
      "Runing in EPOC[65] Batch [120]\n",
      "loss 2.07895\n",
      "Runing in EPOC[65] Batch [130]\n",
      "loss 2.1669\n",
      "Runing in EPOC[65] Batch [140]\n",
      "loss 2.00993\n",
      "Runing in EPOC[65] Batch [150]\n",
      "loss 2.09164\n",
      "Runing in EPOC[65] Batch [160]\n",
      "loss 1.94949\n",
      "Runing in EPOC[65] Batch [170]\n",
      "loss 1.99284\n",
      "Runing in EPOC[65] Batch [180]\n",
      "loss 2.02914\n",
      "Runing in EPOC[65] Batch [190]\n",
      "loss 2.0723\n",
      "Runing in EPOC[66] Batch [10]\n",
      "loss 1.94097\n",
      "Runing in EPOC[66] Batch [20]\n",
      "loss 1.91926\n",
      "Runing in EPOC[66] Batch [30]\n",
      "loss 1.98307\n",
      "Runing in EPOC[66] Batch [40]\n",
      "loss 1.97303\n",
      "Runing in EPOC[66] Batch [50]\n",
      "loss 2.02549\n",
      "Runing in EPOC[66] Batch [60]\n",
      "loss 2.03768\n",
      "Runing in EPOC[66] Batch [70]\n",
      "loss 2.03586\n",
      "Runing in EPOC[66] Batch [80]\n",
      "loss 2.068\n",
      "Runing in EPOC[66] Batch [90]\n",
      "loss 2.11527\n",
      "Runing in EPOC[66] Batch [100]\n",
      "loss 2.09982\n",
      "Runing in EPOC[66] Batch [110]\n",
      "loss 2.15472\n",
      "Runing in EPOC[66] Batch [120]\n",
      "loss 2.07198\n",
      "Runing in EPOC[66] Batch [130]\n",
      "loss 2.15675\n",
      "Runing in EPOC[66] Batch [140]\n",
      "loss 1.99699\n",
      "Runing in EPOC[66] Batch [150]\n",
      "loss 2.08096\n",
      "Runing in EPOC[66] Batch [160]\n",
      "loss 1.94861\n",
      "Runing in EPOC[66] Batch [170]\n",
      "loss 1.99441\n",
      "Runing in EPOC[66] Batch [180]\n",
      "loss 2.02925\n",
      "Runing in EPOC[66] Batch [190]\n",
      "loss 2.06517\n",
      "Runing in EPOC[67] Batch [10]\n",
      "loss 1.94129\n",
      "Runing in EPOC[67] Batch [20]\n",
      "loss 1.91904\n",
      "Runing in EPOC[67] Batch [30]\n",
      "loss 1.98523\n",
      "Runing in EPOC[67] Batch [40]\n",
      "loss 1.9773\n",
      "Runing in EPOC[67] Batch [50]\n",
      "loss 2.02625\n",
      "Runing in EPOC[67] Batch [60]\n",
      "loss 2.03636\n",
      "Runing in EPOC[67] Batch [70]\n",
      "loss 2.02706\n",
      "Runing in EPOC[67] Batch [80]\n",
      "loss 2.06292\n",
      "Runing in EPOC[67] Batch [90]\n",
      "loss 2.10919\n",
      "Runing in EPOC[67] Batch [100]\n",
      "loss 2.10223\n",
      "Runing in EPOC[67] Batch [110]\n",
      "loss 2.16386\n",
      "Runing in EPOC[67] Batch [120]\n",
      "loss 2.07929\n",
      "Runing in EPOC[67] Batch [130]\n",
      "loss 2.15759\n",
      "Runing in EPOC[67] Batch [140]\n",
      "loss 1.99322\n",
      "Runing in EPOC[67] Batch [150]\n",
      "loss 2.08488\n",
      "Runing in EPOC[67] Batch [160]\n",
      "loss 1.95711\n",
      "Runing in EPOC[67] Batch [170]\n",
      "loss 2.00088\n",
      "Runing in EPOC[67] Batch [180]\n",
      "loss 2.03707\n",
      "Runing in EPOC[67] Batch [190]\n",
      "loss 2.06926\n",
      "Runing in EPOC[68] Batch [10]\n",
      "loss 1.95539\n",
      "Runing in EPOC[68] Batch [20]\n",
      "loss 1.92066\n",
      "Runing in EPOC[68] Batch [30]\n",
      "loss 1.97615\n",
      "Runing in EPOC[68] Batch [40]\n",
      "loss 1.96888\n",
      "Runing in EPOC[68] Batch [50]\n",
      "loss 2.02338\n",
      "Runing in EPOC[68] Batch [60]\n",
      "loss 2.03253\n",
      "Runing in EPOC[68] Batch [70]\n",
      "loss 2.02546\n",
      "Runing in EPOC[68] Batch [80]\n",
      "loss 2.06453\n",
      "Runing in EPOC[68] Batch [90]\n",
      "loss 2.10477\n",
      "Runing in EPOC[68] Batch [100]\n",
      "loss 2.10005\n",
      "Runing in EPOC[68] Batch [110]\n",
      "loss 2.16495\n",
      "Runing in EPOC[68] Batch [120]\n",
      "loss 2.08572\n",
      "Runing in EPOC[68] Batch [130]\n",
      "loss 2.16206\n",
      "Runing in EPOC[68] Batch [140]\n",
      "loss 1.99596\n",
      "Runing in EPOC[68] Batch [150]\n",
      "loss 2.07861\n",
      "Runing in EPOC[68] Batch [160]\n",
      "loss 1.9459\n",
      "Runing in EPOC[68] Batch [170]\n",
      "loss 1.99237\n",
      "Runing in EPOC[68] Batch [180]\n",
      "loss 2.02577\n",
      "Runing in EPOC[68] Batch [190]\n",
      "loss 2.07047\n",
      "Runing in EPOC[69] Batch [10]\n",
      "loss 1.97333\n",
      "Runing in EPOC[69] Batch [20]\n",
      "loss 1.94188\n",
      "Runing in EPOC[69] Batch [30]\n",
      "loss 1.99429\n",
      "Runing in EPOC[69] Batch [40]\n",
      "loss 1.97942\n",
      "Runing in EPOC[69] Batch [50]\n",
      "loss 2.03585\n",
      "Runing in EPOC[69] Batch [60]\n",
      "loss 2.04506\n",
      "Runing in EPOC[69] Batch [70]\n",
      "loss 2.02665\n",
      "Runing in EPOC[69] Batch [80]\n",
      "loss 2.06051\n",
      "Runing in EPOC[69] Batch [90]\n",
      "loss 2.09751\n",
      "Runing in EPOC[69] Batch [100]\n",
      "loss 2.08968\n",
      "Runing in EPOC[69] Batch [110]\n",
      "loss 2.14009\n",
      "Runing in EPOC[69] Batch [120]\n",
      "loss 2.06249\n",
      "Runing in EPOC[69] Batch [130]\n",
      "loss 2.16454\n",
      "Runing in EPOC[69] Batch [140]\n",
      "loss 2.00118\n",
      "Runing in EPOC[69] Batch [150]\n",
      "loss 2.07906\n",
      "Runing in EPOC[69] Batch [160]\n",
      "loss 1.94036\n",
      "Runing in EPOC[69] Batch [170]\n",
      "loss 1.9808\n",
      "Runing in EPOC[69] Batch [180]\n",
      "loss 2.01411\n",
      "Runing in EPOC[69] Batch [190]\n",
      "loss 2.06637\n",
      "Runing in EPOC[70] Batch [10]\n",
      "loss 1.94533\n",
      "Runing in EPOC[70] Batch [20]\n",
      "loss 1.92146\n",
      "Runing in EPOC[70] Batch [30]\n",
      "loss 1.99678\n",
      "Runing in EPOC[70] Batch [40]\n",
      "loss 1.97936\n",
      "Runing in EPOC[70] Batch [50]\n",
      "loss 2.02841\n",
      "Runing in EPOC[70] Batch [60]\n",
      "loss 2.03156\n",
      "Runing in EPOC[70] Batch [70]\n",
      "loss 2.01688\n",
      "Runing in EPOC[70] Batch [80]\n",
      "loss 2.05639\n",
      "Runing in EPOC[70] Batch [90]\n",
      "loss 2.09883\n",
      "Runing in EPOC[70] Batch [100]\n",
      "loss 2.09964\n",
      "Runing in EPOC[70] Batch [110]\n",
      "loss 2.14879\n",
      "Runing in EPOC[70] Batch [120]\n",
      "loss 2.06906\n",
      "Runing in EPOC[70] Batch [130]\n",
      "loss 2.16723\n",
      "Runing in EPOC[70] Batch [140]\n",
      "loss 2.00467\n",
      "Runing in EPOC[70] Batch [150]\n",
      "loss 2.07837\n",
      "Runing in EPOC[70] Batch [160]\n",
      "loss 1.94819\n",
      "Runing in EPOC[70] Batch [170]\n",
      "loss 1.98361\n",
      "Runing in EPOC[70] Batch [180]\n",
      "loss 2.00907\n",
      "Runing in EPOC[70] Batch [190]\n",
      "loss 2.05802\n",
      "Runing in EPOC[71] Batch [10]\n",
      "loss 1.93831\n",
      "Runing in EPOC[71] Batch [20]\n",
      "loss 1.90734\n",
      "Runing in EPOC[71] Batch [30]\n",
      "loss 1.99189\n",
      "Runing in EPOC[71] Batch [40]\n",
      "loss 1.97919\n",
      "Runing in EPOC[71] Batch [50]\n",
      "loss 2.02132\n",
      "Runing in EPOC[71] Batch [60]\n",
      "loss 2.02523\n",
      "Runing in EPOC[71] Batch [70]\n",
      "loss 2.01661\n",
      "Runing in EPOC[71] Batch [80]\n",
      "loss 2.05748\n",
      "Runing in EPOC[71] Batch [90]\n",
      "loss 2.10363\n",
      "Runing in EPOC[71] Batch [100]\n",
      "loss 2.10225\n",
      "Runing in EPOC[71] Batch [110]\n",
      "loss 2.14788\n",
      "Runing in EPOC[71] Batch [120]\n",
      "loss 2.06754\n",
      "Runing in EPOC[71] Batch [130]\n",
      "loss 2.16574\n",
      "Runing in EPOC[71] Batch [140]\n",
      "loss 2.00396\n",
      "Runing in EPOC[71] Batch [150]\n",
      "loss 2.07289\n",
      "Runing in EPOC[71] Batch [160]\n",
      "loss 1.93555\n",
      "Runing in EPOC[71] Batch [170]\n",
      "loss 1.97535\n",
      "Runing in EPOC[71] Batch [180]\n",
      "loss 2.01488\n",
      "Runing in EPOC[71] Batch [190]\n",
      "loss 2.05263\n",
      "Runing in EPOC[72] Batch [10]\n",
      "loss 1.94291\n",
      "Runing in EPOC[72] Batch [20]\n",
      "loss 1.91537\n",
      "Runing in EPOC[72] Batch [30]\n",
      "loss 1.98422\n",
      "Runing in EPOC[72] Batch [40]\n",
      "loss 1.97874\n",
      "Runing in EPOC[72] Batch [50]\n",
      "loss 2.0286\n",
      "Runing in EPOC[72] Batch [60]\n",
      "loss 2.03153\n",
      "Runing in EPOC[72] Batch [70]\n",
      "loss 2.01625\n",
      "Runing in EPOC[72] Batch [80]\n",
      "loss 2.04321\n",
      "Runing in EPOC[72] Batch [90]\n",
      "loss 2.08531\n",
      "Runing in EPOC[72] Batch [100]\n",
      "loss 2.08297\n",
      "Runing in EPOC[72] Batch [110]\n",
      "loss 2.12965\n",
      "Runing in EPOC[72] Batch [120]\n",
      "loss 2.05825\n",
      "Runing in EPOC[72] Batch [130]\n",
      "loss 2.15785\n",
      "Runing in EPOC[72] Batch [140]\n",
      "loss 1.99553\n",
      "Runing in EPOC[72] Batch [150]\n",
      "loss 2.07824\n",
      "Runing in EPOC[72] Batch [160]\n",
      "loss 1.94397\n",
      "Runing in EPOC[72] Batch [170]\n",
      "loss 1.98301\n",
      "Runing in EPOC[72] Batch [180]\n",
      "loss 2.00972\n",
      "Runing in EPOC[72] Batch [190]\n",
      "loss 2.04134\n",
      "Runing in EPOC[73] Batch [10]\n",
      "loss 1.92389\n",
      "Runing in EPOC[73] Batch [20]\n",
      "loss 1.90945\n",
      "Runing in EPOC[73] Batch [30]\n",
      "loss 1.96743\n",
      "Runing in EPOC[73] Batch [40]\n",
      "loss 1.96568\n",
      "Runing in EPOC[73] Batch [50]\n",
      "loss 2.02231\n",
      "Runing in EPOC[73] Batch [60]\n",
      "loss 2.03707\n",
      "Runing in EPOC[73] Batch [70]\n",
      "loss 2.01853\n",
      "Runing in EPOC[73] Batch [80]\n",
      "loss 2.04189\n",
      "Runing in EPOC[73] Batch [90]\n",
      "loss 2.08137\n",
      "Runing in EPOC[73] Batch [100]\n",
      "loss 2.0753\n",
      "Runing in EPOC[73] Batch [110]\n",
      "loss 2.12144\n",
      "Runing in EPOC[73] Batch [120]\n",
      "loss 2.04898\n",
      "Runing in EPOC[73] Batch [130]\n",
      "loss 2.14768\n",
      "Runing in EPOC[73] Batch [140]\n",
      "loss 1.97716\n",
      "Runing in EPOC[73] Batch [150]\n",
      "loss 2.06509\n",
      "Runing in EPOC[73] Batch [160]\n",
      "loss 1.93313\n",
      "Runing in EPOC[73] Batch [170]\n",
      "loss 1.98963\n",
      "Runing in EPOC[73] Batch [180]\n",
      "loss 2.01805\n",
      "Runing in EPOC[73] Batch [190]\n",
      "loss 2.04258\n",
      "Runing in EPOC[74] Batch [10]\n",
      "loss 1.91801\n",
      "Runing in EPOC[74] Batch [20]\n",
      "loss 1.8895\n",
      "Runing in EPOC[74] Batch [30]\n",
      "loss 1.95337\n",
      "Runing in EPOC[74] Batch [40]\n",
      "loss 1.94944\n",
      "Runing in EPOC[74] Batch [50]\n",
      "loss 2.00993\n",
      "Runing in EPOC[74] Batch [60]\n",
      "loss 2.0289\n",
      "Runing in EPOC[74] Batch [70]\n",
      "loss 2.00967\n",
      "Runing in EPOC[74] Batch [80]\n",
      "loss 2.03549\n",
      "Runing in EPOC[74] Batch [90]\n",
      "loss 2.07443\n",
      "Runing in EPOC[74] Batch [100]\n",
      "loss 2.06992\n",
      "Runing in EPOC[74] Batch [110]\n",
      "loss 2.11859\n",
      "Runing in EPOC[74] Batch [120]\n",
      "loss 2.04619\n",
      "Runing in EPOC[74] Batch [130]\n",
      "loss 2.14409\n",
      "Runing in EPOC[74] Batch [140]\n",
      "loss 1.9693\n",
      "Runing in EPOC[74] Batch [150]\n",
      "loss 2.04958\n",
      "Runing in EPOC[74] Batch [160]\n",
      "loss 1.92229\n",
      "Runing in EPOC[74] Batch [170]\n",
      "loss 1.97504\n",
      "Runing in EPOC[74] Batch [180]\n",
      "loss 2.00362\n",
      "Runing in EPOC[74] Batch [190]\n",
      "loss 2.03864\n",
      "Runing in EPOC[75] Batch [10]\n",
      "loss 1.9157\n",
      "Runing in EPOC[75] Batch [20]\n",
      "loss 1.8898\n",
      "Runing in EPOC[75] Batch [30]\n",
      "loss 1.9435\n",
      "Runing in EPOC[75] Batch [40]\n",
      "loss 1.93631\n",
      "Runing in EPOC[75] Batch [50]\n",
      "loss 1.99629\n",
      "Runing in EPOC[75] Batch [60]\n",
      "loss 2.0122\n",
      "Runing in EPOC[75] Batch [70]\n",
      "loss 1.9961\n",
      "Runing in EPOC[75] Batch [80]\n",
      "loss 2.02651\n",
      "Runing in EPOC[75] Batch [90]\n",
      "loss 2.06429\n",
      "Runing in EPOC[75] Batch [100]\n",
      "loss 2.06289\n",
      "Runing in EPOC[75] Batch [110]\n",
      "loss 2.11601\n",
      "Runing in EPOC[75] Batch [120]\n",
      "loss 2.04943\n",
      "Runing in EPOC[75] Batch [130]\n",
      "loss 2.14822\n",
      "Runing in EPOC[75] Batch [140]\n",
      "loss 1.97759\n",
      "Runing in EPOC[75] Batch [150]\n",
      "loss 2.05055\n",
      "Runing in EPOC[75] Batch [160]\n",
      "loss 1.92228\n",
      "Runing in EPOC[75] Batch [170]\n",
      "loss 1.96763\n",
      "Runing in EPOC[75] Batch [180]\n",
      "loss 1.99576\n",
      "Runing in EPOC[75] Batch [190]\n",
      "loss 2.0381\n",
      "Runing in EPOC[76] Batch [10]\n",
      "loss 1.92378\n",
      "Runing in EPOC[76] Batch [20]\n",
      "loss 1.88634\n",
      "Runing in EPOC[76] Batch [30]\n",
      "loss 1.94877\n",
      "Runing in EPOC[76] Batch [40]\n",
      "loss 1.93525\n",
      "Runing in EPOC[76] Batch [50]\n",
      "loss 1.98726\n",
      "Runing in EPOC[76] Batch [60]\n",
      "loss 2.00148\n",
      "Runing in EPOC[76] Batch [70]\n",
      "loss 1.99059\n",
      "Runing in EPOC[76] Batch [80]\n",
      "loss 2.02507\n",
      "Runing in EPOC[76] Batch [90]\n",
      "loss 2.06555\n",
      "Runing in EPOC[76] Batch [100]\n",
      "loss 2.06569\n",
      "Runing in EPOC[76] Batch [110]\n",
      "loss 2.10982\n",
      "Runing in EPOC[76] Batch [120]\n",
      "loss 2.04418\n",
      "Runing in EPOC[76] Batch [130]\n",
      "loss 2.13784\n",
      "Runing in EPOC[76] Batch [140]\n",
      "loss 1.97602\n",
      "Runing in EPOC[76] Batch [150]\n",
      "loss 2.05347\n",
      "Runing in EPOC[76] Batch [160]\n",
      "loss 1.92622\n",
      "Runing in EPOC[76] Batch [170]\n",
      "loss 1.96549\n",
      "Runing in EPOC[76] Batch [180]\n",
      "loss 2.00268\n",
      "Runing in EPOC[76] Batch [190]\n",
      "loss 2.05065\n",
      "Runing in EPOC[77] Batch [10]\n",
      "loss 1.92658\n",
      "Runing in EPOC[77] Batch [20]\n",
      "loss 1.90804\n",
      "Runing in EPOC[77] Batch [30]\n",
      "loss 1.96312\n",
      "Runing in EPOC[77] Batch [40]\n",
      "loss 1.9455\n",
      "Runing in EPOC[77] Batch [50]\n",
      "loss 1.99417\n",
      "Runing in EPOC[77] Batch [60]\n",
      "loss 2.00606\n",
      "Runing in EPOC[77] Batch [70]\n",
      "loss 2.0032\n",
      "Runing in EPOC[77] Batch [80]\n",
      "loss 2.02849\n",
      "Runing in EPOC[77] Batch [90]\n",
      "loss 2.07149\n",
      "Runing in EPOC[77] Batch [100]\n",
      "loss 2.07149\n",
      "Runing in EPOC[77] Batch [110]\n",
      "loss 2.11164\n",
      "Runing in EPOC[77] Batch [120]\n",
      "loss 2.0391\n",
      "Runing in EPOC[77] Batch [130]\n",
      "loss 2.12541\n",
      "Runing in EPOC[77] Batch [140]\n",
      "loss 1.9592\n",
      "Runing in EPOC[77] Batch [150]\n",
      "loss 2.04515\n",
      "Runing in EPOC[77] Batch [160]\n",
      "loss 1.92296\n",
      "Runing in EPOC[77] Batch [170]\n",
      "loss 1.96301\n",
      "Runing in EPOC[77] Batch [180]\n",
      "loss 2.00307\n",
      "Runing in EPOC[77] Batch [190]\n",
      "loss 2.0523\n",
      "Runing in EPOC[78] Batch [10]\n",
      "loss 1.91313\n",
      "Runing in EPOC[78] Batch [20]\n",
      "loss 1.90336\n",
      "Runing in EPOC[78] Batch [30]\n",
      "loss 1.9628\n",
      "Runing in EPOC[78] Batch [40]\n",
      "loss 1.95287\n",
      "Runing in EPOC[78] Batch [50]\n",
      "loss 1.99957\n",
      "Runing in EPOC[78] Batch [60]\n",
      "loss 2.00872\n",
      "Runing in EPOC[78] Batch [70]\n",
      "loss 2.0024\n",
      "Runing in EPOC[78] Batch [80]\n",
      "loss 2.01805\n",
      "Runing in EPOC[78] Batch [90]\n",
      "loss 2.06436\n",
      "Runing in EPOC[78] Batch [100]\n",
      "loss 2.06205\n",
      "Runing in EPOC[78] Batch [110]\n",
      "loss 2.11639\n",
      "Runing in EPOC[78] Batch [120]\n",
      "loss 2.04639\n",
      "Runing in EPOC[78] Batch [130]\n",
      "loss 2.13269\n",
      "Runing in EPOC[78] Batch [140]\n",
      "loss 1.96214\n",
      "Runing in EPOC[78] Batch [150]\n",
      "loss 2.04836\n",
      "Runing in EPOC[78] Batch [160]\n",
      "loss 1.91806\n",
      "Runing in EPOC[78] Batch [170]\n",
      "loss 1.94951\n",
      "Runing in EPOC[78] Batch [180]\n",
      "loss 1.98625\n",
      "Runing in EPOC[78] Batch [190]\n",
      "loss 2.03595\n",
      "Runing in EPOC[79] Batch [10]\n",
      "loss 1.90722\n",
      "Runing in EPOC[79] Batch [20]\n",
      "loss 1.88751\n",
      "Runing in EPOC[79] Batch [30]\n",
      "loss 1.94083\n",
      "Runing in EPOC[79] Batch [40]\n",
      "loss 1.94759\n",
      "Runing in EPOC[79] Batch [50]\n",
      "loss 2.00148\n",
      "Runing in EPOC[79] Batch [60]\n",
      "loss 2.01682\n",
      "Runing in EPOC[79] Batch [70]\n",
      "loss 2.00187\n",
      "Runing in EPOC[79] Batch [80]\n",
      "loss 2.01734\n",
      "Runing in EPOC[79] Batch [90]\n",
      "loss 2.06309\n",
      "Runing in EPOC[79] Batch [100]\n",
      "loss 2.05955\n",
      "Runing in EPOC[79] Batch [110]\n",
      "loss 2.11171\n",
      "Runing in EPOC[79] Batch [120]\n",
      "loss 2.03582\n",
      "Runing in EPOC[79] Batch [130]\n",
      "loss 2.1323\n",
      "Runing in EPOC[79] Batch [140]\n",
      "loss 1.95689\n",
      "Runing in EPOC[79] Batch [150]\n",
      "loss 2.04358\n",
      "Runing in EPOC[79] Batch [160]\n",
      "loss 1.9225\n",
      "Runing in EPOC[79] Batch [170]\n",
      "loss 1.95118\n",
      "Runing in EPOC[79] Batch [180]\n",
      "loss 1.98503\n",
      "Runing in EPOC[79] Batch [190]\n",
      "loss 2.02993\n",
      "Runing in EPOC[80] Batch [10]\n",
      "loss 1.90395\n",
      "Runing in EPOC[80] Batch [20]\n",
      "loss 1.88707\n",
      "Runing in EPOC[80] Batch [30]\n",
      "loss 1.93759\n",
      "Runing in EPOC[80] Batch [40]\n",
      "loss 1.93949\n",
      "Runing in EPOC[80] Batch [50]\n",
      "loss 1.99519\n",
      "Runing in EPOC[80] Batch [60]\n",
      "loss 2.01997\n",
      "Runing in EPOC[80] Batch [70]\n",
      "loss 1.99007\n",
      "Runing in EPOC[80] Batch [80]\n",
      "loss 2.02155\n",
      "Runing in EPOC[80] Batch [90]\n",
      "loss 2.07174\n",
      "Runing in EPOC[80] Batch [100]\n",
      "loss 2.07003\n",
      "Runing in EPOC[80] Batch [110]\n",
      "loss 2.10609\n",
      "Runing in EPOC[80] Batch [120]\n",
      "loss 2.02792\n",
      "Runing in EPOC[80] Batch [130]\n",
      "loss 2.12575\n",
      "Runing in EPOC[80] Batch [140]\n",
      "loss 1.9542\n",
      "Runing in EPOC[80] Batch [150]\n",
      "loss 2.02883\n",
      "Runing in EPOC[80] Batch [160]\n",
      "loss 1.91274\n",
      "Runing in EPOC[80] Batch [170]\n",
      "loss 1.94625\n",
      "Runing in EPOC[80] Batch [180]\n",
      "loss 1.97705\n",
      "Runing in EPOC[80] Batch [190]\n",
      "loss 2.02229\n",
      "Runing in EPOC[81] Batch [10]\n",
      "loss 1.90921\n",
      "Runing in EPOC[81] Batch [20]\n",
      "loss 1.89023\n",
      "Runing in EPOC[81] Batch [30]\n",
      "loss 1.94367\n",
      "Runing in EPOC[81] Batch [40]\n",
      "loss 1.93763\n",
      "Runing in EPOC[81] Batch [50]\n",
      "loss 1.99764\n",
      "Runing in EPOC[81] Batch [60]\n",
      "loss 2.01837\n",
      "Runing in EPOC[81] Batch [70]\n",
      "loss 1.99226\n",
      "Runing in EPOC[81] Batch [80]\n",
      "loss 2.0229\n",
      "Runing in EPOC[81] Batch [90]\n",
      "loss 2.08788\n",
      "Runing in EPOC[81] Batch [100]\n",
      "loss 2.08189\n",
      "Runing in EPOC[81] Batch [110]\n",
      "loss 2.1062\n",
      "Runing in EPOC[81] Batch [120]\n",
      "loss 2.02221\n",
      "Runing in EPOC[81] Batch [130]\n",
      "loss 2.11897\n",
      "Runing in EPOC[81] Batch [140]\n",
      "loss 1.95499\n",
      "Runing in EPOC[81] Batch [150]\n",
      "loss 2.02288\n",
      "Runing in EPOC[81] Batch [160]\n",
      "loss 1.90496\n",
      "Runing in EPOC[81] Batch [170]\n",
      "loss 1.94166\n",
      "Runing in EPOC[81] Batch [180]\n",
      "loss 1.97279\n",
      "Runing in EPOC[81] Batch [190]\n",
      "loss 2.02171\n",
      "Runing in EPOC[82] Batch [10]\n",
      "loss 1.9026\n",
      "Runing in EPOC[82] Batch [20]\n",
      "loss 1.88736\n",
      "Runing in EPOC[82] Batch [30]\n",
      "loss 1.94167\n",
      "Runing in EPOC[82] Batch [40]\n",
      "loss 1.93314\n",
      "Runing in EPOC[82] Batch [50]\n",
      "loss 1.99733\n",
      "Runing in EPOC[82] Batch [60]\n",
      "loss 2.0091\n",
      "Runing in EPOC[82] Batch [70]\n",
      "loss 1.99668\n",
      "Runing in EPOC[82] Batch [80]\n",
      "loss 2.02829\n",
      "Runing in EPOC[82] Batch [90]\n",
      "loss 2.08909\n",
      "Runing in EPOC[82] Batch [100]\n",
      "loss 2.05473\n",
      "Runing in EPOC[82] Batch [110]\n",
      "loss 2.09511\n",
      "Runing in EPOC[82] Batch [120]\n",
      "loss 2.02657\n",
      "Runing in EPOC[82] Batch [130]\n",
      "loss 2.12677\n",
      "Runing in EPOC[82] Batch [140]\n",
      "loss 1.96194\n",
      "Runing in EPOC[82] Batch [150]\n",
      "loss 2.02879\n",
      "Runing in EPOC[82] Batch [160]\n",
      "loss 1.91663\n",
      "Runing in EPOC[82] Batch [170]\n",
      "loss 1.94847\n",
      "Runing in EPOC[82] Batch [180]\n",
      "loss 1.96657\n",
      "Runing in EPOC[82] Batch [190]\n",
      "loss 2.0124\n",
      "Runing in EPOC[83] Batch [10]\n",
      "loss 1.89856\n",
      "Runing in EPOC[83] Batch [20]\n",
      "loss 1.87612\n",
      "Runing in EPOC[83] Batch [30]\n",
      "loss 1.92617\n",
      "Runing in EPOC[83] Batch [40]\n",
      "loss 1.92249\n",
      "Runing in EPOC[83] Batch [50]\n",
      "loss 1.98445\n",
      "Runing in EPOC[83] Batch [60]\n",
      "loss 1.99998\n",
      "Runing in EPOC[83] Batch [70]\n",
      "loss 1.99906\n",
      "Runing in EPOC[83] Batch [80]\n",
      "loss 2.03347\n",
      "Runing in EPOC[83] Batch [90]\n",
      "loss 2.09015\n",
      "Runing in EPOC[83] Batch [100]\n",
      "loss 2.04876\n",
      "Runing in EPOC[83] Batch [110]\n",
      "loss 2.10444\n",
      "Runing in EPOC[83] Batch [120]\n",
      "loss 2.03642\n",
      "Runing in EPOC[83] Batch [130]\n",
      "loss 2.1401\n",
      "Runing in EPOC[83] Batch [140]\n",
      "loss 1.95637\n",
      "Runing in EPOC[83] Batch [150]\n",
      "loss 2.01696\n",
      "Runing in EPOC[83] Batch [160]\n",
      "loss 1.90694\n",
      "Runing in EPOC[83] Batch [170]\n",
      "loss 1.94184\n",
      "Runing in EPOC[83] Batch [180]\n",
      "loss 1.96705\n",
      "Runing in EPOC[83] Batch [190]\n",
      "loss 2.01481\n",
      "Runing in EPOC[84] Batch [10]\n",
      "loss 1.8954\n",
      "Runing in EPOC[84] Batch [20]\n",
      "loss 1.88673\n",
      "Runing in EPOC[84] Batch [30]\n",
      "loss 1.92754\n",
      "Runing in EPOC[84] Batch [40]\n",
      "loss 1.92417\n",
      "Runing in EPOC[84] Batch [50]\n",
      "loss 1.98548\n",
      "Runing in EPOC[84] Batch [60]\n",
      "loss 2.0048\n",
      "Runing in EPOC[84] Batch [70]\n",
      "loss 1.98418\n",
      "Runing in EPOC[84] Batch [80]\n",
      "loss 2.01593\n",
      "Runing in EPOC[84] Batch [90]\n",
      "loss 2.06886\n",
      "Runing in EPOC[84] Batch [100]\n",
      "loss 2.04693\n",
      "Runing in EPOC[84] Batch [110]\n",
      "loss 2.10888\n",
      "Runing in EPOC[84] Batch [120]\n",
      "loss 2.03526\n",
      "Runing in EPOC[84] Batch [130]\n",
      "loss 2.12754\n",
      "Runing in EPOC[84] Batch [140]\n",
      "loss 1.95029\n",
      "Runing in EPOC[84] Batch [150]\n",
      "loss 2.02024\n",
      "Runing in EPOC[84] Batch [160]\n",
      "loss 1.90717\n",
      "Runing in EPOC[84] Batch [170]\n",
      "loss 1.93678\n",
      "Runing in EPOC[84] Batch [180]\n",
      "loss 1.969\n",
      "Runing in EPOC[84] Batch [190]\n",
      "loss 2.0121\n",
      "Runing in EPOC[85] Batch [10]\n",
      "loss 1.88285\n",
      "Runing in EPOC[85] Batch [20]\n",
      "loss 1.87917\n",
      "Runing in EPOC[85] Batch [30]\n",
      "loss 1.93245\n",
      "Runing in EPOC[85] Batch [40]\n",
      "loss 1.93142\n",
      "Runing in EPOC[85] Batch [50]\n",
      "loss 1.98293\n",
      "Runing in EPOC[85] Batch [60]\n",
      "loss 1.98981\n",
      "Runing in EPOC[85] Batch [70]\n",
      "loss 1.97739\n",
      "Runing in EPOC[85] Batch [80]\n",
      "loss 2.00561\n",
      "Runing in EPOC[85] Batch [90]\n",
      "loss 2.06181\n",
      "Runing in EPOC[85] Batch [100]\n",
      "loss 2.05838\n",
      "Runing in EPOC[85] Batch [110]\n",
      "loss 2.10527\n",
      "Runing in EPOC[85] Batch [120]\n",
      "loss 2.0298\n",
      "Runing in EPOC[85] Batch [130]\n",
      "loss 2.10998\n",
      "Runing in EPOC[85] Batch [140]\n",
      "loss 1.94763\n",
      "Runing in EPOC[85] Batch [150]\n",
      "loss 2.02648\n",
      "Runing in EPOC[85] Batch [160]\n",
      "loss 1.90202\n",
      "Runing in EPOC[85] Batch [170]\n",
      "loss 1.93374\n",
      "Runing in EPOC[85] Batch [180]\n",
      "loss 1.96115\n",
      "Runing in EPOC[85] Batch [190]\n",
      "loss 2.00331\n",
      "Runing in EPOC[86] Batch [10]\n",
      "loss 1.8856\n",
      "Runing in EPOC[86] Batch [20]\n",
      "loss 1.86056\n",
      "Runing in EPOC[86] Batch [30]\n",
      "loss 1.91846\n",
      "Runing in EPOC[86] Batch [40]\n",
      "loss 1.93223\n",
      "Runing in EPOC[86] Batch [50]\n",
      "loss 1.98865\n",
      "Runing in EPOC[86] Batch [60]\n",
      "loss 1.99703\n",
      "Runing in EPOC[86] Batch [70]\n",
      "loss 1.96426\n",
      "Runing in EPOC[86] Batch [80]\n",
      "loss 2.00844\n",
      "Runing in EPOC[86] Batch [90]\n",
      "loss 2.06754\n",
      "Runing in EPOC[86] Batch [100]\n",
      "loss 2.05392\n",
      "Runing in EPOC[86] Batch [110]\n",
      "loss 2.09331\n",
      "Runing in EPOC[86] Batch [120]\n",
      "loss 2.01569\n",
      "Runing in EPOC[86] Batch [130]\n",
      "loss 2.1074\n",
      "Runing in EPOC[86] Batch [140]\n",
      "loss 1.95352\n",
      "Runing in EPOC[86] Batch [150]\n",
      "loss 2.03805\n",
      "Runing in EPOC[86] Batch [160]\n",
      "loss 1.90237\n",
      "Runing in EPOC[86] Batch [170]\n",
      "loss 1.93257\n",
      "Runing in EPOC[86] Batch [180]\n",
      "loss 1.9603\n",
      "Runing in EPOC[86] Batch [190]\n",
      "loss 2.00116\n",
      "Runing in EPOC[87] Batch [10]\n",
      "loss 1.89249\n",
      "Runing in EPOC[87] Batch [20]\n",
      "loss 1.87258\n",
      "Runing in EPOC[87] Batch [30]\n",
      "loss 1.91709\n",
      "Runing in EPOC[87] Batch [40]\n",
      "loss 1.91183\n",
      "Runing in EPOC[87] Batch [50]\n",
      "loss 1.98646\n",
      "Runing in EPOC[87] Batch [60]\n",
      "loss 2.00244\n",
      "Runing in EPOC[87] Batch [70]\n",
      "loss 1.97089\n",
      "Runing in EPOC[87] Batch [80]\n",
      "loss 2.00195\n",
      "Runing in EPOC[87] Batch [90]\n",
      "loss 2.06902\n",
      "Runing in EPOC[87] Batch [100]\n",
      "loss 2.05448\n",
      "Runing in EPOC[87] Batch [110]\n",
      "loss 2.09719\n",
      "Runing in EPOC[87] Batch [120]\n",
      "loss 2.01194\n",
      "Runing in EPOC[87] Batch [130]\n",
      "loss 2.09911\n",
      "Runing in EPOC[87] Batch [140]\n",
      "loss 1.9549\n",
      "Runing in EPOC[87] Batch [150]\n",
      "loss 2.03778\n",
      "Runing in EPOC[87] Batch [160]\n",
      "loss 1.90792\n",
      "Runing in EPOC[87] Batch [170]\n",
      "loss 1.94989\n",
      "Runing in EPOC[87] Batch [180]\n",
      "loss 1.97413\n",
      "Runing in EPOC[87] Batch [190]\n",
      "loss 2.01288\n",
      "Runing in EPOC[88] Batch [10]\n",
      "loss 1.88277\n",
      "Runing in EPOC[88] Batch [20]\n",
      "loss 1.85897\n",
      "Runing in EPOC[88] Batch [30]\n",
      "loss 1.92453\n",
      "Runing in EPOC[88] Batch [40]\n",
      "loss 1.91504\n",
      "Runing in EPOC[88] Batch [50]\n",
      "loss 1.97141\n",
      "Runing in EPOC[88] Batch [60]\n",
      "loss 1.98329\n",
      "Runing in EPOC[88] Batch [70]\n",
      "loss 1.96188\n",
      "Runing in EPOC[88] Batch [80]\n",
      "loss 2.01306\n",
      "Runing in EPOC[88] Batch [90]\n",
      "loss 2.06331\n",
      "Runing in EPOC[88] Batch [100]\n",
      "loss 2.04453\n",
      "Runing in EPOC[88] Batch [110]\n",
      "loss 2.09107\n",
      "Runing in EPOC[88] Batch [120]\n",
      "loss 2.01604\n",
      "Runing in EPOC[88] Batch [130]\n",
      "loss 2.11372\n",
      "Runing in EPOC[88] Batch [140]\n",
      "loss 1.94976\n",
      "Runing in EPOC[88] Batch [150]\n",
      "loss 2.01533\n",
      "Runing in EPOC[88] Batch [160]\n",
      "loss 1.90302\n",
      "Runing in EPOC[88] Batch [170]\n",
      "loss 1.95711\n",
      "Runing in EPOC[88] Batch [180]\n",
      "loss 1.99378\n",
      "Runing in EPOC[88] Batch [190]\n",
      "loss 2.02723\n",
      "Runing in EPOC[89] Batch [10]\n",
      "loss 1.89369\n",
      "Runing in EPOC[89] Batch [20]\n",
      "loss 1.86349\n",
      "Runing in EPOC[89] Batch [30]\n",
      "loss 1.92477\n",
      "Runing in EPOC[89] Batch [40]\n",
      "loss 1.92026\n",
      "Runing in EPOC[89] Batch [50]\n",
      "loss 1.98352\n",
      "Runing in EPOC[89] Batch [60]\n",
      "loss 1.98553\n",
      "Runing in EPOC[89] Batch [70]\n",
      "loss 1.96696\n",
      "Runing in EPOC[89] Batch [80]\n",
      "loss 1.99615\n",
      "Runing in EPOC[89] Batch [90]\n",
      "loss 2.04751\n",
      "Runing in EPOC[89] Batch [100]\n",
      "loss 2.04443\n",
      "Runing in EPOC[89] Batch [110]\n",
      "loss 2.08864\n",
      "Runing in EPOC[89] Batch [120]\n",
      "loss 2.0115\n",
      "Runing in EPOC[89] Batch [130]\n",
      "loss 2.09799\n",
      "Runing in EPOC[89] Batch [140]\n",
      "loss 1.94018\n",
      "Runing in EPOC[89] Batch [150]\n",
      "loss 2.00489\n",
      "Runing in EPOC[89] Batch [160]\n",
      "loss 1.89115\n",
      "Runing in EPOC[89] Batch [170]\n",
      "loss 1.93675\n",
      "Runing in EPOC[89] Batch [180]\n",
      "loss 1.98576\n",
      "Runing in EPOC[89] Batch [190]\n",
      "loss 2.02277\n",
      "Runing in EPOC[90] Batch [10]\n",
      "loss 1.88179\n",
      "Runing in EPOC[90] Batch [20]\n",
      "loss 1.85936\n",
      "Runing in EPOC[90] Batch [30]\n",
      "loss 1.91738\n",
      "Runing in EPOC[90] Batch [40]\n",
      "loss 1.91081\n",
      "Runing in EPOC[90] Batch [50]\n",
      "loss 1.97141\n",
      "Runing in EPOC[90] Batch [60]\n",
      "loss 1.98115\n",
      "Runing in EPOC[90] Batch [70]\n",
      "loss 1.9577\n",
      "Runing in EPOC[90] Batch [80]\n",
      "loss 1.98953\n",
      "Runing in EPOC[90] Batch [90]\n",
      "loss 2.03959\n",
      "Runing in EPOC[90] Batch [100]\n",
      "loss 2.04536\n",
      "Runing in EPOC[90] Batch [110]\n",
      "loss 2.08873\n",
      "Runing in EPOC[90] Batch [120]\n",
      "loss 2.01214\n",
      "Runing in EPOC[90] Batch [130]\n",
      "loss 2.09486\n",
      "Runing in EPOC[90] Batch [140]\n",
      "loss 1.93675\n",
      "Runing in EPOC[90] Batch [150]\n",
      "loss 2.00575\n",
      "Runing in EPOC[90] Batch [160]\n",
      "loss 1.89331\n",
      "Runing in EPOC[90] Batch [170]\n",
      "loss 1.92095\n",
      "Runing in EPOC[90] Batch [180]\n",
      "loss 1.95403\n",
      "Runing in EPOC[90] Batch [190]\n",
      "loss 1.99259\n",
      "Runing in EPOC[91] Batch [10]\n",
      "loss 1.87886\n",
      "Runing in EPOC[91] Batch [20]\n",
      "loss 1.86183\n",
      "Runing in EPOC[91] Batch [30]\n",
      "loss 1.91521\n",
      "Runing in EPOC[91] Batch [40]\n",
      "loss 1.90003\n",
      "Runing in EPOC[91] Batch [50]\n",
      "loss 1.95625\n",
      "Runing in EPOC[91] Batch [60]\n",
      "loss 1.96744\n",
      "Runing in EPOC[91] Batch [70]\n",
      "loss 1.95124\n",
      "Runing in EPOC[91] Batch [80]\n",
      "loss 1.97964\n",
      "Runing in EPOC[91] Batch [90]\n",
      "loss 2.03309\n",
      "Runing in EPOC[91] Batch [100]\n",
      "loss 2.03819\n",
      "Runing in EPOC[91] Batch [110]\n",
      "loss 2.07979\n",
      "Runing in EPOC[91] Batch [120]\n",
      "loss 2.00446\n",
      "Runing in EPOC[91] Batch [130]\n",
      "loss 2.09744\n",
      "Runing in EPOC[91] Batch [140]\n",
      "loss 1.94245\n",
      "Runing in EPOC[91] Batch [150]\n",
      "loss 2.01364\n",
      "Runing in EPOC[91] Batch [160]\n",
      "loss 1.89707\n",
      "Runing in EPOC[91] Batch [170]\n",
      "loss 1.92352\n",
      "Runing in EPOC[91] Batch [180]\n",
      "loss 1.9498\n",
      "Runing in EPOC[91] Batch [190]\n",
      "loss 1.97775\n",
      "Runing in EPOC[92] Batch [10]\n",
      "loss 1.88039\n",
      "Runing in EPOC[92] Batch [20]\n",
      "loss 1.86091\n",
      "Runing in EPOC[92] Batch [30]\n",
      "loss 1.91437\n",
      "Runing in EPOC[92] Batch [40]\n",
      "loss 1.89692\n",
      "Runing in EPOC[92] Batch [50]\n",
      "loss 1.94555\n",
      "Runing in EPOC[92] Batch [60]\n",
      "loss 1.964\n",
      "Runing in EPOC[92] Batch [70]\n",
      "loss 1.95233\n",
      "Runing in EPOC[92] Batch [80]\n",
      "loss 1.98218\n",
      "Runing in EPOC[92] Batch [90]\n",
      "loss 2.02907\n",
      "Runing in EPOC[92] Batch [100]\n",
      "loss 2.02737\n",
      "Runing in EPOC[92] Batch [110]\n",
      "loss 2.0743\n",
      "Runing in EPOC[92] Batch [120]\n",
      "loss 2.00522\n",
      "Runing in EPOC[92] Batch [130]\n",
      "loss 2.09578\n",
      "Runing in EPOC[92] Batch [140]\n",
      "loss 1.9435\n",
      "Runing in EPOC[92] Batch [150]\n",
      "loss 2.0115\n",
      "Runing in EPOC[92] Batch [160]\n",
      "loss 1.90345\n",
      "Runing in EPOC[92] Batch [170]\n",
      "loss 1.93068\n",
      "Runing in EPOC[92] Batch [180]\n",
      "loss 1.9482\n",
      "Runing in EPOC[92] Batch [190]\n",
      "loss 1.98417\n",
      "Runing in EPOC[93] Batch [10]\n",
      "loss 1.87537\n",
      "Runing in EPOC[93] Batch [20]\n",
      "loss 1.86223\n",
      "Runing in EPOC[93] Batch [30]\n",
      "loss 1.9161\n",
      "Runing in EPOC[93] Batch [40]\n",
      "loss 1.89881\n",
      "Runing in EPOC[93] Batch [50]\n",
      "loss 1.94351\n",
      "Runing in EPOC[93] Batch [60]\n",
      "loss 1.95727\n",
      "Runing in EPOC[93] Batch [70]\n",
      "loss 1.94657\n",
      "Runing in EPOC[93] Batch [80]\n",
      "loss 1.97545\n",
      "Runing in EPOC[93] Batch [90]\n",
      "loss 2.0287\n",
      "Runing in EPOC[93] Batch [100]\n",
      "loss 2.02509\n",
      "Runing in EPOC[93] Batch [110]\n",
      "loss 2.06931\n",
      "Runing in EPOC[93] Batch [120]\n",
      "loss 2.00147\n",
      "Runing in EPOC[93] Batch [130]\n",
      "loss 2.10201\n",
      "Runing in EPOC[93] Batch [140]\n",
      "loss 1.94912\n",
      "Runing in EPOC[93] Batch [150]\n",
      "loss 2.01372\n",
      "Runing in EPOC[93] Batch [160]\n",
      "loss 1.88592\n",
      "Runing in EPOC[93] Batch [170]\n",
      "loss 1.91296\n",
      "Runing in EPOC[93] Batch [180]\n",
      "loss 1.94939\n",
      "Runing in EPOC[93] Batch [190]\n",
      "loss 1.99505\n",
      "Runing in EPOC[94] Batch [10]\n",
      "loss 1.86517\n",
      "Runing in EPOC[94] Batch [20]\n",
      "loss 1.84729\n",
      "Runing in EPOC[94] Batch [30]\n",
      "loss 1.91152\n",
      "Runing in EPOC[94] Batch [40]\n",
      "loss 1.90986\n",
      "Runing in EPOC[94] Batch [50]\n",
      "loss 1.96287\n",
      "Runing in EPOC[94] Batch [60]\n",
      "loss 1.97815\n",
      "Runing in EPOC[94] Batch [70]\n",
      "loss 1.95208\n",
      "Runing in EPOC[94] Batch [80]\n",
      "loss 1.96845\n",
      "Runing in EPOC[94] Batch [90]\n",
      "loss 2.01793\n",
      "Runing in EPOC[94] Batch [100]\n",
      "loss 2.01636\n",
      "Runing in EPOC[94] Batch [110]\n",
      "loss 2.05426\n",
      "Runing in EPOC[94] Batch [120]\n",
      "loss 1.98911\n",
      "Runing in EPOC[94] Batch [130]\n",
      "loss 2.08114\n",
      "Runing in EPOC[94] Batch [140]\n",
      "loss 1.93366\n",
      "Runing in EPOC[94] Batch [150]\n",
      "loss 1.99856\n",
      "Runing in EPOC[94] Batch [160]\n",
      "loss 1.87954\n",
      "Runing in EPOC[94] Batch [170]\n",
      "loss 1.90822\n",
      "Runing in EPOC[94] Batch [180]\n",
      "loss 1.95075\n",
      "Runing in EPOC[94] Batch [190]\n",
      "loss 1.99367\n",
      "Runing in EPOC[95] Batch [10]\n",
      "loss 1.87216\n",
      "Runing in EPOC[95] Batch [20]\n",
      "loss 1.85685\n",
      "Runing in EPOC[95] Batch [30]\n",
      "loss 1.90059\n",
      "Runing in EPOC[95] Batch [40]\n",
      "loss 1.89998\n",
      "Runing in EPOC[95] Batch [50]\n",
      "loss 1.95624\n",
      "Runing in EPOC[95] Batch [60]\n",
      "loss 1.9799\n",
      "Runing in EPOC[95] Batch [70]\n",
      "loss 1.96969\n",
      "Runing in EPOC[95] Batch [80]\n",
      "loss 1.98645\n",
      "Runing in EPOC[95] Batch [90]\n",
      "loss 2.02936\n",
      "Runing in EPOC[95] Batch [100]\n",
      "loss 2.02393\n",
      "Runing in EPOC[95] Batch [110]\n",
      "loss 2.05998\n",
      "Runing in EPOC[95] Batch [120]\n",
      "loss 1.98157\n",
      "Runing in EPOC[95] Batch [130]\n",
      "loss 2.0755\n",
      "Runing in EPOC[95] Batch [140]\n",
      "loss 1.92466\n",
      "Runing in EPOC[95] Batch [150]\n",
      "loss 2.00519\n",
      "Runing in EPOC[95] Batch [160]\n",
      "loss 1.87633\n",
      "Runing in EPOC[95] Batch [170]\n",
      "loss 1.92327\n",
      "Runing in EPOC[95] Batch [180]\n",
      "loss 1.95683\n",
      "Runing in EPOC[95] Batch [190]\n",
      "loss 1.99491\n",
      "Runing in EPOC[96] Batch [10]\n",
      "loss 1.89053\n",
      "Runing in EPOC[96] Batch [20]\n",
      "loss 1.86202\n",
      "Runing in EPOC[96] Batch [30]\n",
      "loss 1.91004\n",
      "Runing in EPOC[96] Batch [40]\n",
      "loss 1.9068\n",
      "Runing in EPOC[96] Batch [50]\n",
      "loss 1.9528\n",
      "Runing in EPOC[96] Batch [60]\n",
      "loss 1.97501\n",
      "Runing in EPOC[96] Batch [70]\n",
      "loss 1.97189\n",
      "Runing in EPOC[96] Batch [80]\n",
      "loss 1.98814\n",
      "Runing in EPOC[96] Batch [90]\n",
      "loss 2.04697\n",
      "Runing in EPOC[96] Batch [100]\n",
      "loss 2.04237\n",
      "Runing in EPOC[96] Batch [110]\n",
      "loss 2.06471\n",
      "Runing in EPOC[96] Batch [120]\n",
      "loss 1.99011\n",
      "Runing in EPOC[96] Batch [130]\n",
      "loss 2.08131\n",
      "Runing in EPOC[96] Batch [140]\n",
      "loss 1.93049\n",
      "Runing in EPOC[96] Batch [150]\n",
      "loss 2.00584\n",
      "Runing in EPOC[96] Batch [160]\n",
      "loss 1.88379\n",
      "Runing in EPOC[96] Batch [170]\n",
      "loss 1.9259\n",
      "Runing in EPOC[96] Batch [180]\n",
      "loss 1.94831\n",
      "Runing in EPOC[96] Batch [190]\n",
      "loss 1.98693\n",
      "Runing in EPOC[97] Batch [10]\n",
      "loss 1.87625\n",
      "Runing in EPOC[97] Batch [20]\n",
      "loss 1.84911\n",
      "Runing in EPOC[97] Batch [30]\n",
      "loss 1.89474\n",
      "Runing in EPOC[97] Batch [40]\n",
      "loss 1.89796\n",
      "Runing in EPOC[97] Batch [50]\n",
      "loss 1.94704\n",
      "Runing in EPOC[97] Batch [60]\n",
      "loss 1.9766\n",
      "Runing in EPOC[97] Batch [70]\n",
      "loss 1.9548\n",
      "Runing in EPOC[97] Batch [80]\n",
      "loss 1.97141\n",
      "Runing in EPOC[97] Batch [90]\n",
      "loss 2.03498\n",
      "Runing in EPOC[97] Batch [100]\n",
      "loss 2.02576\n",
      "Runing in EPOC[97] Batch [110]\n",
      "loss 2.0577\n",
      "Runing in EPOC[97] Batch [120]\n",
      "loss 1.98299\n",
      "Runing in EPOC[97] Batch [130]\n",
      "loss 2.07196\n",
      "Runing in EPOC[97] Batch [140]\n",
      "loss 1.91839\n",
      "Runing in EPOC[97] Batch [150]\n",
      "loss 1.99246\n",
      "Runing in EPOC[97] Batch [160]\n",
      "loss 1.86859\n",
      "Runing in EPOC[97] Batch [170]\n",
      "loss 1.90068\n",
      "Runing in EPOC[97] Batch [180]\n",
      "loss 1.93989\n",
      "Runing in EPOC[97] Batch [190]\n",
      "loss 1.9786\n",
      "Runing in EPOC[98] Batch [10]\n",
      "loss 1.87073\n",
      "Runing in EPOC[98] Batch [20]\n",
      "loss 1.83706\n",
      "Runing in EPOC[98] Batch [30]\n",
      "loss 1.90246\n",
      "Runing in EPOC[98] Batch [40]\n",
      "loss 1.89415\n",
      "Runing in EPOC[98] Batch [50]\n",
      "loss 1.94163\n",
      "Runing in EPOC[98] Batch [60]\n",
      "loss 1.95882\n",
      "Runing in EPOC[98] Batch [70]\n",
      "loss 1.93954\n",
      "Runing in EPOC[98] Batch [80]\n",
      "loss 1.96072\n",
      "Runing in EPOC[98] Batch [90]\n",
      "loss 2.0322\n",
      "Runing in EPOC[98] Batch [100]\n",
      "loss 2.0148\n",
      "Runing in EPOC[98] Batch [110]\n",
      "loss 2.04715\n",
      "Runing in EPOC[98] Batch [120]\n",
      "loss 1.97948\n",
      "Runing in EPOC[98] Batch [130]\n",
      "loss 2.06807\n",
      "Runing in EPOC[98] Batch [140]\n",
      "loss 1.91486\n",
      "Runing in EPOC[98] Batch [150]\n",
      "loss 1.98877\n",
      "Runing in EPOC[98] Batch [160]\n",
      "loss 1.86511\n",
      "Runing in EPOC[98] Batch [170]\n",
      "loss 1.89661\n",
      "Runing in EPOC[98] Batch [180]\n",
      "loss 1.93925\n",
      "Runing in EPOC[98] Batch [190]\n",
      "loss 1.98442\n",
      "Runing in EPOC[99] Batch [10]\n",
      "loss 1.87358\n",
      "Runing in EPOC[99] Batch [20]\n",
      "loss 1.84691\n",
      "Runing in EPOC[99] Batch [30]\n",
      "loss 1.90485\n",
      "Runing in EPOC[99] Batch [40]\n",
      "loss 1.8825\n",
      "Runing in EPOC[99] Batch [50]\n",
      "loss 1.92757\n",
      "Runing in EPOC[99] Batch [60]\n",
      "loss 1.95115\n",
      "Runing in EPOC[99] Batch [70]\n",
      "loss 1.93178\n",
      "Runing in EPOC[99] Batch [80]\n",
      "loss 1.95569\n",
      "Runing in EPOC[99] Batch [90]\n",
      "loss 2.02937\n",
      "Runing in EPOC[99] Batch [100]\n",
      "loss 2.01011\n",
      "Runing in EPOC[99] Batch [110]\n",
      "loss 2.05699\n",
      "Runing in EPOC[99] Batch [120]\n",
      "loss 1.99225\n",
      "Runing in EPOC[99] Batch [130]\n",
      "loss 2.08991\n",
      "Runing in EPOC[99] Batch [140]\n",
      "loss 1.92047\n",
      "Runing in EPOC[99] Batch [150]\n",
      "loss 1.99169\n",
      "Runing in EPOC[99] Batch [160]\n",
      "loss 1.85933\n",
      "Runing in EPOC[99] Batch [170]\n",
      "loss 1.89652\n",
      "Runing in EPOC[99] Batch [180]\n",
      "loss 1.92921\n",
      "Runing in EPOC[99] Batch [190]\n",
      "loss 1.97202\n",
      "Runing in EPOC[100] Batch [10]\n",
      "loss 1.85948\n",
      "Runing in EPOC[100] Batch [20]\n",
      "loss 1.85313\n",
      "Runing in EPOC[100] Batch [30]\n",
      "loss 1.9084\n",
      "Runing in EPOC[100] Batch [40]\n",
      "loss 1.897\n",
      "Runing in EPOC[100] Batch [50]\n",
      "loss 1.94263\n",
      "Runing in EPOC[100] Batch [60]\n",
      "loss 1.96332\n",
      "Runing in EPOC[100] Batch [70]\n",
      "loss 1.94527\n",
      "Runing in EPOC[100] Batch [80]\n",
      "loss 1.95846\n",
      "Runing in EPOC[100] Batch [90]\n",
      "loss 2.01714\n",
      "Runing in EPOC[100] Batch [100]\n",
      "loss 2.00426\n",
      "Runing in EPOC[100] Batch [110]\n",
      "loss 2.051\n",
      "Runing in EPOC[100] Batch [120]\n",
      "loss 1.98223\n",
      "Runing in EPOC[100] Batch [130]\n",
      "loss 2.08121\n",
      "Runing in EPOC[100] Batch [140]\n",
      "loss 1.93707\n",
      "Runing in EPOC[100] Batch [150]\n",
      "loss 1.99501\n",
      "Runing in EPOC[100] Batch [160]\n",
      "loss 1.86308\n",
      "Runing in EPOC[100] Batch [170]\n",
      "loss 1.89034\n",
      "Runing in EPOC[100] Batch [180]\n",
      "loss 1.92679\n",
      "Runing in EPOC[100] Batch [190]\n",
      "loss 1.95814\n",
      "Runing in EPOC[101] Batch [10]\n",
      "loss 1.86103\n",
      "Runing in EPOC[101] Batch [20]\n",
      "loss 1.8505\n",
      "Runing in EPOC[101] Batch [30]\n",
      "loss 1.90207\n",
      "Runing in EPOC[101] Batch [40]\n",
      "loss 1.88839\n",
      "Runing in EPOC[101] Batch [50]\n",
      "loss 1.95062\n",
      "Runing in EPOC[101] Batch [60]\n",
      "loss 1.9758\n",
      "Runing in EPOC[101] Batch [70]\n",
      "loss 1.94933\n",
      "Runing in EPOC[101] Batch [80]\n",
      "loss 1.9508\n",
      "Runing in EPOC[101] Batch [90]\n",
      "loss 2.00851\n",
      "Runing in EPOC[101] Batch [100]\n",
      "loss 2.00395\n",
      "Runing in EPOC[101] Batch [110]\n",
      "loss 2.04558\n",
      "Runing in EPOC[101] Batch [120]\n",
      "loss 1.97222\n",
      "Runing in EPOC[101] Batch [130]\n",
      "loss 2.06568\n",
      "Runing in EPOC[101] Batch [140]\n",
      "loss 1.93098\n",
      "Runing in EPOC[101] Batch [150]\n",
      "loss 2.00044\n",
      "Runing in EPOC[101] Batch [160]\n",
      "loss 1.87619\n",
      "Runing in EPOC[101] Batch [170]\n",
      "loss 1.90643\n",
      "Runing in EPOC[101] Batch [180]\n",
      "loss 1.93207\n",
      "Runing in EPOC[101] Batch [190]\n",
      "loss 1.96572\n",
      "Runing in EPOC[102] Batch [10]\n",
      "loss 1.85051\n",
      "Runing in EPOC[102] Batch [20]\n",
      "loss 1.83293\n",
      "Runing in EPOC[102] Batch [30]\n",
      "loss 1.88476\n",
      "Runing in EPOC[102] Batch [40]\n",
      "loss 1.88269\n",
      "Runing in EPOC[102] Batch [50]\n",
      "loss 1.93486\n",
      "Runing in EPOC[102] Batch [60]\n",
      "loss 1.96663\n",
      "Runing in EPOC[102] Batch [70]\n",
      "loss 1.95173\n",
      "Runing in EPOC[102] Batch [80]\n",
      "loss 1.95972\n",
      "Runing in EPOC[102] Batch [90]\n",
      "loss 2.01429\n",
      "Runing in EPOC[102] Batch [100]\n",
      "loss 2.00515\n",
      "Runing in EPOC[102] Batch [110]\n",
      "loss 2.04634\n",
      "Runing in EPOC[102] Batch [120]\n",
      "loss 1.96465\n",
      "Runing in EPOC[102] Batch [130]\n",
      "loss 2.05503\n",
      "Runing in EPOC[102] Batch [140]\n",
      "loss 1.91944\n",
      "Runing in EPOC[102] Batch [150]\n",
      "loss 1.99202\n",
      "Runing in EPOC[102] Batch [160]\n",
      "loss 1.87525\n",
      "Runing in EPOC[102] Batch [170]\n",
      "loss 1.90979\n",
      "Runing in EPOC[102] Batch [180]\n",
      "loss 1.94545\n",
      "Runing in EPOC[102] Batch [190]\n",
      "loss 1.98461\n",
      "Runing in EPOC[103] Batch [10]\n",
      "loss 1.85329\n",
      "Runing in EPOC[103] Batch [20]\n",
      "loss 1.82381\n",
      "Runing in EPOC[103] Batch [30]\n",
      "loss 1.88597\n",
      "Runing in EPOC[103] Batch [40]\n",
      "loss 1.88283\n",
      "Runing in EPOC[103] Batch [50]\n",
      "loss 1.93669\n",
      "Runing in EPOC[103] Batch [60]\n",
      "loss 1.96121\n",
      "Runing in EPOC[103] Batch [70]\n",
      "loss 1.94107\n",
      "Runing in EPOC[103] Batch [80]\n",
      "loss 1.95113\n",
      "Runing in EPOC[103] Batch [90]\n",
      "loss 2.0226\n",
      "Runing in EPOC[103] Batch [100]\n",
      "loss 2.02367\n",
      "Runing in EPOC[103] Batch [110]\n",
      "loss 2.05345\n",
      "Runing in EPOC[103] Batch [120]\n",
      "loss 1.96964\n",
      "Runing in EPOC[103] Batch [130]\n",
      "loss 2.06201\n",
      "Runing in EPOC[103] Batch [140]\n",
      "loss 1.91192\n",
      "Runing in EPOC[103] Batch [150]\n",
      "loss 1.97297\n",
      "Runing in EPOC[103] Batch [160]\n",
      "loss 1.86129\n",
      "Runing in EPOC[103] Batch [170]\n",
      "loss 1.9022\n",
      "Runing in EPOC[103] Batch [180]\n",
      "loss 1.9407\n",
      "Runing in EPOC[103] Batch [190]\n",
      "loss 1.97706\n",
      "Runing in EPOC[104] Batch [10]\n",
      "loss 1.85266\n",
      "Runing in EPOC[104] Batch [20]\n",
      "loss 1.82354\n",
      "Runing in EPOC[104] Batch [30]\n",
      "loss 1.88202\n",
      "Runing in EPOC[104] Batch [40]\n",
      "loss 1.88398\n",
      "Runing in EPOC[104] Batch [50]\n",
      "loss 1.93285\n",
      "Runing in EPOC[104] Batch [60]\n",
      "loss 1.95171\n",
      "Runing in EPOC[104] Batch [70]\n",
      "loss 1.9354\n",
      "Runing in EPOC[104] Batch [80]\n",
      "loss 1.94424\n",
      "Runing in EPOC[104] Batch [90]\n",
      "loss 2.0144\n",
      "Runing in EPOC[104] Batch [100]\n",
      "loss 2.00651\n",
      "Runing in EPOC[104] Batch [110]\n",
      "loss 2.04803\n",
      "Runing in EPOC[104] Batch [120]\n",
      "loss 1.97495\n",
      "Runing in EPOC[104] Batch [130]\n",
      "loss 2.06504\n",
      "Runing in EPOC[104] Batch [140]\n",
      "loss 1.91991\n",
      "Runing in EPOC[104] Batch [150]\n",
      "loss 1.96742\n",
      "Runing in EPOC[104] Batch [160]\n",
      "loss 1.87001\n",
      "Runing in EPOC[104] Batch [170]\n",
      "loss 1.91183\n",
      "Runing in EPOC[104] Batch [180]\n",
      "loss 1.93816\n",
      "Runing in EPOC[104] Batch [190]\n",
      "loss 1.96716\n",
      "Runing in EPOC[105] Batch [10]\n",
      "loss 1.86165\n",
      "Runing in EPOC[105] Batch [20]\n",
      "loss 1.83079\n",
      "Runing in EPOC[105] Batch [30]\n",
      "loss 1.87871\n",
      "Runing in EPOC[105] Batch [40]\n",
      "loss 1.88774\n",
      "Runing in EPOC[105] Batch [50]\n",
      "loss 1.93118\n",
      "Runing in EPOC[105] Batch [60]\n",
      "loss 1.94635\n",
      "Runing in EPOC[105] Batch [70]\n",
      "loss 1.93638\n",
      "Runing in EPOC[105] Batch [80]\n",
      "loss 1.95417\n",
      "Runing in EPOC[105] Batch [90]\n",
      "loss 2.01576\n",
      "Runing in EPOC[105] Batch [100]\n",
      "loss 2.00427\n",
      "Runing in EPOC[105] Batch [110]\n",
      "loss 2.03274\n",
      "Runing in EPOC[105] Batch [120]\n",
      "loss 1.96349\n",
      "Runing in EPOC[105] Batch [130]\n",
      "loss 2.05289\n",
      "Runing in EPOC[105] Batch [140]\n",
      "loss 1.91805\n",
      "Runing in EPOC[105] Batch [150]\n",
      "loss 1.97765\n",
      "Runing in EPOC[105] Batch [160]\n",
      "loss 1.88137\n",
      "Runing in EPOC[105] Batch [170]\n",
      "loss 1.90371\n",
      "Runing in EPOC[105] Batch [180]\n",
      "loss 1.92513\n",
      "Runing in EPOC[105] Batch [190]\n",
      "loss 1.9635\n",
      "Runing in EPOC[106] Batch [10]\n",
      "loss 1.85679\n",
      "Runing in EPOC[106] Batch [20]\n",
      "loss 1.82774\n",
      "Runing in EPOC[106] Batch [30]\n",
      "loss 1.89135\n",
      "Runing in EPOC[106] Batch [40]\n",
      "loss 1.88254\n",
      "Runing in EPOC[106] Batch [50]\n",
      "loss 1.91652\n",
      "Runing in EPOC[106] Batch [60]\n",
      "loss 1.93603\n",
      "Runing in EPOC[106] Batch [70]\n",
      "loss 1.93218\n",
      "Runing in EPOC[106] Batch [80]\n",
      "loss 1.95925\n",
      "Runing in EPOC[106] Batch [90]\n",
      "loss 2.01985\n",
      "Runing in EPOC[106] Batch [100]\n",
      "loss 2.00375\n",
      "Runing in EPOC[106] Batch [110]\n",
      "loss 2.03339\n",
      "Runing in EPOC[106] Batch [120]\n",
      "loss 1.96505\n",
      "Runing in EPOC[106] Batch [130]\n",
      "loss 2.04401\n",
      "Runing in EPOC[106] Batch [140]\n",
      "loss 1.90161\n",
      "Runing in EPOC[106] Batch [150]\n",
      "loss 1.96498\n",
      "Runing in EPOC[106] Batch [160]\n",
      "loss 1.86067\n",
      "Runing in EPOC[106] Batch [170]\n",
      "loss 1.88684\n",
      "Runing in EPOC[106] Batch [180]\n",
      "loss 1.92875\n",
      "Runing in EPOC[106] Batch [190]\n",
      "loss 1.96651\n",
      "Runing in EPOC[107] Batch [10]\n",
      "loss 1.83547\n",
      "Runing in EPOC[107] Batch [20]\n",
      "loss 1.81431\n",
      "Runing in EPOC[107] Batch [30]\n",
      "loss 1.88476\n",
      "Runing in EPOC[107] Batch [40]\n",
      "loss 1.86851\n",
      "Runing in EPOC[107] Batch [50]\n",
      "loss 1.91482\n",
      "Runing in EPOC[107] Batch [60]\n",
      "loss 1.9355\n",
      "Runing in EPOC[107] Batch [70]\n",
      "loss 1.93836\n",
      "Runing in EPOC[107] Batch [80]\n",
      "loss 1.9588\n",
      "Runing in EPOC[107] Batch [90]\n",
      "loss 2.00855\n",
      "Runing in EPOC[107] Batch [100]\n",
      "loss 1.99417\n",
      "Runing in EPOC[107] Batch [110]\n",
      "loss 2.02771\n",
      "Runing in EPOC[107] Batch [120]\n",
      "loss 1.96063\n",
      "Runing in EPOC[107] Batch [130]\n",
      "loss 2.05302\n",
      "Runing in EPOC[107] Batch [140]\n",
      "loss 1.91043\n",
      "Runing in EPOC[107] Batch [150]\n",
      "loss 1.97661\n",
      "Runing in EPOC[107] Batch [160]\n",
      "loss 1.86716\n",
      "Runing in EPOC[107] Batch [170]\n",
      "loss 1.88413\n",
      "Runing in EPOC[107] Batch [180]\n",
      "loss 1.92408\n",
      "Runing in EPOC[107] Batch [190]\n",
      "loss 1.96296\n",
      "Runing in EPOC[108] Batch [10]\n",
      "loss 1.84086\n",
      "Runing in EPOC[108] Batch [20]\n",
      "loss 1.81365\n",
      "Runing in EPOC[108] Batch [30]\n",
      "loss 1.87366\n",
      "Runing in EPOC[108] Batch [40]\n",
      "loss 1.857\n",
      "Runing in EPOC[108] Batch [50]\n",
      "loss 1.90559\n",
      "Runing in EPOC[108] Batch [60]\n",
      "loss 1.92454\n",
      "Runing in EPOC[108] Batch [70]\n",
      "loss 1.92691\n",
      "Runing in EPOC[108] Batch [80]\n",
      "loss 1.94796\n",
      "Runing in EPOC[108] Batch [90]\n",
      "loss 2.0006\n",
      "Runing in EPOC[108] Batch [100]\n",
      "loss 1.99576\n",
      "Runing in EPOC[108] Batch [110]\n",
      "loss 2.02393\n",
      "Runing in EPOC[108] Batch [120]\n",
      "loss 1.96005\n",
      "Runing in EPOC[108] Batch [130]\n",
      "loss 2.0405\n",
      "Runing in EPOC[108] Batch [140]\n",
      "loss 1.90188\n",
      "Runing in EPOC[108] Batch [150]\n",
      "loss 1.9602\n",
      "Runing in EPOC[108] Batch [160]\n",
      "loss 1.85988\n",
      "Runing in EPOC[108] Batch [170]\n",
      "loss 1.87939\n",
      "Runing in EPOC[108] Batch [180]\n",
      "loss 1.92203\n",
      "Runing in EPOC[108] Batch [190]\n",
      "loss 1.95652\n",
      "Runing in EPOC[109] Batch [10]\n",
      "loss 1.84927\n",
      "Runing in EPOC[109] Batch [20]\n",
      "loss 1.80943\n",
      "Runing in EPOC[109] Batch [30]\n",
      "loss 1.86442\n",
      "Runing in EPOC[109] Batch [40]\n",
      "loss 1.85979\n",
      "Runing in EPOC[109] Batch [50]\n",
      "loss 1.91145\n",
      "Runing in EPOC[109] Batch [60]\n",
      "loss 1.92484\n",
      "Runing in EPOC[109] Batch [70]\n",
      "loss 1.92472\n",
      "Runing in EPOC[109] Batch [80]\n",
      "loss 1.93976\n",
      "Runing in EPOC[109] Batch [90]\n",
      "loss 2.00803\n",
      "Runing in EPOC[109] Batch [100]\n",
      "loss 2.00012\n",
      "Runing in EPOC[109] Batch [110]\n",
      "loss 2.03858\n",
      "Runing in EPOC[109] Batch [120]\n",
      "loss 1.96018\n",
      "Runing in EPOC[109] Batch [130]\n",
      "loss 2.03196\n",
      "Runing in EPOC[109] Batch [140]\n",
      "loss 1.89518\n",
      "Runing in EPOC[109] Batch [150]\n",
      "loss 1.95526\n",
      "Runing in EPOC[109] Batch [160]\n",
      "loss 1.85885\n",
      "Runing in EPOC[109] Batch [170]\n",
      "loss 1.88063\n",
      "Runing in EPOC[109] Batch [180]\n",
      "loss 1.92619\n",
      "Runing in EPOC[109] Batch [190]\n",
      "loss 1.96203\n",
      "Runing in EPOC[110] Batch [10]\n",
      "loss 1.83419\n",
      "Runing in EPOC[110] Batch [20]\n",
      "loss 1.80864\n",
      "Runing in EPOC[110] Batch [30]\n",
      "loss 1.87043\n",
      "Runing in EPOC[110] Batch [40]\n",
      "loss 1.8608\n",
      "Runing in EPOC[110] Batch [50]\n",
      "loss 1.90994\n",
      "Runing in EPOC[110] Batch [60]\n",
      "loss 1.9323\n",
      "Runing in EPOC[110] Batch [70]\n",
      "loss 1.92557\n",
      "Runing in EPOC[110] Batch [80]\n",
      "loss 1.93996\n",
      "Runing in EPOC[110] Batch [90]\n",
      "loss 1.99352\n",
      "Runing in EPOC[110] Batch [100]\n",
      "loss 1.99374\n",
      "Runing in EPOC[110] Batch [110]\n",
      "loss 2.03259\n",
      "Runing in EPOC[110] Batch [120]\n",
      "loss 1.95484\n",
      "Runing in EPOC[110] Batch [130]\n",
      "loss 2.03053\n",
      "Runing in EPOC[110] Batch [140]\n",
      "loss 1.89056\n",
      "Runing in EPOC[110] Batch [150]\n",
      "loss 1.95779\n",
      "Runing in EPOC[110] Batch [160]\n",
      "loss 1.86171\n",
      "Runing in EPOC[110] Batch [170]\n",
      "loss 1.88176\n",
      "Runing in EPOC[110] Batch [180]\n",
      "loss 1.91402\n",
      "Runing in EPOC[110] Batch [190]\n",
      "loss 1.94493\n",
      "Runing in EPOC[111] Batch [10]\n",
      "loss 1.83757\n",
      "Runing in EPOC[111] Batch [20]\n",
      "loss 1.81233\n",
      "Runing in EPOC[111] Batch [30]\n",
      "loss 1.87518\n",
      "Runing in EPOC[111] Batch [40]\n",
      "loss 1.86016\n",
      "Runing in EPOC[111] Batch [50]\n",
      "loss 1.90442\n",
      "Runing in EPOC[111] Batch [60]\n",
      "loss 1.9272\n",
      "Runing in EPOC[111] Batch [70]\n",
      "loss 1.91496\n",
      "Runing in EPOC[111] Batch [80]\n",
      "loss 1.92177\n",
      "Runing in EPOC[111] Batch [90]\n",
      "loss 1.97305\n",
      "Runing in EPOC[111] Batch [100]\n",
      "loss 1.9856\n",
      "Runing in EPOC[111] Batch [110]\n",
      "loss 2.02578\n",
      "Runing in EPOC[111] Batch [120]\n",
      "loss 1.95968\n",
      "Runing in EPOC[111] Batch [130]\n",
      "loss 2.0337\n",
      "Runing in EPOC[111] Batch [140]\n",
      "loss 1.88986\n",
      "Runing in EPOC[111] Batch [150]\n",
      "loss 1.9662\n",
      "Runing in EPOC[111] Batch [160]\n",
      "loss 1.85703\n",
      "Runing in EPOC[111] Batch [170]\n",
      "loss 1.86959\n",
      "Runing in EPOC[111] Batch [180]\n",
      "loss 1.90481\n",
      "Runing in EPOC[111] Batch [190]\n",
      "loss 1.93759\n",
      "Runing in EPOC[112] Batch [10]\n",
      "loss 1.83735\n",
      "Runing in EPOC[112] Batch [20]\n",
      "loss 1.82244\n",
      "Runing in EPOC[112] Batch [30]\n",
      "loss 1.88088\n",
      "Runing in EPOC[112] Batch [40]\n",
      "loss 1.86545\n",
      "Runing in EPOC[112] Batch [50]\n",
      "loss 1.90186\n",
      "Runing in EPOC[112] Batch [60]\n",
      "loss 1.92289\n",
      "Runing in EPOC[112] Batch [70]\n",
      "loss 1.91617\n",
      "Runing in EPOC[112] Batch [80]\n",
      "loss 1.92517\n",
      "Runing in EPOC[112] Batch [90]\n",
      "loss 1.97766\n",
      "Runing in EPOC[112] Batch [100]\n",
      "loss 1.98715\n",
      "Runing in EPOC[112] Batch [110]\n",
      "loss 2.02923\n",
      "Runing in EPOC[112] Batch [120]\n",
      "loss 1.96025\n",
      "Runing in EPOC[112] Batch [130]\n",
      "loss 2.03683\n",
      "Runing in EPOC[112] Batch [140]\n",
      "loss 1.89781\n",
      "Runing in EPOC[112] Batch [150]\n",
      "loss 1.96557\n",
      "Runing in EPOC[112] Batch [160]\n",
      "loss 1.84427\n",
      "Runing in EPOC[112] Batch [170]\n",
      "loss 1.87828\n",
      "Runing in EPOC[112] Batch [180]\n",
      "loss 1.92056\n",
      "Runing in EPOC[112] Batch [190]\n",
      "loss 1.95109\n",
      "Runing in EPOC[113] Batch [10]\n",
      "loss 1.83037\n",
      "Runing in EPOC[113] Batch [20]\n",
      "loss 1.82634\n",
      "Runing in EPOC[113] Batch [30]\n",
      "loss 1.87601\n",
      "Runing in EPOC[113] Batch [40]\n",
      "loss 1.882\n",
      "Runing in EPOC[113] Batch [50]\n",
      "loss 1.91848\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0mTraceback (most recent call last)",
      "\u001b[0;32m<ipython-input-24-4425a73c361d>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     16\u001b[0m                 \u001b[0mencoder_inputs\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mencoder_inputs_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     17\u001b[0m                 \u001b[0mdecoder_inputs\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mdecoder_inputs_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 18\u001b[0;31m                 \u001b[0mdecoder_targets\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0mdecoder_targets_\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     19\u001b[0m         })\n\u001b[1;32m     20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    776\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    777\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 778\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    779\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    780\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    980\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    981\u001b[0m       results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m--> 982\u001b[0;31m                              feed_dict_string, options, run_metadata)\n\u001b[0m\u001b[1;32m    983\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    984\u001b[0m       \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1030\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1031\u001b[0m       return self._do_call(_run_fn, self._session, feed_dict, fetch_list,\n\u001b[0;32m-> 1032\u001b[0;31m                            target_list, options, run_metadata)\n\u001b[0m\u001b[1;32m   1033\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1034\u001b[0m       return self._do_call(_prun_fn, self._session, handle, feed_dict,\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m   1037\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1038\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1039\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1040\u001b[0m     \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1041\u001b[0m       \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m   1019\u001b[0m         return tf_session.TF_Run(session, options,\n\u001b[1;32m   1020\u001b[0m                                  \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1021\u001b[0;31m                                  status, run_metadata)\n\u001b[0m\u001b[1;32m   1022\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1023\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msession\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "batch_size = 128\n",
    "epocs = 1500\n",
    "saver = tf.train.Saver()\n",
    "\n",
    "for i in range(epocs):\n",
    "    j = 0\n",
    "    while (j < len(X_train)):\n",
    "        \n",
    "        encoder_inputs_ = map(lambda x:rpadd(x,maxlend),X_train[j:j+batch_size])\n",
    "        decoder_inputs_ = map(lambda x:rpadd(x,maxlenh,prefix=beg),Y_train[j:j+batch_size])        \n",
    "        decoder_targets_ = map(lambda x:x[1:] + [empty],decoder_inputs_)\n",
    "        \n",
    "        j = j + batch_size\n",
    "        _,loss_,labels__,decoder_prediction_ = sess.run([train_op,loss,labels_,decoder_prediction],\n",
    "            feed_dict={\n",
    "                encoder_inputs : encoder_inputs_,\n",
    "                decoder_inputs : decoder_inputs_,\n",
    "                decoder_targets : decoder_targets_\n",
    "        })\n",
    "        \n",
    "        \"\"\"\n",
    "        print \"encorder_inputs:\", encoder_inputs_\n",
    "        print \"decoder_inputs_:\", decoder_inputs_\n",
    "        print \"decoder_targets_\", decoder_targets_\n",
    "        print \"lables_\",labels__\n",
    "        print 'decoder_prediction is :' , decoder_prediction_\n",
    "        \"\"\"\n",
    "        if j % (batch_size * 1) == 0:\n",
    "            print \"Runing in EPOC[%d] Batch [%d]\" %(i, j / batch_size)\n",
    "            print \"loss\", loss_\n",
    "            \n",
    "            k = random.randint(0,len( encoder_inputs_)-1)\n",
    "            print \"-\" * 20\n",
    "            x = [beg]\n",
    "            for i in range(maxlenh):\n",
    "                \n",
    "                decoder_prediction = sess.run([decoder_prediction],\n",
    "                         feed_dict = {\n",
    "                            encoder_inputs : [encoder_inputs_[k]],\n",
    "                            decoder_inputs : [rpadd(x,maxlenh,prefix=beg)]\n",
    "                         }\n",
    "                )\n",
    "                if decoder_prediction[i] == eos:\n",
    "                    pass\n",
    "                else:\n",
    "                    x.append(decoder_prediction[i])\n",
    "            prt(\"[*预测标题*]\",x)\n",
    "                \n",
    "            prt(\"[**描  述**]\",encoder_inputs_[k])\n",
    "            prt(\"[*预测标题*]\",decoder_prediction_[k])\n",
    "            prt(\"[*真实标题*]\",decoder_inputs_[k])\n",
    "            print \"-\" * 20\n",
    "            \n",
    "    if i % 10 == 0:\n",
    "        saver.save(sess,\"model/TitleGeneration\",global_step = i)\n",
    "        \n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test_desc: [ 资 讯 - 牛 车 网 ] 本 田 新 思 域 <unk> y p e   <unk> 量 产 版 已 于 3 月 开 幕 的 2 0 1 7 日 内 瓦 车 展 上 正 式 发 布 ， 也 刚 刚 在 纽 北 夺 回 前 驱 最 速 的 头 衔 ， <unk> y p e <unk> 真 的 算 是 一 辆 神 车 ， 但 因 为 没 有 进 口 所 以 国 内 车 友 对 它 并 不 了 解 ， 但 资 深 的 本 田 粉 却 日 日 夜 夜 盼 着 它 的 到 来 。 <eos>\n"
     ]
    },
    {
     "ename": "TypeError",
     "evalue": "Fetch argument array([[1583, 3168,    2,    2,    2,    2,    2,    2,    2,    2,    2,\n           2,    2,    2,    2,    2,    2,    2,    2,    2,    2,    2,\n           2,    2,    2,    2,    2,    2,    2,    2,    2,    2,    2,\n           2,    2,    2,    2,    2,    2,    2]]) has invalid type <type 'numpy.ndarray'>, must be a string or Tensor. (Can not convert a ndarray into a Tensor or Operation.)",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m\u001b[0m",
      "\u001b[0;31mTypeError\u001b[0mTraceback (most recent call last)",
      "\u001b[0;32m<ipython-input-31-525c4a236255>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      7\u001b[0m              feed_dict = {\n\u001b[1;32m      8\u001b[0m                 \u001b[0mencoder_inputs\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mtest_encode_input\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 9\u001b[0;31m                 \u001b[0mdecoder_inputs\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mnew_decoder_input\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     10\u001b[0m              }\n\u001b[1;32m     11\u001b[0m     )\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    776\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    777\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 778\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    779\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    780\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    967\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    968\u001b[0m     \u001b[0;31m# Create a fetch handler to take care of the structure of fetches.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 969\u001b[0;31m     \u001b[0mfetch_handler\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_FetchHandler\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_graph\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict_string\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    970\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    971\u001b[0m     \u001b[0;31m# Run request and get response.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, graph, fetches, feeds)\u001b[0m\n\u001b[1;32m    406\u001b[0m     \"\"\"\n\u001b[1;32m    407\u001b[0m     \u001b[0;32mwith\u001b[0m \u001b[0mgraph\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_default\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 408\u001b[0;31m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_mapper\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    409\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetches\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    410\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_targets\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    228\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    229\u001b[0m       \u001b[0;31m# NOTE(touts): This is also the code path for namedtuples.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 230\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0m_ListFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    231\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    232\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0m_DictFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches)\u001b[0m\n\u001b[1;32m    335\u001b[0m     \"\"\"\n\u001b[1;32m    336\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 337\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    338\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    339\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    228\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    229\u001b[0m       \u001b[0;31m# NOTE(touts): This is also the code path for namedtuples.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 230\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0m_ListFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    231\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    232\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0m_DictFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches)\u001b[0m\n\u001b[1;32m    335\u001b[0m     \"\"\"\n\u001b[1;32m    336\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 337\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    338\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    339\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    236\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor_type\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    237\u001b[0m           \u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfetch_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 238\u001b[0;31m           \u001b[0;32mreturn\u001b[0m \u001b[0m_ElementFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    239\u001b[0m     \u001b[0;31m# Did not find anything.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    240\u001b[0m     raise TypeError('Fetch argument %r has invalid type %r' %\n",
      "\u001b[0;32m/home/hewei/.local/lib/python2.7/site-packages/tensorflow/python/client/session.pyc\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches, contraction_fn)\u001b[0m\n\u001b[1;32m    269\u001b[0m         raise TypeError('Fetch argument %r has invalid type %r, '\n\u001b[1;32m    270\u001b[0m                         \u001b[0;34m'must be a string or Tensor. (%s)'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 271\u001b[0;31m                         % (fetch, type(fetch), str(e)))\n\u001b[0m\u001b[1;32m    272\u001b[0m       \u001b[0;32mexcept\u001b[0m \u001b[0mValueError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    273\u001b[0m         raise ValueError('Fetch argument %r cannot be interpreted as a '\n",
      "\u001b[0;31mTypeError\u001b[0m: Fetch argument array([[1583, 3168,    2,    2,    2,    2,    2,    2,    2,    2,    2,\n           2,    2,    2,    2,    2,    2,    2,    2,    2,    2,    2,\n           2,    2,    2,    2,    2,    2,    2,    2,    2,    2,    2,\n           2,    2,    2,    2,    2,    2,    2]]) has invalid type <type 'numpy.ndarray'>, must be a string or Tensor. (Can not convert a ndarray into a Tensor or Operation.)"
     ]
    }
   ],
   "source": [
    "# Greedy decoding: feed the encoder once, then predict one token per step,\n",
    "# re-padding the partial output as the decoder input each iteration.\n",
    "test_x = []\n",
    "test_encode_input = rpadd(X_train[10],maxlend)\n",
    "prt(\"test_desc\",test_encode_input)\n",
    "for i in range(maxlenh):\n",
    "    new_decoder_input = rpadd(test_x,maxlenh,prefix=beg)\n",
    "    decoder_prediction_ = sess.run([decoder_prediction],\n",
    "             feed_dict = {\n",
    "                encoder_inputs : [test_encode_input],\n",
    "                decoder_inputs : [new_decoder_input]\n",
    "             }\n",
    "    )\n",
    "    # sess.run([t]) returns a list; its element has shape (1, maxlenh) for the\n",
    "    # single-item batch (see the stored traceback: array([[1583, 3168, ...]])),\n",
    "    # so the predicted sequence is [0][0], not [0].\n",
    "    pred = decoder_prediction_[0][0]\n",
    "    if pred[i] == eos:\n",
    "        break\n",
    "    else:\n",
    "        # Bug fix: append the fetched numpy value `pred[i]`, NOT\n",
    "        # `decoder_prediction[i]` — the latter slices the graph tensor and was\n",
    "        # the cause of the TypeError captured in this cell's output.\n",
    "        test_x.append(pred[i])\n",
    "prt(\"[*预测标题*]\",test_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
