{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# QA Abstract & Reasoning — Seq2seq Model Training"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "code_folding": []
   },
   "outputs": [],
   "source": [
    "# Basic setup: autoreload, imports, and project paths\n",
    "%load_ext autoreload\n",
    "%autoreload 2\n",
    "import os\n",
    "import sys\n",
    "import time\n",
    "import tensorflow as tf\n",
    "sys.path.append(\"E:\\\\GitHub\\\\QA-abstract-and-reasoning\")\n",
    "os.chdir(\"E:\\\\GitHub\\\\QA-abstract-and-reasoning\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1 Physical GPUs, 1 Logical GPUs\n",
      "Building the model ...\n",
      "Initializing from scratch.\n",
      "开始训练模型..\n",
      "mode: train\n",
      "epochs: 2\n",
      "batch_size: 64\n",
      "max_enc_len: 302\n",
      "max_dec_len: 37\n",
      "优化器信息：\n",
      "name: Adam\n",
      "clipnorm: 1\n",
      "learning_rate: 0.01\n",
      "decay: 1e-06\n",
      "beta_1: 0.9\n",
      "beta_2: 0.999\n",
      "epsilon: 1e-07\n",
      "amsgrad: False\n",
      "Epoch 1 Batch 1 Loss 3.1397\n",
      "Epoch 1 Batch 2 Loss 3.1889\n",
      "Epoch 1 Batch 3 Loss 3.7268\n",
      "Epoch 1 Batch 4 Loss 2.5972\n",
      "Epoch 1 Batch 5 Loss 2.1843\n",
      "Epoch 1 Batch 6 Loss 2.4704\n",
      "Epoch 1 Batch 7 Loss 2.0071\n",
      "Epoch 1 Batch 8 Loss 2.4526\n",
      "Epoch 1 Batch 9 Loss 2.4828\n",
      "Epoch 1 Batch 10 Loss 1.8599\n",
      "Epoch 1 Batch 11 Loss 1.8695\n",
      "Epoch 1 Batch 12 Loss 2.0444\n",
      "Epoch 1 Batch 13 Loss 2.1399\n",
      "Epoch 1 Batch 14 Loss 2.1955\n",
      "Epoch 1 Batch 15 Loss 1.7700\n",
      "Epoch 1 Batch 16 Loss 2.3578\n",
      "Epoch 1 Batch 17 Loss 1.7180\n",
      "Epoch 1 Batch 18 Loss 2.0114\n",
      "Epoch 1 Batch 19 Loss 1.6288\n",
      "Epoch 1 Batch 20 Loss 1.5244\n",
      "Epoch 1 Batch 21 Loss 1.8616\n",
      "Epoch 1 Batch 22 Loss 1.8346\n",
      "Epoch 1 Batch 23 Loss 1.6022\n",
      "Epoch 1 Batch 24 Loss 1.5044\n",
      "Epoch 1 Batch 25 Loss 1.8672\n",
      "Epoch 1 Batch 26 Loss 1.9859\n",
      "Epoch 1 Batch 27 Loss 1.4635\n",
      "Epoch 1 Batch 28 Loss 1.6455\n",
      "Epoch 1 Batch 29 Loss 1.6248\n",
      "Epoch 1 Batch 30 Loss 1.6565\n",
      "Epoch 1 Batch 31 Loss 1.4129\n",
      "Epoch 1 Batch 32 Loss 1.8474\n",
      "Epoch 1 Batch 33 Loss 1.8720\n",
      "Epoch 1 Batch 34 Loss 1.6017\n",
      "Epoch 1 Batch 35 Loss 1.4816\n",
      "Epoch 1 Batch 36 Loss 1.7530\n",
      "Epoch 1 Batch 37 Loss 1.6968\n",
      "Epoch 1 Batch 38 Loss 1.6057\n",
      "Epoch 1 Batch 39 Loss 1.8534\n",
      "Epoch 1 Batch 40 Loss 1.5332\n",
      "Epoch 1 Batch 41 Loss 1.8285\n",
      "Epoch 1 Batch 42 Loss 1.6273\n",
      "Epoch 1 Batch 43 Loss 1.6100\n",
      "Epoch 1 Batch 44 Loss 1.5783\n",
      "Epoch 1 Batch 45 Loss 1.5357\n",
      "Epoch 1 Batch 46 Loss 1.8880\n",
      "Epoch 1 Batch 47 Loss 1.7048\n",
      "Epoch 1 Batch 48 Loss 1.6274\n",
      "Epoch 1 Batch 49 Loss 1.7149\n",
      "Epoch 1 Batch 50 Loss 1.6029\n",
      "Epoch 1 Batch 51 Loss 1.5984\n",
      "Epoch 1 Batch 52 Loss 1.6049\n",
      "Epoch 1 Batch 53 Loss 1.5036\n",
      "Epoch 1 Batch 54 Loss 1.4825\n",
      "Epoch 1 Batch 55 Loss 1.8050\n",
      "Epoch 1 Batch 56 Loss 1.8201\n",
      "Epoch 1 Batch 57 Loss 1.6923\n",
      "Epoch 1 Batch 58 Loss 1.9974\n",
      "Epoch 1 Batch 59 Loss 1.5161\n",
      "Epoch 1 Batch 60 Loss 1.4336\n",
      "Epoch 1 Batch 61 Loss 1.7174\n",
      "Epoch 1 Batch 62 Loss 1.7826\n",
      "Epoch 1 Batch 63 Loss 1.7413\n",
      "Epoch 1 Batch 64 Loss 1.6653\n",
      "Epoch 1 Batch 65 Loss 1.6299\n",
      "Epoch 1 Batch 66 Loss 1.7340\n",
      "Epoch 1 Batch 67 Loss 1.8624\n",
      "Epoch 1 Batch 68 Loss 1.7423\n",
      "Epoch 1 Batch 69 Loss 1.5384\n",
      "Epoch 1 Batch 70 Loss 1.8267\n",
      "Epoch 1 Batch 71 Loss 1.4572\n",
      "Epoch 1 Batch 72 Loss 1.5173\n",
      "Epoch 1 Batch 73 Loss 1.9097\n",
      "Epoch 1 Batch 74 Loss 1.7015\n",
      "Epoch 1 Batch 75 Loss 1.6560\n",
      "Epoch 1 Batch 76 Loss 1.5623\n",
      "Epoch 1 Batch 77 Loss 1.5128\n",
      "Epoch 1 Batch 78 Loss 1.8828\n",
      "Epoch 1 Batch 79 Loss 1.6679\n",
      "Epoch 1 Batch 80 Loss 1.5302\n",
      "Epoch 1 Batch 81 Loss 1.9834\n",
      "Epoch 1 Batch 82 Loss 1.9744\n",
      "Epoch 1 Batch 83 Loss 1.8983\n",
      "Epoch 1 Batch 84 Loss 2.0430\n",
      "Epoch 1 Batch 85 Loss 1.5188\n",
      "Epoch 1 Batch 86 Loss 1.9161\n",
      "Epoch 1 Batch 87 Loss 1.5648\n",
      "Epoch 1 Batch 88 Loss 1.5787\n",
      "Epoch 1 Batch 89 Loss 1.8674\n",
      "Epoch 1 Batch 90 Loss 1.5756\n",
      "Epoch 1 Batch 91 Loss 1.7824\n",
      "Epoch 1 Batch 92 Loss 1.8874\n",
      "Epoch 1 Batch 93 Loss 1.5909\n",
      "Epoch 1 Batch 94 Loss 1.6428\n",
      "Epoch 1 Batch 95 Loss 1.6460\n",
      "Epoch 1 Batch 96 Loss 1.8805\n",
      "Epoch 1 Batch 97 Loss 1.9919\n",
      "Epoch 1 Batch 98 Loss 1.4913\n",
      "Epoch 1 Batch 99 Loss 1.6394\n",
      "Epoch 1 Batch 100 Loss 2.0660\n",
      "Epoch 1 Batch 101 Loss 1.8377\n",
      "Epoch 1 Batch 102 Loss 1.6172\n",
      "Epoch 1 Batch 103 Loss 1.6978\n",
      "Epoch 1 Batch 104 Loss 1.7793\n",
      "Epoch 1 Batch 105 Loss 1.6627\n",
      "Epoch 1 Batch 106 Loss 1.6201\n",
      "Epoch 1 Batch 107 Loss 1.6366\n",
      "Epoch 1 Batch 108 Loss 1.7051\n",
      "Epoch 1 Batch 109 Loss 1.8294\n",
      "Epoch 1 Batch 110 Loss 1.8741\n",
      "Epoch 1 Batch 111 Loss 1.6165\n",
      "Epoch 1 Batch 112 Loss 1.8010\n",
      "Epoch 1 Batch 113 Loss 1.9992\n",
      "Epoch 1 Batch 114 Loss 1.7232\n",
      "Epoch 1 Batch 115 Loss 1.5221\n",
      "Epoch 1 Batch 116 Loss 1.8215\n",
      "Epoch 1 Batch 117 Loss 1.6954\n",
      "Epoch 1 Batch 118 Loss 1.7681\n",
      "Epoch 1 Batch 119 Loss 1.5652\n",
      "Epoch 1 Batch 120 Loss 1.8171\n",
      "Epoch 1 Batch 121 Loss 1.6996\n",
      "Epoch 1 Batch 122 Loss 1.6984\n",
      "Epoch 1 Batch 123 Loss 1.8010\n",
      "Epoch 1 Batch 124 Loss 1.8352\n",
      "Epoch 1 Batch 125 Loss 1.8202\n",
      "Epoch 1 Batch 126 Loss 1.7275\n",
      "Epoch 1 Batch 127 Loss 1.5857\n",
      "Epoch 1 Batch 128 Loss 1.6832\n",
      "Epoch 1 Batch 129 Loss 1.6832\n",
      "Epoch 1 Batch 130 Loss 1.7758\n",
      "Epoch 1 Batch 131 Loss 1.7406\n",
      "Epoch 1 Batch 132 Loss 1.7411\n",
      "Epoch 1 Batch 133 Loss 1.6715\n",
      "Epoch 1 Batch 134 Loss 1.8444\n",
      "Epoch 1 Batch 135 Loss 1.7476\n",
      "Epoch 1 Batch 136 Loss 1.6954\n",
      "Epoch 1 Batch 137 Loss 1.7376\n",
      "Epoch 1 Batch 138 Loss 1.9726\n",
      "Epoch 1 Batch 139 Loss 1.7674\n",
      "Epoch 1 Batch 140 Loss 1.6717\n",
      "Epoch 1 Batch 141 Loss 1.9866\n",
      "Epoch 1 Batch 142 Loss 1.7038\n",
      "Epoch 1 Batch 143 Loss 2.1466\n",
      "Epoch 1 Batch 144 Loss 1.7834\n",
      "Epoch 1 Batch 145 Loss 1.7637\n",
      "Epoch 1 Batch 146 Loss 1.9141\n",
      "Epoch 1 Batch 147 Loss 1.7914\n",
      "Epoch 1 Batch 148 Loss 1.7983\n",
      "Epoch 1 Batch 149 Loss 1.7692\n",
      "Epoch 1 Batch 150 Loss 1.7329\n",
      "Epoch 1 Batch 151 Loss 1.8309\n",
      "Epoch 1 Batch 152 Loss 1.5301\n",
      "Epoch 1 Batch 153 Loss 1.6708\n",
      "Epoch 1 Batch 154 Loss 1.8475\n",
      "Epoch 1 Batch 155 Loss 1.7277\n",
      "Epoch 1 Batch 156 Loss 1.8215\n",
      "Epoch 1 Batch 157 Loss 1.9530\n",
      "Epoch 1 Batch 158 Loss 1.6603\n",
      "Epoch 1 Batch 159 Loss 1.8356\n",
      "Epoch 1 Batch 160 Loss 1.9003\n",
      "Epoch 1 Batch 161 Loss 1.8324\n",
      "Epoch 1 Batch 162 Loss 1.9188\n",
      "Epoch 1 Batch 163 Loss 1.7707\n",
      "Epoch 1 Batch 164 Loss 1.7612\n",
      "Epoch 1 Batch 165 Loss 1.6972\n",
      "Epoch 1 Batch 166 Loss 1.6992\n",
      "Epoch 1 Batch 167 Loss 1.7935\n",
      "Epoch 1 Batch 168 Loss 1.8193\n",
      "Epoch 1 Batch 169 Loss 1.7392\n",
      "Epoch 1 Batch 170 Loss 1.8587\n",
      "Epoch 1 Batch 171 Loss 2.0300\n",
      "Epoch 1 Batch 172 Loss 2.0834\n",
      "Epoch 1 Batch 173 Loss 1.8462\n",
      "Epoch 1 Batch 174 Loss 1.9854\n",
      "Epoch 1 Batch 175 Loss 1.8212\n",
      "Epoch 1 Batch 176 Loss 1.9385\n",
      "Epoch 1 Batch 177 Loss 1.7466\n",
      "Epoch 1 Batch 178 Loss 1.8450\n",
      "Epoch 1 Batch 179 Loss 1.7808\n",
      "Epoch 1 Batch 180 Loss 1.7387\n",
      "Epoch 1 Batch 181 Loss 2.0596\n",
      "Epoch 1 Batch 182 Loss 1.8885\n",
      "Epoch 1 Batch 183 Loss 1.8820\n",
      "Epoch 1 Batch 184 Loss 1.7956\n",
      "Epoch 1 Batch 185 Loss 1.9488\n",
      "Epoch 1 Batch 186 Loss 2.0076\n",
      "Epoch 1 Batch 187 Loss 1.8152\n",
      "Epoch 1 Batch 188 Loss 1.6750\n",
      "Epoch 1 Batch 189 Loss 2.0363\n",
      "Epoch 1 Batch 190 Loss 2.0424\n",
      "Epoch 1 Batch 191 Loss 1.7411\n",
      "Epoch 1 Batch 192 Loss 1.8157\n",
      "Epoch 1 Batch 193 Loss 1.9587\n",
      "Epoch 1 Batch 194 Loss 1.7192\n",
      "Epoch 1 Batch 195 Loss 1.7160\n",
      "Epoch 1 Batch 196 Loss 1.9108\n",
      "Epoch 1 Batch 197 Loss 1.8700\n",
      "Epoch 1 Batch 198 Loss 1.8412\n",
      "Epoch 1 Batch 199 Loss 1.4975\n",
      "Epoch 1 Batch 200 Loss 1.8703\n",
      "Epoch 1 Batch 201 Loss 1.9431\n",
      "Epoch 1 Batch 202 Loss 2.1904\n",
      "Epoch 1 Batch 203 Loss 2.0176\n",
      "Epoch 1 Batch 204 Loss 1.6834\n",
      "Epoch 1 Batch 205 Loss 1.8573\n",
      "Epoch 1 Batch 206 Loss 1.7791\n",
      "Epoch 1 Batch 207 Loss 1.6952\n",
      "Epoch 1 Batch 208 Loss 1.9663\n",
      "Epoch 1 Batch 209 Loss 1.8860\n",
      "Epoch 1 Batch 210 Loss 1.7005\n",
      "Epoch 1 Batch 211 Loss 1.8970\n",
      "Epoch 1 Batch 212 Loss 1.8910\n",
      "Epoch 1 Batch 213 Loss 1.8350\n",
      "Epoch 1 Batch 214 Loss 1.9680\n",
      "Epoch 1 Batch 215 Loss 1.8998\n",
      "Epoch 1 Batch 216 Loss 1.6643\n",
      "Epoch 1 Batch 217 Loss 2.0132\n",
      "Epoch 1 Batch 218 Loss 1.7051\n",
      "Epoch 1 Batch 219 Loss 1.8680\n",
      "Epoch 1 Batch 220 Loss 1.9779\n",
      "Epoch 1 Batch 221 Loss 1.9002\n",
      "Epoch 1 Batch 222 Loss 1.8527\n",
      "Epoch 1 Batch 223 Loss 1.7091\n",
      "Epoch 1 Batch 224 Loss 1.7128\n",
      "Epoch 1 Batch 225 Loss 2.0267\n",
      "Epoch 1 Batch 226 Loss 1.8044\n",
      "Epoch 1 Batch 227 Loss 1.8102\n",
      "Epoch 1 Batch 228 Loss 1.8552\n",
      "Epoch 1 Batch 229 Loss 2.1226\n",
      "Epoch 1 Batch 230 Loss 1.8503\n",
      "Epoch 1 Batch 231 Loss 2.0112\n",
      "Epoch 1 Batch 232 Loss 1.9263\n",
      "Epoch 1 Batch 233 Loss 1.9873\n",
      "Epoch 1 Batch 234 Loss 1.9859\n",
      "Epoch 1 Batch 235 Loss 2.0439\n",
      "Epoch 1 Batch 236 Loss 2.0738\n",
      "Epoch 1 Batch 237 Loss 1.8934\n",
      "Epoch 1 Batch 238 Loss 1.7842\n",
      "Epoch 1 Batch 239 Loss 2.0296\n",
      "Epoch 1 Batch 240 Loss 2.0539\n",
      "Epoch 1 Batch 241 Loss 1.9774\n",
      "Epoch 1 Batch 242 Loss 1.7457\n",
      "Epoch 1 Batch 243 Loss 1.9531\n",
      "Epoch 1 Batch 244 Loss 2.2315\n",
      "Epoch 1 Batch 245 Loss 2.0544\n",
      "Epoch 1 Batch 246 Loss 2.0028\n",
      "Epoch 1 Batch 247 Loss 1.7112\n",
      "Epoch 1 Batch 248 Loss 1.9369\n",
      "Epoch 1 Batch 249 Loss 2.0435\n",
      "Epoch 1 Batch 250 Loss 1.9690\n",
      "Epoch 1 Batch 251 Loss 2.2698\n",
      "Epoch 1 Batch 252 Loss 1.8645\n",
      "Epoch 1 Batch 253 Loss 2.0477\n",
      "Epoch 1 Batch 254 Loss 1.9907\n",
      "Epoch 1 Batch 255 Loss 2.1451\n",
      "Epoch 1 Batch 256 Loss 1.8890\n",
      "Epoch 1 Batch 257 Loss 1.6437\n",
      "Epoch 1 Batch 258 Loss 1.7487\n",
      "Epoch 1 Batch 259 Loss 1.9251\n",
      "Epoch 1 Batch 260 Loss 1.9249\n",
      "Epoch 1 Batch 261 Loss 1.9371\n",
      "Epoch 1 Batch 262 Loss 1.8239\n",
      "Epoch 1 Batch 263 Loss 2.0053\n",
      "Epoch 1 Batch 264 Loss 1.9448\n",
      "Epoch 1 Batch 265 Loss 1.9151\n",
      "Epoch 1 Batch 266 Loss 1.7619\n",
      "Epoch 1 Batch 267 Loss 2.0812\n",
      "Epoch 1 Batch 268 Loss 1.8676\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1 Batch 269 Loss 1.8323\n",
      "Epoch 1 Batch 270 Loss 1.7659\n",
      "Epoch 1 Batch 271 Loss 1.8500\n",
      "Epoch 1 Batch 272 Loss 1.9036\n",
      "Epoch 1 Batch 273 Loss 2.1829\n",
      "Epoch 1 Batch 274 Loss 1.9251\n",
      "Epoch 1 Batch 275 Loss 2.0485\n",
      "Epoch 1 Batch 276 Loss 1.8061\n",
      "Epoch 1 Batch 277 Loss 1.8919\n",
      "Epoch 1 Batch 278 Loss 1.8570\n",
      "Epoch 1 Batch 279 Loss 2.2017\n",
      "Epoch 1 Batch 280 Loss 2.0003\n",
      "Epoch 1 Batch 281 Loss 2.0004\n",
      "Epoch 1 Batch 282 Loss 2.0420\n",
      "Epoch 1 Batch 283 Loss 1.9461\n",
      "Epoch 1 Batch 284 Loss 1.8749\n",
      "Epoch 1 Batch 285 Loss 1.7944\n",
      "Epoch 1 Batch 286 Loss 2.0232\n",
      "Epoch 1 Batch 287 Loss 1.9170\n",
      "Epoch 1 Batch 288 Loss 1.8783\n",
      "Epoch 1 Batch 289 Loss 2.0081\n",
      "Epoch 1 Batch 290 Loss 1.9733\n",
      "Epoch 1 Batch 291 Loss 1.8772\n",
      "Epoch 1 Batch 292 Loss 2.0338\n",
      "Epoch 1 Batch 293 Loss 1.8663\n",
      "Epoch 1 Batch 294 Loss 1.8422\n",
      "Epoch 1 Batch 295 Loss 2.1739\n",
      "Epoch 1 Batch 296 Loss 1.7846\n",
      "Epoch 1 Batch 297 Loss 2.2070\n",
      "Epoch 1 Batch 298 Loss 1.9933\n",
      "Epoch 1 Batch 299 Loss 1.9514\n",
      "Epoch 1 Batch 300 Loss 1.9728\n",
      "Epoch 1 Batch 301 Loss 1.8552\n",
      "Epoch 1 Batch 302 Loss 1.8709\n",
      "Epoch 1 Batch 303 Loss 1.8983\n",
      "Epoch 1 Batch 304 Loss 1.8834\n",
      "Epoch 1 Batch 305 Loss 1.9603\n",
      "Epoch 1 Batch 306 Loss 2.1149\n",
      "Epoch 1 Batch 307 Loss 2.0703\n",
      "Epoch 1 Batch 308 Loss 2.1441\n",
      "Epoch 1 Batch 309 Loss 1.9075\n",
      "Epoch 1 Batch 310 Loss 2.0951\n",
      "Epoch 1 Batch 311 Loss 1.8121\n",
      "Epoch 1 Batch 312 Loss 1.8375\n",
      "Epoch 1 Batch 313 Loss 1.8483\n",
      "Epoch 1 Batch 314 Loss 2.0730\n",
      "Epoch 1 Batch 315 Loss 2.0877\n",
      "Epoch 1 Batch 316 Loss 2.1690\n",
      "Epoch 1 Batch 317 Loss 1.9204\n",
      "Epoch 1 Batch 318 Loss 1.8186\n",
      "Epoch 1 Batch 319 Loss 1.9901\n",
      "Epoch 1 Batch 320 Loss 2.3425\n",
      "Epoch 1 Batch 321 Loss 2.1245\n",
      "Epoch 1 Batch 322 Loss 1.8009\n",
      "Epoch 1 Batch 323 Loss 1.8152\n",
      "Epoch 1 Batch 324 Loss 2.0775\n",
      "Epoch 1 Batch 325 Loss 2.0959\n",
      "Epoch 1 Batch 326 Loss 1.6210\n",
      "Epoch 1 Batch 327 Loss 1.9900\n",
      "Epoch 1 Batch 328 Loss 2.0272\n",
      "Epoch 1 Batch 329 Loss 1.8848\n",
      "Epoch 1 Batch 330 Loss 1.9747\n",
      "Epoch 1 Batch 331 Loss 1.7621\n",
      "Epoch 1 Batch 332 Loss 1.9857\n",
      "Epoch 1 Batch 333 Loss 1.8421\n",
      "Epoch 1 Batch 334 Loss 2.1434\n",
      "Epoch 1 Batch 335 Loss 1.8419\n",
      "Epoch 1 Batch 336 Loss 2.1317\n",
      "Epoch 1 Batch 337 Loss 1.8584\n",
      "Epoch 1 Batch 338 Loss 2.2769\n",
      "Epoch 1 Batch 339 Loss 1.9548\n",
      "Epoch 1 Batch 340 Loss 1.9752\n",
      "Epoch 1 Batch 341 Loss 1.8648\n",
      "Epoch 1 Batch 342 Loss 1.9042\n",
      "Epoch 1 Batch 343 Loss 2.1521\n",
      "Epoch 1 Batch 344 Loss 1.9607\n",
      "Epoch 1 Batch 345 Loss 2.0030\n",
      "Epoch 1 Batch 346 Loss 1.8496\n",
      "Epoch 1 Batch 347 Loss 1.9805\n",
      "Epoch 1 Batch 348 Loss 2.2172\n",
      "Epoch 1 Batch 349 Loss 1.8026\n",
      "Epoch 1 Batch 350 Loss 2.1322\n",
      "Epoch 1 Batch 351 Loss 2.2706\n",
      "Epoch 1 Batch 352 Loss 2.2188\n",
      "Epoch 1 Batch 353 Loss 1.8742\n",
      "Epoch 1 Batch 354 Loss 1.9689\n",
      "Epoch 1 Batch 355 Loss 1.8372\n",
      "Epoch 1 Batch 356 Loss 2.1413\n",
      "Epoch 1 Batch 357 Loss 1.8385\n",
      "Epoch 1 Batch 358 Loss 1.8821\n",
      "Epoch 1 Batch 359 Loss 2.0933\n",
      "Epoch 1 Batch 360 Loss 2.0910\n",
      "Epoch 1 Batch 361 Loss 1.8499\n",
      "Epoch 1 Batch 362 Loss 1.8808\n",
      "Epoch 1 Batch 363 Loss 2.0027\n",
      "Epoch 1 Batch 364 Loss 1.9534\n",
      "Epoch 1 Batch 365 Loss 1.7315\n",
      "Epoch 1 Batch 366 Loss 1.8237\n",
      "Epoch 1 Batch 367 Loss 1.9000\n",
      "Epoch 1 Batch 368 Loss 2.1003\n",
      "Epoch 1 Batch 369 Loss 2.2237\n",
      "Epoch 1 Batch 370 Loss 2.2754\n",
      "Epoch 1 Batch 371 Loss 1.7817\n",
      "Epoch 1 Batch 372 Loss 1.9578\n",
      "Epoch 1 Batch 373 Loss 1.9626\n",
      "Epoch 1 Batch 374 Loss 2.0388\n",
      "Epoch 1 Batch 375 Loss 2.1275\n",
      "Epoch 1 Batch 376 Loss 1.5109\n",
      "Epoch 1 Batch 377 Loss 1.8387\n",
      "Epoch 1 Batch 378 Loss 1.8751\n",
      "Epoch 1 Batch 379 Loss 2.1428\n",
      "Epoch 1 Batch 380 Loss 2.2847\n",
      "Epoch 1 Batch 381 Loss 2.0284\n",
      "Epoch 1 Batch 382 Loss 2.2226\n",
      "Epoch 1 Batch 383 Loss 2.0158\n",
      "Epoch 1 Batch 384 Loss 2.3533\n",
      "Epoch 1 Batch 385 Loss 1.9849\n",
      "Epoch 1 Batch 386 Loss 1.8923\n",
      "Epoch 1 Batch 387 Loss 2.0170\n",
      "Epoch 1 Batch 388 Loss 1.9983\n",
      "Epoch 1 Batch 389 Loss 2.0535\n",
      "Epoch 1 Batch 390 Loss 2.0004\n",
      "Epoch 1 Batch 391 Loss 2.1179\n",
      "Epoch 1 Batch 392 Loss 2.1174\n",
      "Epoch 1 Batch 393 Loss 2.2538\n",
      "Epoch 1 Batch 394 Loss 1.9229\n",
      "Epoch 1 Batch 395 Loss 1.9531\n",
      "Epoch 1 Batch 396 Loss 1.8817\n",
      "Epoch 1 Batch 397 Loss 2.0861\n",
      "Epoch 1 Batch 398 Loss 2.2567\n",
      "Epoch 1 Batch 399 Loss 2.3160\n",
      "Epoch 1 Batch 400 Loss 2.1172\n",
      "Epoch 1 Batch 401 Loss 1.7468\n",
      "Epoch 1 Batch 402 Loss 1.9388\n",
      "Epoch 1 Batch 403 Loss 2.1659\n",
      "Epoch 1 Batch 404 Loss 1.8826\n",
      "Epoch 1 Batch 405 Loss 1.7809\n",
      "Epoch 1 Batch 406 Loss 2.1724\n",
      "Epoch 1 Batch 407 Loss 1.9250\n",
      "Epoch 1 Batch 408 Loss 1.8613\n",
      "Epoch 1 Batch 409 Loss 2.0281\n",
      "Epoch 1 Batch 410 Loss 1.9710\n",
      "Epoch 1 Batch 411 Loss 1.8703\n",
      "Epoch 1 Batch 412 Loss 2.4808\n",
      "Epoch 1 Batch 413 Loss 2.0461\n",
      "Epoch 1 Batch 414 Loss 2.0047\n",
      "Epoch 1 Batch 415 Loss 1.8016\n",
      "Epoch 1 Batch 416 Loss 1.9259\n",
      "Epoch 1 Batch 417 Loss 1.8464\n",
      "Epoch 1 Batch 418 Loss 2.0935\n",
      "Epoch 1 Batch 419 Loss 2.0574\n",
      "Epoch 1 Batch 420 Loss 1.9019\n",
      "Epoch 1 Batch 421 Loss 2.0283\n",
      "Epoch 1 Batch 422 Loss 1.9569\n",
      "Epoch 1 Batch 423 Loss 2.0538\n",
      "Epoch 1 Batch 424 Loss 2.0804\n",
      "Epoch 1 Batch 425 Loss 2.3855\n",
      "Epoch 1 Batch 426 Loss 2.0239\n",
      "Epoch 1 Batch 427 Loss 2.1939\n",
      "Epoch 1 Batch 428 Loss 2.0407\n",
      "Epoch 1 Batch 429 Loss 2.1598\n",
      "Epoch 1 Batch 430 Loss 2.1517\n",
      "Epoch 1 Batch 431 Loss 2.0493\n",
      "Epoch 1 Batch 432 Loss 1.8125\n",
      "Epoch 1 Batch 433 Loss 2.2048\n",
      "Epoch 1 Batch 434 Loss 1.8750\n",
      "Epoch 1 Batch 435 Loss 2.0230\n",
      "Epoch 1 Batch 436 Loss 1.9682\n",
      "Epoch 1 Batch 437 Loss 2.1808\n",
      "Epoch 1 Batch 438 Loss 1.8497\n",
      "Epoch 1 Batch 439 Loss 2.2074\n",
      "Epoch 1 Batch 440 Loss 2.2057\n",
      "Epoch 1 Batch 441 Loss 2.1663\n",
      "Epoch 1 Batch 442 Loss 2.1183\n",
      "Epoch 1 Batch 443 Loss 2.1844\n",
      "Epoch 1 Batch 444 Loss 2.0727\n",
      "Epoch 1 Batch 445 Loss 2.0061\n",
      "Epoch 1 Batch 446 Loss 1.9874\n",
      "Epoch 1 Batch 447 Loss 1.9207\n",
      "Epoch 1 Batch 448 Loss 2.1982\n",
      "Epoch 1 Batch 449 Loss 2.2576\n",
      "Epoch 1 Batch 450 Loss 1.9102\n",
      "Epoch 1 Batch 451 Loss 2.1272\n",
      "Epoch 1 Batch 452 Loss 2.2251\n",
      "Epoch 1 Batch 453 Loss 1.8596\n",
      "Epoch 1 Batch 454 Loss 2.2783\n",
      "Epoch 1 Batch 455 Loss 2.1873\n",
      "Epoch 1 Batch 456 Loss 2.1091\n",
      "Epoch 1 Batch 457 Loss 2.2247\n",
      "Epoch 1 Batch 458 Loss 2.0295\n",
      "Epoch 1 Batch 459 Loss 1.8620\n",
      "Epoch 1 Batch 460 Loss 2.3030\n",
      "Epoch 1 Batch 461 Loss 1.9429\n",
      "Epoch 1 Batch 462 Loss 1.9651\n",
      "Epoch 1 Batch 463 Loss 2.0837\n",
      "Epoch 1 Batch 464 Loss 2.2595\n",
      "Epoch 1 Batch 465 Loss 2.2552\n",
      "Epoch 1 Batch 466 Loss 2.1520\n",
      "Epoch 1 Batch 467 Loss 2.3652\n",
      "Epoch 1 Batch 468 Loss 2.4737\n",
      "Epoch 1 Batch 469 Loss 1.9917\n",
      "Epoch 1 Batch 470 Loss 1.8806\n",
      "Epoch 1 Batch 471 Loss 1.9986\n",
      "Epoch 1 Batch 472 Loss 1.8951\n",
      "Epoch 1 Batch 473 Loss 2.1748\n",
      "Epoch 1 Batch 474 Loss 2.3868\n",
      "Epoch 1 Batch 475 Loss 2.0057\n",
      "Epoch 1 Batch 476 Loss 2.0703\n",
      "Epoch 1 Batch 477 Loss 2.1534\n",
      "Epoch 1 Batch 478 Loss 2.4221\n",
      "Epoch 1 Batch 479 Loss 2.0577\n",
      "Epoch 1 Batch 480 Loss 1.9565\n",
      "Epoch 1 Batch 481 Loss 1.9303\n",
      "Epoch 1 Batch 482 Loss 2.1481\n",
      "Epoch 1 Batch 483 Loss 2.0211\n",
      "Epoch 1 Batch 484 Loss 1.7181\n",
      "Epoch 1 Batch 485 Loss 2.0836\n",
      "Epoch 1 Batch 486 Loss 1.8589\n",
      "Epoch 1 Batch 487 Loss 2.1635\n",
      "Epoch 1 Batch 488 Loss 2.0112\n",
      "Epoch 1 Batch 489 Loss 2.0815\n",
      "Epoch 1 Batch 490 Loss 1.8369\n",
      "Epoch 1 Batch 491 Loss 2.2630\n",
      "Epoch 1 Batch 492 Loss 1.8827\n",
      "Epoch 1 Batch 493 Loss 2.0762\n",
      "Epoch 1 Batch 494 Loss 2.1146\n",
      "Epoch 1 Batch 495 Loss 2.0173\n",
      "Epoch 1 Batch 496 Loss 2.2171\n",
      "Epoch 1 Batch 497 Loss 2.0834\n",
      "Epoch 1 Batch 498 Loss 2.3227\n",
      "Epoch 1 Batch 499 Loss 1.8397\n",
      "Epoch 1 Batch 500 Loss 2.3400\n",
      "Epoch 1 Batch 501 Loss 2.1806\n",
      "Epoch 1 Batch 502 Loss 1.7627\n",
      "Epoch 1 Batch 503 Loss 2.1200\n",
      "Epoch 1 Batch 504 Loss 1.8505\n",
      "Epoch 1 Batch 505 Loss 2.0801\n",
      "Epoch 1 Batch 506 Loss 2.2921\n",
      "Epoch 1 Batch 507 Loss 1.7287\n",
      "Epoch 1 Batch 508 Loss 1.9305\n",
      "Epoch 1 Batch 509 Loss 1.9024\n",
      "Epoch 1 Batch 510 Loss 1.8514\n",
      "Epoch 1 Batch 511 Loss 2.1278\n",
      "Epoch 1 Batch 512 Loss 2.2934\n",
      "Epoch 1 Batch 513 Loss 2.3296\n",
      "Epoch 1 Batch 514 Loss 2.0729\n",
      "Epoch 1 Batch 515 Loss 2.0398\n",
      "Epoch 1 Batch 516 Loss 1.9118\n",
      "Epoch 1 Batch 517 Loss 2.1391\n",
      "Epoch 1 Batch 518 Loss 1.9252\n",
      "Epoch 1 Batch 519 Loss 2.2217\n",
      "Epoch 1 Batch 520 Loss 1.9197\n",
      "Epoch 1 Batch 521 Loss 1.8667\n",
      "Epoch 1 Batch 522 Loss 1.9300\n",
      "Epoch 1 Batch 523 Loss 2.0582\n",
      "Epoch 1 Batch 524 Loss 2.2234\n",
      "Epoch 1 Batch 525 Loss 2.2295\n",
      "Epoch 1 Batch 526 Loss 1.8889\n",
      "Epoch 1 Batch 527 Loss 1.9911\n",
      "Epoch 1 Batch 528 Loss 2.2359\n",
      "Epoch 1 Batch 529 Loss 2.1853\n",
      "Epoch 1 Batch 530 Loss 1.9327\n",
      "Epoch 1 Batch 531 Loss 2.0367\n",
      "Epoch 1 Batch 532 Loss 1.9232\n",
      "Epoch 1 Batch 533 Loss 1.9291\n",
      "Epoch 1 Batch 534 Loss 2.4094\n",
      "Epoch 1 Batch 535 Loss 2.0516\n",
      "Epoch 1 Batch 536 Loss 1.9151\n",
      "Epoch 1 Batch 537 Loss 2.2366\n",
      "Epoch 1 Batch 538 Loss 1.6861\n",
      "Epoch 1 Batch 539 Loss 1.8731\n",
      "Epoch 1 Batch 540 Loss 2.1587\n",
      "Epoch 1 Batch 541 Loss 1.8929\n",
      "Epoch 1 Batch 542 Loss 2.1809\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1 Batch 543 Loss 2.1503\n",
      "Epoch 1 Batch 544 Loss 2.1424\n",
      "Epoch 1 Batch 545 Loss 1.9070\n",
      "Epoch 1 Batch 546 Loss 2.1906\n",
      "Epoch 1 Batch 547 Loss 1.9149\n",
      "Epoch 1 Batch 548 Loss 1.9342\n",
      "Epoch 1 Batch 549 Loss 2.1799\n",
      "Epoch 1 Batch 550 Loss 2.1262\n",
      "Epoch 1 Batch 551 Loss 2.0927\n",
      "Epoch 1 Batch 552 Loss 2.1668\n",
      "Epoch 1 Batch 553 Loss 2.1099\n",
      "Epoch 1 Batch 554 Loss 2.2421\n",
      "Epoch 1 Batch 555 Loss 2.1720\n",
      "Epoch 1 Batch 556 Loss 2.0439\n",
      "Epoch 1 Batch 557 Loss 2.2366\n",
      "Epoch 1 Batch 558 Loss 1.9358\n",
      "Epoch 1 Batch 559 Loss 1.9578\n",
      "Epoch 1 Batch 560 Loss 2.1301\n",
      "Epoch 1 Batch 561 Loss 2.2268\n",
      "Epoch 1 Batch 562 Loss 1.9175\n",
      "Epoch 1 Batch 563 Loss 1.9149\n",
      "Epoch 1 Batch 564 Loss 1.8875\n",
      "Epoch 1 Batch 565 Loss 2.1217\n",
      "Epoch 1 Batch 566 Loss 1.9711\n",
      "Epoch 1 Batch 567 Loss 1.9571\n",
      "Epoch 1 Batch 568 Loss 2.1765\n",
      "Epoch 1 Batch 569 Loss 1.9937\n",
      "Epoch 1 Batch 570 Loss 1.8339\n",
      "Epoch 1 Batch 571 Loss 2.0439\n",
      "Epoch 1 Batch 572 Loss 1.9174\n",
      "Epoch 1 Batch 573 Loss 2.0088\n",
      "Epoch 1 Batch 574 Loss 2.1908\n",
      "Epoch 1 Batch 575 Loss 1.9596\n",
      "Epoch 1 Batch 576 Loss 2.0997\n",
      "Epoch 1 Batch 577 Loss 2.3441\n",
      "Epoch 1 Batch 578 Loss 2.1765\n",
      "Epoch 1 Batch 579 Loss 2.0268\n",
      "Epoch 1 Batch 580 Loss 1.4932\n",
      "Epoch 1 Batch 581 Loss 1.9411\n",
      "Epoch 1 Batch 582 Loss 2.0554\n",
      "Epoch 1 Batch 583 Loss 1.9186\n",
      "Epoch 1 Batch 584 Loss 2.0432\n",
      "Epoch 1 Batch 585 Loss 2.0648\n",
      "Epoch 1 Batch 586 Loss 2.2228\n",
      "Epoch 1 Batch 587 Loss 2.3214\n",
      "Epoch 1 Batch 588 Loss 2.4010\n",
      "Epoch 1 Batch 589 Loss 2.1560\n",
      "Epoch 1 Batch 590 Loss 1.8820\n",
      "Epoch 1 Batch 591 Loss 1.9663\n",
      "Epoch 1 Batch 592 Loss 2.1986\n",
      "Epoch 1 Batch 593 Loss 2.0604\n",
      "Epoch 1 Batch 594 Loss 2.1478\n",
      "Epoch 1 Batch 595 Loss 2.1972\n",
      "Epoch 1 Batch 596 Loss 2.1498\n",
      "Epoch 1 Batch 597 Loss 1.9874\n",
      "Epoch 1 Batch 598 Loss 2.0401\n",
      "Epoch 1 Batch 599 Loss 2.1368\n",
      "Epoch 1 Batch 600 Loss 1.9648\n",
      "Epoch 1 Batch 601 Loss 2.3406\n",
      "Epoch 1 Batch 602 Loss 2.1326\n",
      "Epoch 1 Batch 603 Loss 1.8206\n",
      "Epoch 1 Batch 604 Loss 2.0997\n",
      "Epoch 1 Batch 605 Loss 2.0893\n",
      "Epoch 1 Batch 606 Loss 1.9869\n",
      "Epoch 1 Batch 607 Loss 2.3298\n",
      "Epoch 1 Batch 608 Loss 1.8135\n",
      "Epoch 1 Batch 609 Loss 2.0826\n",
      "Epoch 1 Batch 610 Loss 2.1530\n",
      "Epoch 1 Batch 611 Loss 1.8819\n",
      "Epoch 1 Batch 612 Loss 2.3641\n",
      "Epoch 1 Batch 613 Loss 2.1801\n",
      "Epoch 1 Batch 614 Loss 2.0179\n",
      "Epoch 1 Batch 615 Loss 2.3118\n",
      "Epoch 1 Batch 616 Loss 1.8243\n",
      "Epoch 1 Batch 617 Loss 1.8963\n",
      "Epoch 1 Batch 618 Loss 2.0802\n",
      "Epoch 1 Batch 619 Loss 1.7951\n",
      "Epoch 1 Batch 620 Loss 2.2807\n",
      "Epoch 1 Batch 621 Loss 2.0218\n",
      "Epoch 1 Batch 622 Loss 2.1910\n",
      "Epoch 1 Batch 623 Loss 1.9780\n",
      "Epoch 1 Batch 624 Loss 2.0784\n",
      "Epoch 1 Batch 625 Loss 1.9485\n",
      "Epoch 1 Batch 626 Loss 1.8019\n",
      "Epoch 1 Batch 627 Loss 2.2413\n",
      "Epoch 1 Batch 628 Loss 1.9860\n",
      "Epoch 1 Batch 629 Loss 1.9219\n",
      "Epoch 1 Batch 630 Loss 2.3374\n",
      "Epoch 1 Batch 631 Loss 1.9940\n",
      "Epoch 1 Batch 632 Loss 2.2541\n",
      "Epoch 1 Batch 633 Loss 1.8699\n",
      "Epoch 1 Batch 634 Loss 2.1018\n",
      "Epoch 1 Batch 635 Loss 2.1288\n",
      "Epoch 1 Batch 636 Loss 2.1052\n",
      "Epoch 1 Batch 637 Loss 1.9549\n",
      "Epoch 1 Batch 638 Loss 1.9622\n",
      "Epoch 1 Batch 639 Loss 2.2572\n",
      "Epoch 1 Batch 640 Loss 2.1438\n",
      "Epoch 1 Batch 641 Loss 2.2746\n",
      "Epoch 1 Batch 642 Loss 2.0145\n",
      "Epoch 1 Batch 643 Loss 2.1482\n",
      "Epoch 1 Batch 644 Loss 1.8891\n",
      "Epoch 1 Batch 645 Loss 1.9607\n",
      "Epoch 1 Batch 646 Loss 2.2493\n",
      "Epoch 1 Batch 647 Loss 1.9901\n",
      "Epoch 1 Batch 648 Loss 2.1019\n",
      "Epoch 1 Batch 649 Loss 2.2552\n",
      "Epoch 1 Batch 650 Loss 2.0975\n",
      "Epoch 1 Batch 651 Loss 2.0116\n",
      "Epoch 1 Batch 652 Loss 1.8585\n",
      "Epoch 1 Batch 653 Loss 2.1621\n",
      "Epoch 1 Batch 654 Loss 2.3054\n",
      "Epoch 1 Batch 655 Loss 2.0699\n",
      "Epoch 1 Batch 656 Loss 2.2050\n",
      "Epoch 1 Batch 657 Loss 1.9745\n",
      "Epoch 1 Batch 658 Loss 2.1240\n",
      "Epoch 1 Batch 659 Loss 2.0391\n",
      "Epoch 1 Batch 660 Loss 1.9269\n",
      "Epoch 1 Batch 661 Loss 2.4455\n",
      "Epoch 1 Batch 662 Loss 2.2382\n",
      "Epoch 1 Batch 663 Loss 2.1433\n",
      "Epoch 1 Batch 664 Loss 1.8659\n",
      "Epoch 1 Batch 665 Loss 2.0793\n",
      "Epoch 1 Batch 666 Loss 2.1411\n",
      "Epoch 1 Batch 667 Loss 2.0839\n",
      "Epoch 1 Batch 668 Loss 2.0251\n",
      "Epoch 1 Batch 669 Loss 2.2564\n",
      "Epoch 1 Batch 670 Loss 2.1128\n",
      "Epoch 1 Batch 671 Loss 2.3190\n",
      "Epoch 1 Batch 672 Loss 1.9799\n",
      "Epoch 1 Batch 673 Loss 2.2628\n",
      "Epoch 1 Batch 674 Loss 2.1350\n",
      "Epoch 1 Batch 675 Loss 2.1155\n",
      "Epoch 1 Batch 676 Loss 2.1183\n",
      "Epoch 1 Batch 677 Loss 2.1326\n",
      "Epoch 1 Batch 678 Loss 1.9171\n",
      "Epoch 1 Batch 679 Loss 2.1623\n",
      "Epoch 1 Batch 680 Loss 2.1793\n",
      "Epoch 1 Batch 681 Loss 2.1433\n",
      "Epoch 1 Batch 682 Loss 1.9720\n",
      "Epoch 1 Batch 683 Loss 2.0097\n",
      "Epoch 1 Batch 684 Loss 2.2586\n",
      "Epoch 1 Batch 685 Loss 1.9340\n",
      "Epoch 1 Batch 686 Loss 1.9938\n",
      "Epoch 1 Batch 687 Loss 2.0805\n",
      "Epoch 1 Batch 688 Loss 2.1136\n",
      "Epoch 1 Batch 689 Loss 1.8471\n",
      "Epoch 1 Batch 690 Loss 2.3942\n",
      "Epoch 1 Batch 691 Loss 2.1608\n",
      "Epoch 1 Batch 692 Loss 2.4134\n",
      "Epoch 1 Batch 693 Loss 2.0048\n",
      "Epoch 1 Batch 694 Loss 2.0345\n",
      "Epoch 1 Batch 695 Loss 2.0658\n",
      "Epoch 1 Batch 696 Loss 1.8549\n",
      "Epoch 1 Batch 697 Loss 2.2914\n",
      "Epoch 1 Batch 698 Loss 2.3137\n",
      "Epoch 1 Batch 699 Loss 2.4022\n",
      "Epoch 1 Batch 700 Loss 2.0695\n",
      "Epoch 1 Batch 701 Loss 1.8702\n",
      "Epoch 1 Batch 702 Loss 2.1571\n",
      "Epoch 1 Batch 703 Loss 2.1750\n",
      "Epoch 1 Batch 704 Loss 2.1724\n",
      "Epoch 1 Batch 705 Loss 1.7857\n",
      "Epoch 1 Batch 706 Loss 1.8504\n",
      "Epoch 1 Batch 707 Loss 2.3303\n",
      "Epoch 1 Batch 708 Loss 2.3440\n",
      "Epoch 1 Batch 709 Loss 2.2255\n",
      "Epoch 1 Batch 710 Loss 1.7722\n",
      "Epoch 1 Batch 711 Loss 2.1970\n",
      "Epoch 1 Batch 712 Loss 2.1171\n",
      "Epoch 1 Batch 713 Loss 2.5078\n",
      "Epoch 1 Batch 714 Loss 2.2360\n",
      "Epoch 1 Batch 715 Loss 1.9440\n",
      "Epoch 1 Batch 716 Loss 2.2442\n",
      "Epoch 1 Batch 717 Loss 1.7328\n",
      "Epoch 1 Batch 718 Loss 2.2533\n",
      "Epoch 1 Batch 719 Loss 2.1108\n",
      "Epoch 1 Batch 720 Loss 2.4099\n",
      "Epoch 1 Batch 721 Loss 2.0016\n",
      "Epoch 1 Batch 722 Loss 2.0716\n",
      "Epoch 1 Batch 723 Loss 2.1926\n",
      "Epoch 1 Batch 724 Loss 2.4671\n",
      "Epoch 1 Batch 725 Loss 2.3793\n",
      "Epoch 1 Batch 726 Loss 2.2743\n",
      "Epoch 1 Batch 727 Loss 2.0544\n",
      "Epoch 1 Batch 728 Loss 2.0231\n",
      "Epoch 1 Batch 729 Loss 2.3589\n",
      "Epoch 1 Batch 730 Loss 2.1746\n",
      "Epoch 1 Batch 731 Loss 2.5439\n",
      "Epoch 1 Batch 732 Loss 2.2092\n",
      "Epoch 1 Batch 733 Loss 2.2016\n",
      "Epoch 1 Batch 734 Loss 2.0349\n",
      "Epoch 1 Batch 735 Loss 2.4469\n",
      "Epoch 1 Batch 736 Loss 1.9070\n",
      "Epoch 1 Batch 737 Loss 1.7458\n",
      "Epoch 1 Batch 738 Loss 2.1901\n",
      "Epoch 1 Batch 739 Loss 2.1716\n",
      "Epoch 1 Batch 740 Loss 2.0758\n",
      "Epoch 1 Batch 741 Loss 2.1677\n",
      "Epoch 1 Batch 742 Loss 2.1438\n",
      "Epoch 1 Batch 743 Loss 2.2564\n",
      "Epoch 1 Batch 744 Loss 2.1935\n",
      "Epoch 1 Batch 745 Loss 1.9997\n",
      "Epoch 1 Batch 746 Loss 2.1186\n",
      "Epoch 1 Batch 747 Loss 2.3290\n",
      "Epoch 1 Batch 748 Loss 2.1262\n",
      "Epoch 1 Batch 749 Loss 2.0920\n",
      "Epoch 1 Batch 750 Loss 1.8320\n",
      "Epoch 1 Batch 751 Loss 2.0997\n",
      "Epoch 1 Batch 752 Loss 2.0844\n",
      "Epoch 1 Batch 753 Loss 2.3516\n",
      "Epoch 1 Batch 754 Loss 1.8609\n",
      "Epoch 1 Batch 755 Loss 2.1334\n",
      "Epoch 1 Batch 756 Loss 2.4485\n",
      "Epoch 1 Batch 757 Loss 2.4315\n",
      "Epoch 1 Batch 758 Loss 2.1654\n",
      "Epoch 1 Batch 759 Loss 2.3811\n",
      "Epoch 1 Batch 760 Loss 2.1175\n",
      "Epoch 1 Batch 761 Loss 2.0717\n",
      "Epoch 1 Batch 762 Loss 2.0573\n",
      "Epoch 1 Batch 763 Loss 2.3096\n",
      "Epoch 1 Batch 764 Loss 2.3457\n",
      "Epoch 1 Batch 765 Loss 2.2768\n",
      "Epoch 1 Batch 766 Loss 2.0309\n",
      "Epoch 1 Batch 767 Loss 2.3318\n",
      "Epoch 1 Batch 768 Loss 2.4055\n",
      "Epoch 1 Batch 769 Loss 2.3676\n",
      "Epoch 1 Batch 770 Loss 2.2189\n",
      "Epoch 1 Batch 771 Loss 2.0969\n",
      "Epoch 1 Batch 772 Loss 2.1291\n",
      "Epoch 1 Batch 773 Loss 2.2111\n",
      "Epoch 1 Batch 774 Loss 1.9784\n",
      "Epoch 1 Batch 775 Loss 2.0738\n",
      "Epoch 1 Batch 776 Loss 2.2544\n",
      "Epoch 1 Batch 777 Loss 2.2589\n",
      "Epoch 1 Batch 778 Loss 2.4057\n",
      "Epoch 1 Batch 779 Loss 2.0899\n",
      "Epoch 1 Batch 780 Loss 2.4301\n",
      "Epoch 1 Batch 781 Loss 2.1814\n",
      "Epoch 1 Batch 782 Loss 2.0135\n",
      "Epoch 1 Batch 783 Loss 2.1907\n",
      "Epoch 1 Batch 784 Loss 1.9720\n",
      "Epoch 1 Batch 785 Loss 2.5012\n",
      "Epoch 1 Batch 786 Loss 2.0907\n",
      "Epoch 1 Batch 787 Loss 2.2344\n",
      "Epoch 1 Batch 788 Loss 2.3214\n",
      "Epoch 1 Batch 789 Loss 2.0755\n",
      "Epoch 1 Batch 790 Loss 2.1184\n",
      "Epoch 1 Batch 791 Loss 2.1651\n",
      "Epoch 1 Batch 792 Loss 1.8923\n",
      "Epoch 1 Batch 793 Loss 1.9504\n",
      "Epoch 1 Batch 794 Loss 2.2931\n",
      "Epoch 1 Batch 795 Loss 2.1867\n",
      "Epoch 1 Batch 796 Loss 2.4754\n",
      "Epoch 1 Batch 797 Loss 2.3669\n",
      "Epoch 1 Batch 798 Loss 1.9183\n",
      "Epoch 1 Batch 799 Loss 2.0670\n",
      "Epoch 1 Batch 800 Loss 1.9223\n",
      "Epoch 1 Batch 801 Loss 2.2407\n",
      "Epoch 1 Batch 802 Loss 2.3231\n",
      "Epoch 1 Batch 803 Loss 1.8990\n",
      "Epoch 1 Batch 804 Loss 2.1703\n",
      "Epoch 1 Batch 805 Loss 2.2004\n",
      "Epoch 1 Batch 806 Loss 2.3747\n",
      "Epoch 1 Batch 807 Loss 2.2856\n",
      "Epoch 1 Batch 808 Loss 2.1419\n",
      "Epoch 1 Batch 809 Loss 2.1070\n",
      "Epoch 1 Batch 810 Loss 2.1926\n",
      "Epoch 1 Batch 811 Loss 2.1778\n",
      "Epoch 1 Batch 812 Loss 2.4376\n",
      "Epoch 1 Batch 813 Loss 2.0695\n",
      "Epoch 1 Batch 814 Loss 2.5288\n",
      "Epoch 1 Batch 815 Loss 2.4851\n",
      "Epoch 1 Batch 816 Loss 1.9940\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1 Batch 817 Loss 2.3179\n",
      "Epoch 1 Batch 818 Loss 2.3326\n",
      "Epoch 1 Batch 819 Loss 2.2388\n",
      "Epoch 1 Batch 820 Loss 1.9377\n",
      "Epoch 1 Batch 821 Loss 1.8587\n",
      "Epoch 1 Batch 822 Loss 2.1946\n",
      "Epoch 1 Batch 823 Loss 2.2307\n",
      "Epoch 1 Batch 824 Loss 2.4543\n",
      "Epoch 1 Batch 825 Loss 2.0139\n",
      "Epoch 1 Batch 826 Loss 2.2404\n",
      "Epoch 1 Batch 827 Loss 2.4659\n",
      "Epoch 1 Batch 828 Loss 2.1982\n",
      "Epoch 1 Batch 829 Loss 2.1694\n",
      "Epoch 1 Batch 830 Loss 2.0169\n",
      "Epoch 1 Batch 831 Loss 2.5029\n",
      "Epoch 1 Batch 832 Loss 2.3120\n",
      "Epoch 1 Batch 833 Loss 2.0362\n",
      "Epoch 1 Batch 834 Loss 2.3236\n",
      "Epoch 1 Batch 835 Loss 2.2389\n",
      "Epoch 1 Batch 836 Loss 2.2738\n",
      "Epoch 1 Batch 837 Loss 1.9994\n",
      "Epoch 1 Batch 838 Loss 2.1853\n",
      "Epoch 1 Batch 839 Loss 2.1131\n",
      "Epoch 1 Batch 840 Loss 2.2625\n",
      "Epoch 1 Batch 841 Loss 2.2058\n",
      "Epoch 1 Batch 842 Loss 2.2570\n",
      "Epoch 1 Batch 843 Loss 2.1348\n",
      "Epoch 1 Batch 844 Loss 1.8839\n",
      "Epoch 1 Batch 845 Loss 2.0052\n",
      "Epoch 1 Batch 846 Loss 2.0830\n",
      "Epoch 1 Batch 847 Loss 2.3335\n",
      "Epoch 1 Batch 848 Loss 2.2582\n",
      "Epoch 1 Batch 849 Loss 2.2369\n",
      "Epoch 1 Batch 850 Loss 2.8099\n",
      "Epoch 1 Batch 851 Loss 2.0245\n",
      "Epoch 1 Batch 852 Loss 1.9994\n",
      "Epoch 1 Batch 853 Loss 2.0697\n",
      "Epoch 1 Batch 854 Loss 2.5113\n",
      "Epoch 1 Batch 855 Loss 2.0827\n",
      "Epoch 1 Batch 856 Loss 1.9940\n",
      "Epoch 1 Batch 857 Loss 2.2379\n",
      "Epoch 1 Batch 858 Loss 2.3938\n",
      "Epoch 1 Batch 859 Loss 2.2033\n",
      "Epoch 1 Batch 860 Loss 2.2252\n",
      "Epoch 1 Batch 861 Loss 2.1255\n",
      "Epoch 1 Batch 862 Loss 2.0045\n",
      "Epoch 1 Batch 863 Loss 2.3593\n",
      "Epoch 1 Batch 864 Loss 2.6428\n",
      "Epoch 1 Batch 865 Loss 2.0454\n",
      "Epoch 1 Batch 866 Loss 1.9888\n",
      "Epoch 1 Batch 867 Loss 2.6600\n",
      "Epoch 1 Batch 868 Loss 2.3253\n",
      "Epoch 1 Batch 869 Loss 2.1911\n",
      "Epoch 1 Batch 870 Loss 2.5863\n",
      "Epoch 1 Batch 871 Loss 2.1757\n",
      "Epoch 1 Batch 872 Loss 2.1162\n",
      "Epoch 1 Batch 873 Loss 2.3512\n",
      "Epoch 1 Batch 874 Loss 2.1509\n",
      "Epoch 1 Batch 875 Loss 1.8467\n",
      "Epoch 1 Batch 876 Loss 2.5187\n",
      "Epoch 1 Batch 877 Loss 2.3313\n",
      "Epoch 1 Batch 878 Loss 2.0082\n",
      "Epoch 1 Batch 879 Loss 2.5054\n",
      "Epoch 1 Batch 880 Loss 2.1337\n",
      "Epoch 1 Batch 881 Loss 2.3596\n",
      "Epoch 1 Batch 882 Loss 2.4371\n",
      "Epoch 1 Batch 883 Loss 2.0255\n",
      "Epoch 1 Batch 884 Loss 2.2618\n",
      "Epoch 1 Batch 885 Loss 2.2634\n",
      "Epoch 1 Batch 886 Loss 2.6062\n",
      "Epoch 1 Batch 887 Loss 2.4678\n",
      "Epoch 1 Batch 888 Loss 2.2765\n",
      "Epoch 1 Batch 889 Loss 2.0335\n",
      "Epoch 1 Batch 890 Loss 2.2849\n",
      "Epoch 1 Batch 891 Loss 2.5117\n",
      "Epoch 1 Batch 892 Loss 2.4568\n",
      "Epoch 1 Batch 893 Loss 2.5317\n",
      "Epoch 1 Batch 894 Loss 2.1916\n",
      "Epoch 1 Batch 895 Loss 2.3718\n",
      "Epoch 1 Batch 896 Loss 2.2001\n",
      "Epoch 1 Batch 897 Loss 2.6160\n",
      "Epoch 1 Batch 898 Loss 2.1643\n",
      "Epoch 1 Batch 899 Loss 1.9197\n",
      "Epoch 1 Batch 900 Loss 2.2983\n",
      "Epoch 1 Batch 901 Loss 2.5797\n",
      "Epoch 1 Batch 902 Loss 2.4172\n",
      "Epoch 1 Batch 903 Loss 1.9908\n",
      "Epoch 1 Batch 904 Loss 2.1265\n",
      "Epoch 1 Batch 905 Loss 2.0301\n",
      "Epoch 1 Batch 906 Loss 2.1047\n",
      "Epoch 1 Batch 907 Loss 2.1703\n",
      "Epoch 1 Batch 908 Loss 2.2642\n",
      "Epoch 1 Batch 909 Loss 2.2461\n",
      "Epoch 1 Batch 910 Loss 2.3334\n",
      "Epoch 1 Batch 911 Loss 2.2069\n",
      "Epoch 1 Batch 912 Loss 2.2180\n",
      "Epoch 1 Batch 913 Loss 2.1248\n",
      "Epoch 1 Batch 914 Loss 2.4448\n",
      "Epoch 1 Batch 915 Loss 2.4139\n",
      "Epoch 1 Batch 916 Loss 2.2031\n",
      "Epoch 1 Batch 917 Loss 2.1631\n",
      "Epoch 1 Batch 918 Loss 2.4341\n",
      "Epoch 1 Batch 919 Loss 2.1607\n",
      "Epoch 1 Batch 920 Loss 2.1535\n",
      "Epoch 1 Batch 921 Loss 2.1592\n",
      "Epoch 1 Batch 922 Loss 2.4237\n",
      "Epoch 1 Batch 923 Loss 2.3090\n",
      "Epoch 1 Batch 924 Loss 2.2032\n",
      "Epoch 1 Batch 925 Loss 2.1480\n",
      "Epoch 1 Batch 926 Loss 2.5403\n",
      "Epoch 1 Batch 927 Loss 2.2199\n",
      "Epoch 1 Batch 928 Loss 2.6908\n",
      "Epoch 1 Batch 929 Loss 2.2617\n",
      "Epoch 1 Batch 930 Loss 2.3458\n",
      "Epoch 1 Batch 931 Loss 2.1869\n",
      "Epoch 1 Batch 932 Loss 2.1817\n",
      "Epoch 1 Batch 933 Loss 2.0126\n",
      "Epoch 1 Batch 934 Loss 2.2999\n",
      "Epoch 1 Batch 935 Loss 2.2681\n",
      "Epoch 1 Batch 936 Loss 2.3188\n",
      "Epoch 1 Batch 937 Loss 2.4068\n",
      "Epoch 1 Batch 938 Loss 1.9257\n",
      "Epoch 1 Batch 939 Loss 2.8715\n",
      "Epoch 1 Batch 940 Loss 2.3284\n",
      "Epoch 1 Batch 941 Loss 2.4703\n",
      "Epoch 1 Batch 942 Loss 1.9278\n",
      "Epoch 1 Batch 943 Loss 2.5752\n",
      "Epoch 1 Batch 944 Loss 2.3016\n",
      "Epoch 1 Batch 945 Loss 2.3519\n",
      "Epoch 1 Batch 946 Loss 2.6067\n",
      "Epoch 1 Batch 947 Loss 2.1725\n",
      "Epoch 1 Batch 948 Loss 2.3183\n",
      "Epoch 1 Batch 949 Loss 2.2597\n",
      "Epoch 1 Batch 950 Loss 2.5236\n",
      "Epoch 1 Batch 951 Loss 2.5606\n",
      "Epoch 1 Batch 952 Loss 2.3692\n",
      "Epoch 1 Batch 953 Loss 2.0084\n",
      "Epoch 1 Batch 954 Loss 2.0497\n",
      "Epoch 1 Batch 955 Loss 1.9541\n",
      "Epoch 1 Batch 956 Loss 2.5700\n",
      "Epoch 1 Batch 957 Loss 2.7822\n",
      "Epoch 1 Batch 958 Loss 2.3446\n",
      "Epoch 1 Batch 959 Loss 2.2781\n",
      "Epoch 1 Batch 960 Loss 2.2167\n",
      "Epoch 1 Batch 961 Loss 2.2253\n",
      "Epoch 1 Batch 962 Loss 2.2260\n",
      "Epoch 1 Batch 963 Loss 1.9500\n",
      "Epoch 1 Batch 964 Loss 2.3157\n",
      "Epoch 1 Batch 965 Loss 2.5531\n",
      "Epoch 1 Batch 966 Loss 2.2616\n",
      "Epoch 1 Batch 967 Loss 2.3057\n",
      "Epoch 1 Batch 968 Loss 2.0159\n",
      "Epoch 1 Batch 969 Loss 2.5752\n",
      "Epoch 1 Batch 970 Loss 2.2113\n",
      "Epoch 1 Batch 971 Loss 2.4458\n",
      "Epoch 1 Batch 972 Loss 2.4514\n",
      "Epoch 1 Batch 973 Loss 2.3490\n",
      "Epoch 1 Batch 974 Loss 2.4427\n",
      "Epoch 1 Batch 975 Loss 2.4432\n",
      "Epoch 1 Batch 976 Loss 2.0994\n",
      "Epoch 1 Batch 977 Loss 2.2681\n",
      "Epoch 1 Batch 978 Loss 2.2518\n",
      "Epoch 1 Batch 979 Loss 2.2987\n",
      "Epoch 1 Batch 980 Loss 2.3651\n",
      "Epoch 1 Batch 981 Loss 2.3746\n",
      "Epoch 1 Batch 982 Loss 2.0803\n",
      "Epoch 1 Batch 983 Loss 2.2311\n",
      "Epoch 1 Batch 984 Loss 2.5075\n",
      "Epoch 1 Batch 985 Loss 2.2246\n",
      "Epoch 1 Batch 986 Loss 2.5013\n",
      "Epoch 1 Batch 987 Loss 2.4623\n",
      "Epoch 1 Batch 988 Loss 2.3641\n",
      "Epoch 1 Batch 989 Loss 2.2926\n",
      "Epoch 1 Batch 990 Loss 2.3205\n",
      "Epoch 1 Batch 991 Loss 2.1898\n",
      "Epoch 1 Batch 992 Loss 2.3653\n",
      "Epoch 1 Batch 993 Loss 2.3349\n",
      "Epoch 1 Batch 994 Loss 2.6204\n",
      "Epoch 1 Batch 995 Loss 2.1218\n",
      "Epoch 1 Batch 996 Loss 2.2386\n",
      "Epoch 1 Batch 997 Loss 2.2458\n",
      "Epoch 1 Batch 998 Loss 2.3592\n",
      "Epoch 1 Batch 999 Loss 2.3055\n",
      "Epoch 1 Batch 1000 Loss 2.2248\n",
      "Epoch 1 Batch 1001 Loss 2.1478\n",
      "Epoch 1 Batch 1002 Loss 2.2143\n",
      "Epoch 1 Batch 1003 Loss 2.3767\n",
      "Epoch 1 Batch 1004 Loss 2.4526\n",
      "Epoch 1 Batch 1005 Loss 2.2948\n",
      "Epoch 1 Batch 1006 Loss 2.3635\n",
      "Epoch 1 Batch 1007 Loss 2.5546\n",
      "Epoch 1 Batch 1008 Loss 2.4741\n",
      "Epoch 1 Batch 1009 Loss 2.2270\n",
      "Epoch 1 Batch 1010 Loss 2.1507\n",
      "Epoch 1 Batch 1011 Loss 2.7432\n",
      "Epoch 1 Batch 1012 Loss 2.5299\n",
      "Epoch 1 Batch 1013 Loss 2.2152\n",
      "Epoch 1 Batch 1014 Loss 2.2291\n",
      "Epoch 1 Batch 1015 Loss 2.3214\n",
      "Epoch 1 Batch 1016 Loss 2.3542\n",
      "Epoch 1 Batch 1017 Loss 2.4660\n",
      "Epoch 1 Batch 1018 Loss 2.2281\n",
      "Epoch 1 Batch 1019 Loss 2.4856\n",
      "Epoch 1 Batch 1020 Loss 2.6518\n",
      "Epoch 1 Batch 1021 Loss 2.3258\n",
      "Epoch 1 Batch 1022 Loss 2.4924\n",
      "Epoch 1 Batch 1023 Loss 2.4144\n",
      "Epoch 1 Batch 1024 Loss 2.6474\n",
      "Epoch 1 Batch 1025 Loss 2.3999\n",
      "Epoch 1 Batch 1026 Loss 2.5337\n",
      "Epoch 1 Batch 1027 Loss 1.9406\n",
      "Epoch 1 Batch 1028 Loss 2.1111\n",
      "Epoch 1 Batch 1029 Loss 1.9341\n",
      "Epoch 1 Batch 1030 Loss 2.3817\n",
      "Epoch 1 Batch 1031 Loss 2.3653\n",
      "Epoch 1 Batch 1032 Loss 2.5683\n",
      "Epoch 1 Batch 1033 Loss 2.5996\n",
      "Epoch 1 Batch 1034 Loss 2.3478\n",
      "Epoch 1 Batch 1035 Loss 2.5598\n",
      "Epoch 1 Batch 1036 Loss nan\n",
      "Epoch 1 Batch 1037 Loss nan\n",
      "Epoch 1 Batch 1038 Loss nan\n",
      "Epoch 1 Batch 1039 Loss nan\n",
      "Epoch 1 Batch 1040 Loss nan\n",
      "Epoch 1 Batch 1041 Loss nan\n",
      "Epoch 1 Batch 1042 Loss nan\n",
      "Epoch 1 Batch 1043 Loss nan\n",
      "Epoch 1 Batch 1044 Loss nan\n",
      "Epoch 1 Batch 1045 Loss nan\n",
      "Epoch 1 Batch 1046 Loss nan\n",
      "Epoch 1 Batch 1047 Loss nan\n",
      "Epoch 1 Batch 1048 Loss nan\n",
      "Epoch 1 Batch 1049 Loss nan\n",
      "Epoch 1 Batch 1050 Loss nan\n",
      "Epoch 1 Batch 1051 Loss nan\n",
      "Epoch 1 Batch 1052 Loss nan\n",
      "Epoch 1 Batch 1053 Loss nan\n",
      "Epoch 1 Batch 1054 Loss nan\n",
      "Epoch 1 Batch 1055 Loss nan\n",
      "Epoch 1 Batch 1056 Loss nan\n",
      "Epoch 1 Batch 1057 Loss nan\n",
      "Epoch 1 Batch 1058 Loss nan\n",
      "Epoch 1 Batch 1059 Loss nan\n",
      "Epoch 1 Batch 1060 Loss nan\n"
     ]
    }
   ],
   "source": [
    "# Train the seq2seq model for 2 epochs.\n",
    "# NOTE(review): the output above shows the loss diverging to nan from\n",
    "# batch 1036 onward; with learning_rate 0.01 and clipnorm 1 (see the\n",
    "# optimizer info printed above) the LR is likely too high — confirm and\n",
    "# consider lowering it before rerunning.\n",
    "%run seq2seq_tf2/train.py --epochs 2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1 Physical GPUs, 1 Logical GPUs\n",
      "Building the model ...\n",
      "Creating the vocab ...\n",
      "Creating the checkpoint manager\n",
      "Creating the checkpoint manager\n",
      "Restored from E:\\GitHub\\QA-abstract-and-reasoning\\data\\checkpoints\\seq2seq_checkpoints\\ckpt-4\n",
      "Model restored\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████████████████████████████████████████████████████████████████████████████| 79/79 [01:03<00:00,  1.25it/s]\n"
     ]
    }
   ],
   "source": [
    "# Run inference over the test set (79 batches per the progress bar above),\n",
    "# restoring the latest checkpoint (ckpt-4) as shown in the output.\n",
    "%run seq2seq_tf2/test.py --mode \"test\" --batch_size 3"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 实验dataset — tf.data.Dataset batching/shuffling experiments"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Toy tf.data pipeline over 12 integers, used to probe batching behavior.\n",
    "a = tf.constant([1,2,3,4,5,6,7,8,9,10,11,12])\n",
    "ds = tf.data.Dataset.from_tensor_slices(a)\n",
    "# NOTE(review): shuffle() applied AFTER batch() shuffles whole batches,\n",
    "# not individual elements; if element-level shuffling was intended, use\n",
    "# ds.shuffle(buffer).batch(3) instead — confirm intent before changing,\n",
    "# as the outputs below were produced with this ordering.\n",
    "ds = ds.batch(3).shuffle(5).repeat()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Project-local imports for inspecting the training data pipeline.\n",
    "from seq2seq_tf2.batcher import batcher\n",
    "from utils.saveLoader import Vocab\n",
    "from utils.config import VOCAB_PAD\n",
    "vocab = Vocab(VOCAB_PAD)\n",
    "# get params — %run executes params.py in this namespace; presumably it\n",
    "# defines the `params` object consumed by batcher() below (TODO confirm)\n",
    "%run utils/params.py"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Build the batched tf.data dataset from the vocab and run params.\n",
    "dataset = batcher(vocab, params)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Pull a single batch to inspect the encoder/decoder input dictionaries.\n",
    "enc_params, dec_params = next(iter(dataset))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 80,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: id=21222, shape=(64, 37), dtype=int32, numpy=\n",
       "array([[   45,  2005,    48, ..., 21873, 21873, 21873],\n",
       "       [  170,    47,    54, ..., 21873, 21873, 21873],\n",
       "       [    4,   105,   217, ..., 21873, 21873, 21873],\n",
       "       ...,\n",
       "       [    4,   105,     6, ..., 21873, 21873, 21873],\n",
       "       [   39,   273, 21872, ..., 21873, 21873, 21873],\n",
       "       [    4,   105,   217, ..., 21873, 21873, 21873]])>"
      ]
     },
     "execution_count": 80,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Decoder target ids for the batch — shape (64, 37) per the output above,\n",
    "# padded with id 21873.\n",
    "dec_params[\"target\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 75,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: id=21219, shape=(64, 37), dtype=int32, numpy=\n",
       "array([[21870,    45,  2005, ..., 21873, 21873, 21873],\n",
       "       [21870,   170,    47, ..., 21873, 21873, 21873],\n",
       "       [21870,     4,   105, ..., 21873, 21873, 21873],\n",
       "       ...,\n",
       "       [21870,     4,   105, ..., 21873, 21873, 21873],\n",
       "       [21870,    39,   273, ..., 21873, 21873, 21873],\n",
       "       [21870,     4,   105, ..., 21873, 21873, 21873]])>"
      ]
     },
     "execution_count": 75,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Decoder input ids — per the outputs above, these look like the target\n",
    "# sequences prefixed with a start-token id (21870); verify against batcher.\n",
    "dec_params[\"dec_input\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 74,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: id=21215, shape=(64, 10), dtype=int32, numpy=\n",
       "array([[21870,   899,  1539,   180, 21872, 21873, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   170,    47,   767,    17,     4,   174,   767, 21872,\n",
       "        21873],\n",
       "       [21870,  1417,   260,   126,  1417, 21872, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   103,   233,   183,   502,    93, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1337,   329,   182,  1417,  1332,  4670,   692,     7,\n",
       "        21872],\n",
       "       [21870, 21871,     6,    62,  2390,   283,  5040,  1870,  3580,\n",
       "        21872],\n",
       "       [21870,   275,   118,   476,    68,    77,   279, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,  1724,    95,   107,  1278,    12,    74,     4, 21872,\n",
       "        21873],\n",
       "       [21870,   456,    87,  3544,  2863,   140, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1188, 10302,   267,   950,   447, 21871, 13631, 21872,\n",
       "        21873],\n",
       "       [21870,  1319,  9360,  3011,  3868,  1405,   447,   398, 21872,\n",
       "        21873],\n",
       "       [21870,   312,    84,   169,   803,   147, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  8239,    18,  4573,  3101,   102,  3522,  1441,   825,\n",
       "        21872],\n",
       "       [21870,  1026,   420,    63,   276,   178,   165, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,  9530,  9215,    34,  3027,    73, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870, 12202,   437,  1276,   398, 21872, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   258,    52, 21871,  7355,  1760, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  3415,    13,  2255,  1023,   303,    38,   431, 21872,\n",
       "        21873],\n",
       "       [21870, 14733,   245, 21872, 21873, 21873, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  2606, 12922,   237,  2196,   126, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   642,  3007,  2172,    87, 21872, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  4738,  6709,   100,    23,  3115, 16148,     7, 21872,\n",
       "        21873],\n",
       "       [21870,  2033,    18,    77,    79, 21872, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1179, 17757,  3600,  1990,   592,   126, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,     0,    14,     0,    34,   147,   844,   322, 21871,\n",
       "        21872],\n",
       "       [21870,   209,    36,     4,   186,   596,     3,    36, 21872,\n",
       "        21873],\n",
       "       [21870, 11531,    18,  7652,  1145,    77,   285,  1136,    63,\n",
       "        21872],\n",
       "       [21870,     4,   153,   868,     2,  2924,  4870, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,  1456,  5743, 21871,   252,   312,    84, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,   454,   305,     4,    14,   305, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   684,  6894,   284,  1443,   261,   378,  1203,   233,\n",
       "        21872],\n",
       "       [21870,  8256,    37,    65,     5,   716, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1293,  3443,   307,    18,   128, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1026,   844,   573,   100,     5,   100,   172, 21871,\n",
       "        21872],\n",
       "       [21870,  6294,  8679,  1471,    73,     9, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,    14,   157, 15555,    26, 21871,    83,   278, 21872,\n",
       "        21873],\n",
       "       [21870,   117,     9,     6,    74, 21872, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1113,    13,  2101,   435,    36, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,  1144,    81,    13,  1429,   237,  7658,    56, 21872,\n",
       "        21873],\n",
       "       [21870, 14821,     2,   536,  3809,   885,   147,   586, 21872,\n",
       "        21873],\n",
       "       [21870,   281,    73,    34,  2519,  3412,   220,   180, 21872,\n",
       "        21873],\n",
       "       [21870,  1865,  3146,   343,   656,    88,    35,  3482,   434,\n",
       "        21872],\n",
       "       [21870,     9,     6,    62, 21871,   573,   344,  4234, 21872,\n",
       "        21873],\n",
       "       [21870,    18,     4,  1228,    18,  1647, 21872, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   556,     4,     1, 21872, 21873, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   848,  2792,   252,   228,   381,   193, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,    39,    51,   179,     5,   130,   496, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,     0,   359,   946,   612,   233,   278,     6, 21872,\n",
       "        21873],\n",
       "       [21870,    88,   363,    23,  2110,   153,   415,  1266, 21872,\n",
       "        21873],\n",
       "       [21870,     4,   153,   850,     9,   399,   180, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,  1655, 21871,   492,    42,  2091,  5291, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,    88,  6319,     7, 21872, 21873, 21873, 21873, 21873,\n",
       "        21873],\n",
       "       [21870,   684,  4999,  2569,   740,    91,   100, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,    14,   589,   466,    79,  1659,     4,   716, 21872,\n",
       "        21873],\n",
       "       [21870,   499,  2813,   919,   281,    73,   220,  1243,  7431,\n",
       "        21872],\n",
       "       [21870,   541,  5395,   255,    10,    42,     6,   507,   798,\n",
       "        21872],\n",
       "       [21870,  1485,  1437,    39, 21871,   804,   323, 21872, 21873,\n",
       "        21873],\n",
       "       [21870,  1291,  1461,   617,  2608,    65,     3,   309,   953,\n",
       "        21872],\n",
       "       [21870,   170,   205,    94,    95,   445,    88,   882, 21872,\n",
       "        21873],\n",
       "       [21870, 14470,  1144,     3,   317,    74,   767,  1624, 21872,\n",
       "        21873],\n",
       "       [21870,   153,   172,    47,    63,   682,  3475,    92, 21872,\n",
       "        21873],\n",
       "       [21870,   688,   422,    73,   316,  1363,   474,   158, 21872,\n",
       "        21873],\n",
       "       [21870,  5395,   394,     9,     6,    39,   273,   146, 21872,\n",
       "        21873],\n",
       "       [21870,   894,  1429,   271,    73, 21872, 21873, 21873, 21873,\n",
       "        21873]])>"
      ]
     },
     "execution_count": 74,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Encoder input ids for the inspected batch.\n",
    "# NOTE(review): the recorded output shows shape (64, 10), not the\n",
    "# max_enc_len 302 reported during training — this output is likely stale\n",
    "# (execution counts here are out of order); re-run to confirm.\n",
    "enc_params[\"enc_input\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(\n",
      "[[21870  1485  1437    39 21871   804   323 21872 21873 21873]\n",
      " [21870  8059 13762  8059  1036  3270   180 21872 21873 21873]\n",
      " [21870    54  2393  1145  2164   255  4166 11664 21872 21873]\n",
      " [21870     4   153   868     2  2924  4870 21872 21873 21873]\n",
      " [21870   851   848  2897    97  2543  5804 21872 21873 21873]\n",
      " [21870  1188 10302   267   950   447 21871 13631 21872 21873]\n",
      " [21870  2296  3640  1057 17870   176   492   381 21872 21873]\n",
      " [21870 21871 20423 12982     9   353   323 21872 21873 21873]\n",
      " [21870 21871 21871 21871     4    23   278 21872 21873 21873]\n",
      " [21870   103   233   183   502    93 21872 21873 21873 21873]\n",
      " [21870  3056   437  1126   863   150  2575   953 21872 21873]\n",
      " [21870   281    73    34  2519  3412   220   180 21872 21873]\n",
      " [21870   267    87    17 15972   617    17  6589    53 21872]\n",
      " [21870  1223   608   173  1140   102  4031 21872 21873 21873]\n",
      " [21870    10   214 21871   234  1470   565  2156 21872 21873]\n",
      " [21870   153    93  1542  1283    80   636 21871   342 21872]\n",
      " [21870   642  3007  2172    87 21872 21873 21873 21873 21873]\n",
      " [21870  1403     6   115   515 21871 21872 21873 21873 21873]\n",
      " [21870   694  1260  5155  6352  2390  1840 21871    73 21872]\n",
      " [21870   851  3627   609   266   147 21872 21873 21873 21873]\n",
      " [21870 14733   245 21872 21873 21873 21873 21873 21873 21873]\n",
      " [21870 10261 21871  9261  1424   359 21872 21873 21873 21873]\n",
      " [21870   209     1   300    50    51   870    41   555 21872]\n",
      " [21870    67   957  2154    13  2350   271   800 21872 21873]\n",
      " [21870   801 21871 21871  4343 21871   809   746     6 21872]\n",
      " [21870   326   205  6650   153  1287  2341 21872 21873 21873]\n",
      " [21870  4681  1258   441   327   142    68 21871 21872 21873]\n",
      " [21870   916  6575     0     6    74     1  1034  1541 21872]\n",
      " [21870  9775   245   381 21872 21873 21873 21873 21873 21873]\n",
      " [21870   251    14   214   868  1118     4  9959 21872 21873]\n",
      " [21870  9323   181    37    65   163   147   132 21872 21873]\n",
      " [21870  2350   266    73   131 21871  3458   323 21872 21873]\n",
      " [21870 16414   167   180 21872 21873 21873 21873 21873 21873]\n",
      " [21870    24    10   864  2194    10   323    27   116 21872]\n",
      " [21870   350   169 21871   644     4  1124   151    61 21872]\n",
      " [21870     0    27  1853   360    24   249   638 21872 21873]\n",
      " [21870  5234  2378 21871   136   103   236   342 21872 21873]\n",
      " [21870    77   115  2893 21872 21873 21873 21873 21873 21873]\n",
      " [21870 21871     4     0     1     1   128   473 21872 21873]\n",
      " [21870 21871   316  1363    91   284 11625   130 21872 21873]\n",
      " [21870   154   166  3914     4    63  1145     3    77 21872]\n",
      " [21870  3879  1366 21068 13166   131 21872 21873 21873 21873]\n",
      " [21870  3190    97   404   126   271   863    16 21872 21873]\n",
      " [21870   748  1320   538  1676   202   505 21872 21873 21873]\n",
      " [21870   437   162    94  4111  2349   167 21872 21873 21873]\n",
      " [21870   153   216   118    77   864     8 21871 21872 21873]\n",
      " [21870  5654 21871   278 21872 21873 21873 21873 21873 21873]\n",
      " [21870  1269   760   648 21871  1130  6309 21872 21873 21873]\n",
      " [21870  5898    65   457    14     6    62 21872 21873 21873]\n",
      " [21870  1337   329   182  1417  1332  4670   692     7 21872]\n",
      " [21870   437    36   294   456 21872 21873 21873 21873 21873]\n",
      " [21870   523   429   984   804   268    22    46 21872 21873]\n",
      " [21870 12369   377   332 21871  9698   202 21872 21873 21873]\n",
      " [21870  3600   573   100 21871 21872 21873 21873 21873 21873]\n",
      " [21870  4783  1738  1683   266   276  1683   227   268 21872]\n",
      " [21870     4  4403   153   384  4332   830   552 21872 21873]\n",
      " [21870  1016   499  7859   103   671    16 21872 21873 21873]\n",
      " [21870  7389 21871  1541  1124   151  2281 21871 21872 21873]\n",
      " [21870  4871     6  2198   140    51 21872 21873 21873 21873]\n",
      " [21870  3041 21871    63   165    18    13  1785 21872 21873]\n",
      " [21870   499  1142   437  1126   738   117    42   369 21872]\n",
      " [21870    58   224    36   478  1590 21871 21872 21873 21873]\n",
      " [21870   842  2352    14   347   767  9422 21872 21873 21873]\n",
      " [21870  6212   394 21871   278 21872 21873 21873 21873 21873]], shape=(64, 10), dtype=int32)\n",
      "tf.Tensor(\n",
      "[[21870   684  7790   639    10   584 21872 21873 21873 21873]\n",
      " [21870   339    37  3875    11  2034  6532  3522 21872 21873]\n",
      " [21870     0     6    62   468    31     9   107 21872 21873]\n",
      " [21870    80  2871   373   119   180 21872 21873 21873 21873]\n",
      " [21870  6080    14  1250 21871 21872 21873 21873 21873 21873]\n",
      " [21870   243   202     6 21871   111  1158   243  7726 21872]\n",
      " [21870  2331   684  1957  6206     9  3307   147   497 21872]\n",
      " [21870  5136   170   750   167    16 21872 21873 21873 21873]\n",
      " [21870 17278    10   447  4801    54  1077   102 21872 21873]\n",
      " [21870   153   238   932  9097    27 21872 21873 21873 21873]\n",
      " [21870 21871  2154 13289   180 21872 21873 21873 21873 21873]\n",
      " [21870  2562     1 21872 21873 21873 21873 21873 21873 21873]\n",
      " [21870  1437  5993   188 18481    49   127  1769  5993 21872]\n",
      " [21870    48     2   206  8132    87     4     0     2 21872]\n",
      " [21870   339   130   740  3122 21872 21873 21873 21873 21873]\n",
      " [21870 12202   437  1276   398 21872 21873 21873 21873 21873]\n",
      " [21870    40   296   346   853 21872 21873 21873 21873 21873]\n",
      " [21870  2082    11   683   646   114   526  2843   662 21872]\n",
      " [21870  1325  1293     2   132   759   108 21872 21873 21873]\n",
      " [21870 21871   845   631 21872 21873 21873 21873 21873 21873]\n",
      " [21870  1473  1050  1429  2129    10 21872 21873 21873 21873]\n",
      " [21870   891  9158  2020  6367   275     9   245   644 21872]\n",
      " [21870  1291  2897  1199   723 14064   477    84 21871 21872]\n",
      " [21870   153  1037  1559  1710   482   408   419   270 21872]\n",
      " [21870   851  1738    13   851  2792   299 21872 21873 21873]\n",
      " [21870  2859  2915   928   136 21871   476   248 21872 21873]\n",
      " [21870  2219   209     6  3261  1713    17  1785   579 21872]\n",
      " [21870    12    80   466  1560   119    74 21872 21873 21873]\n",
      " [21870  1789   672 14612   813   354  1512   152 21872 21873]\n",
      " [21870  1700  1546   680   109 21872 21873 21873 21873 21873]\n",
      " [21870  1782  1797    17  1154   237   526    18   551 21872]\n",
      " [21870  6080   153  4324  1795   268   711   998 21872 21873]\n",
      " [21870  3373   275 21871  2587    88    20  3455  4804 21872]\n",
      " [21870     9     6   399    93   693   984 21872 21873 21873]\n",
      " [21870   971   102   214   640  1768    21 21872 21873 21873]\n",
      " [21870     9   169  1121   712 11553   180 21872 21873 21873]\n",
      " [21870     0   359   946   612   233   278     6 21872 21873]\n",
      " [21870 13967 12424  1942    21   730    88   730 21872 21873]\n",
      " [21870  6623  1452   340  1828    74 21872 21873 21873 21873]\n",
      " [21870   142   222 21872 21873 21873 21873 21873 21873 21873]\n",
      " [21870  4602  8964 21871     4    69    40     6    62 21872]\n",
      " [21870  3108   260   271   171   183  2180   180 21872 21873]\n",
      " [21870    14   458   167   128   747    80   665    53 21872]\n",
      " [21870     4  1853  3011   624   696     4 21871   624 21872]\n",
      " [21870   851  1738   587   456  3811   200 21872 21873 21873]\n",
      " [21870  4594    26   322 21871  6284    47 21872 21873 21873]\n",
      " [21870     6    62  2586   213   766 21872 21873 21873 21873]\n",
      " [21870    18   888    12     4    19   174   233    16 21872]\n",
      " [21870  4369   115     1  4369 21871 21872 21873 21873 21873]\n",
      " [21870 21871    73     4     9 13190 21872 21873 21873 21873]\n",
      " [21870   108     9     2    17   727  5133     9    65 21872]\n",
      " [21870   547 16305   430   951     1 21872 21873 21873 21873]\n",
      " [21870  9555   482  1672   141   267  1396   144     3 21872]\n",
      " [21870 21871   485   169   398  1127   169  2269 21872 21873]\n",
      " [21870  1150  2023   542  1886  6758  1490   716     2 21872]\n",
      " [21870  1446  1837   169  7833    61   644 21872 21873 21873]\n",
      " [21870  1026   844   573   100     5   100   172 21871 21872]\n",
      " [21870   104  1050 11775  1153  3878  5657 21872 21873 21873]\n",
      " [21870  2271  2425   326  1673  2040  1369  2328  1412 21872]\n",
      " [21870   210    89   650   366   256    12 12548     1 21872]\n",
      " [21870 21871 21871  1636  2755    96   138    10 21872 21873]\n",
      " [21870  1293  1396  3119   104    54     4   267  5698 21872]\n",
      " [21870    77   115   525  1877    88    40   558  4633 21872]\n",
      " [21870    10   159  1065   179     3   274   213   773 21872]], shape=(64, 10), dtype=int32)\n"
     ]
    }
   ],
   "source": [
    "for j,i in enumerate(ds):\n",
    "    print(i[0][\"enc_input\"])\n",
    "    if j==1:break"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:tf2.0]",
   "language": "python",
   "name": "conda-env-tf2.0-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
