{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/extdrive1/anaconda3/envs/amlenv/lib/python3.6/site-packages/sklearn/cross_validation.py:41: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
      "  \"This module will be removed in 0.20.\", DeprecationWarning)\n"
     ]
    }
   ],
   "source": [
    "%load_ext autoreload\n",
    "%autoreload 2\n",
    "import math\n",
    "import numpy as np\n",
    "\n",
    "\n",
    "from sklearn.cross_validation import train_test_split\n",
    "\n",
    "from matplotlib import pyplot as plt\n",
    "%matplotlib inline\n",
    "\n",
    "SEED = 199 "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[-1.69864255  0.60587297  1.08623665 -1.35907589  0.09175171 -1.72917063\n",
      "   0.32811388 -1.33958995  1.5332894   1.61508971  0.84722853 -1.62794317\n",
      "  -1.52995417  1.12233683  1.2499034  -0.25931899  0.         -0.98360875\n",
      "  -0.14856415  0.14856415  1.5279518  -0.99834025 -0.42239949 -0.22830453\n",
      "  -1.02373279  1.02373279 -0.49957247  0.49957247 -0.97949492  0.97949492\n",
      "   0.         -0.65835683 -0.81664618  1.53652841 -0.1517957  -0.12879773\n",
      "  -0.12938451 -0.14993255 -0.14976212 -0.14856415 -0.13189904 -0.14526706\n",
      "  -0.1514585  -0.12977432 -0.15463491 -0.14438795 -0.14314887 -0.13698402\n",
      "  -0.14526706 -0.14027892 -0.14993255 -0.14118155 -0.14082114 -0.15313776\n",
      "  -0.13399234 -0.13900615 -0.15010279 -0.13698402 -0.14082114 -0.14856415\n",
      "  -0.1338033  -0.14596685 -0.1358693  -0.15078204 -0.13698402 -0.13937089\n",
      "  -0.14064061 -0.15297059 -0.14735729 -0.13605567 -0.14009775 -0.14959152\n",
      "  -0.13679882 -0.14189985 -0.14596685 -0.13304464 -0.14770303 -0.14561734\n",
      "  -0.14243638 -0.13624181  6.71558628 -0.14822025 -0.13209061 -0.15061249\n",
      "   0.20076094 -0.20076094  0.31709375 -0.31709375]]\n",
      "(20468, 88)\n",
      "(20468,)\n",
      "(13713, 88)\n",
      "(6755, 88)\n"
     ]
    }
   ],
   "source": [
     "# Load the preprocessed feature matrix and targets saved as .npy files\n",
     "# (produced elsewhere — this notebook assumes they exist in the CWD).\n",
     "X = np.load('X.npy')\n",
     "y = np.load('y.npy')\n",
     "# Peek at the first feature row to sanity-check the scaling/encoding\n",
     "print(X[0:1,:])\n",
     "\n",
     "print(X.shape, y.shape,sep='\\n')\n",
     "# Hold out 33% of the rows for evaluation; SEED fixes the shuffle\n",
     "X_train, X_test, y_train, y_test = train_test_split(\n",
     "     X, y, test_size=.33, random_state=SEED)\n",
     "\n",
     "\n",
     "\n",
     "print(X_train.shape, X_test.shape,sep='\\n')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([13713, 88])\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torch.utils.data import TensorDataset,DataLoader\n",
    "\n",
    "X_train,y_train = torch.FloatTensor(X_train), torch.FloatTensor(y_train)\n",
    "print(X_train.shape)\n",
    "batch_size = 512\n",
    "dataset = TensorDataset(X_train,y_train)\n",
    "dataloader = DataLoader(dataset, \n",
    "                              batch_size = batch_size,\n",
    "                              shuffle = True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Hyper-parameters consumed by SvDklTrainer (see trainer.py for the\n",
     "# exact meaning of each key — names below suggest, but do not prove, them).\n",
     "hyper_params = {'nn_lr':1e-2,  # presumably the neural-net feature extractor lr — confirm in trainer.py\n",
     "               'lh_lr':1e-3,  # presumably the likelihood/GP lr — confirm in trainer.py\n",
     "               'batch_size':1024,  # NOTE(review): differs from the DataLoader's batch_size = 512 above — confirm which one the trainer actually uses\n",
     "               'epochs':100,\n",
     "               'grid_size':64,\n",
     "               'grid_bounds':(-1,1),\n",
     "               'latent_dim':2,\n",
     "               'input_dim':X_train.size(1),  # number of features (88 per the outputs above)\n",
     "               'num_mixtures':4\n",
     "              }"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "999\n",
      "{'nn_lr': 0.01, 'lh_lr': 0.001, 'batch_size': 1024, 'epochs': 100, 'grid_size': 64, 'grid_bounds': (-1, 1), 'latent_dim': 2, 'input_dim': 88, 'num_mixtures': 4}\n"
     ]
    }
   ],
   "source": [
     "\n",
     "# SvDklTrainer is project-local (trainer.py); %autoreload picks up edits.\n",
     "from trainer import SvDklTrainer\n",
     "# aml_run=None — presumably disables Azure ML run logging for this local\n",
     "# session; confirm against SvDklTrainer's constructor.\n",
     "trainer = SvDklTrainer(hyper_params, aml_run=None)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 1 [002/027], Loss: 0.834310\n",
      "Train Epoch: 1 [004/027], Loss: 0.785887\n",
      "Train Epoch: 1 [006/027], Loss: 0.834269\n",
      "Train Epoch: 1 [008/027], Loss: 0.776174\n",
      "Train Epoch: 1 [010/027], Loss: 0.826071\n",
      "Train Epoch: 1 [012/027], Loss: 0.856823\n",
      "Train Epoch: 1 [014/027], Loss: 0.811506\n",
      "Train Epoch: 1 [016/027], Loss: 0.795897\n",
      "Train Epoch: 1 [018/027], Loss: 0.826306\n",
      "Train Epoch: 1 [020/027], Loss: 0.843640\n",
      "Train Epoch: 1 [022/027], Loss: 0.810999\n",
      "Train Epoch: 1 [024/027], Loss: 0.785681\n",
      "Train Epoch: 1 [026/027], Loss: 0.818965\n",
      "Train Epoch: 2 [002/027], Loss: 0.784435\n",
      "Train Epoch: 2 [004/027], Loss: 0.820190\n",
      "Train Epoch: 2 [006/027], Loss: 0.833081\n",
      "Train Epoch: 2 [008/027], Loss: 0.812980\n",
      "Train Epoch: 2 [010/027], Loss: 0.814532\n",
      "Train Epoch: 2 [012/027], Loss: 0.789288\n",
      "Train Epoch: 2 [014/027], Loss: 0.823490\n",
      "Train Epoch: 2 [016/027], Loss: 0.870302\n",
      "Train Epoch: 2 [018/027], Loss: 0.830079\n",
      "Train Epoch: 2 [020/027], Loss: 0.798621\n",
      "Train Epoch: 2 [022/027], Loss: 0.806757\n",
      "Train Epoch: 2 [024/027], Loss: 0.813771\n",
      "Train Epoch: 2 [026/027], Loss: 0.805975\n",
      "Train Epoch: 3 [002/027], Loss: 0.783423\n",
      "Train Epoch: 3 [004/027], Loss: 0.792605\n",
      "Train Epoch: 3 [006/027], Loss: 0.782973\n",
      "Train Epoch: 3 [008/027], Loss: 0.817702\n",
      "Train Epoch: 3 [010/027], Loss: 0.814274\n",
      "Train Epoch: 3 [012/027], Loss: 0.807108\n",
      "Train Epoch: 3 [014/027], Loss: 0.804603\n",
      "Train Epoch: 3 [016/027], Loss: 0.790485\n",
      "Train Epoch: 3 [018/027], Loss: 0.799453\n",
      "Train Epoch: 3 [020/027], Loss: 0.799558\n",
      "Train Epoch: 3 [022/027], Loss: 0.800345\n",
      "Train Epoch: 3 [024/027], Loss: 0.797432\n",
      "Train Epoch: 3 [026/027], Loss: 0.802213\n",
      "Train Epoch: 4 [002/027], Loss: 0.790879\n",
      "Train Epoch: 4 [004/027], Loss: 0.795745\n",
      "Train Epoch: 4 [006/027], Loss: 0.782778\n",
      "Train Epoch: 4 [008/027], Loss: 0.809178\n",
      "Train Epoch: 4 [010/027], Loss: 0.788039\n",
      "Train Epoch: 4 [012/027], Loss: 0.788803\n",
      "Train Epoch: 4 [014/027], Loss: 0.786826\n",
      "Train Epoch: 4 [016/027], Loss: 0.808552\n",
      "Train Epoch: 4 [018/027], Loss: 0.790036\n",
      "Train Epoch: 4 [020/027], Loss: 0.825363\n",
      "Train Epoch: 4 [022/027], Loss: 0.774285\n",
      "Train Epoch: 4 [024/027], Loss: 0.796355\n",
      "Train Epoch: 4 [026/027], Loss: 0.803372\n",
      "Train Epoch: 5 [002/027], Loss: 0.790694\n",
      "Train Epoch: 5 [004/027], Loss: 0.806653\n",
      "Train Epoch: 5 [006/027], Loss: 0.784401\n",
      "Train Epoch: 5 [008/027], Loss: 0.783003\n",
      "Train Epoch: 5 [010/027], Loss: 0.805806\n",
      "Train Epoch: 5 [012/027], Loss: 0.783823\n",
      "Train Epoch: 5 [014/027], Loss: 0.781593\n",
      "Train Epoch: 5 [016/027], Loss: 0.794797\n",
      "Train Epoch: 5 [018/027], Loss: 0.777916\n",
      "Train Epoch: 5 [020/027], Loss: 0.774501\n",
      "Train Epoch: 5 [022/027], Loss: 0.783825\n",
      "Train Epoch: 5 [024/027], Loss: 0.790547\n",
      "Train Epoch: 5 [026/027], Loss: 0.785971\n",
      "Train Epoch: 6 [002/027], Loss: 0.789927\n",
      "Train Epoch: 6 [004/027], Loss: 0.767340\n",
      "Train Epoch: 6 [006/027], Loss: 0.786795\n",
      "Train Epoch: 6 [008/027], Loss: 0.790173\n",
      "Train Epoch: 6 [010/027], Loss: 0.774259\n",
      "Train Epoch: 6 [012/027], Loss: 0.779405\n",
      "Train Epoch: 6 [014/027], Loss: 0.786987\n",
      "Train Epoch: 6 [016/027], Loss: 0.761499\n",
      "Train Epoch: 6 [018/027], Loss: 0.803841\n",
      "Train Epoch: 6 [020/027], Loss: 0.786918\n",
      "Train Epoch: 6 [022/027], Loss: 0.781281\n",
      "Train Epoch: 6 [024/027], Loss: 0.769491\n",
      "Train Epoch: 6 [026/027], Loss: 0.779637\n",
      "Train Epoch: 7 [002/027], Loss: 0.772331\n",
      "Train Epoch: 7 [004/027], Loss: 0.814595\n",
      "Train Epoch: 7 [006/027], Loss: 0.807081\n",
      "Train Epoch: 7 [008/027], Loss: 0.757735\n",
      "Train Epoch: 7 [010/027], Loss: 0.760134\n",
      "Train Epoch: 7 [012/027], Loss: 0.795885\n",
      "Train Epoch: 7 [014/027], Loss: 0.778656\n",
      "Train Epoch: 7 [016/027], Loss: 0.756770\n",
      "Train Epoch: 7 [018/027], Loss: 0.760498\n",
      "Train Epoch: 7 [020/027], Loss: 0.786673\n",
      "Train Epoch: 7 [022/027], Loss: 0.793245\n",
      "Train Epoch: 7 [024/027], Loss: 0.781580\n",
      "Train Epoch: 7 [026/027], Loss: 0.768419\n",
      "Train Epoch: 8 [002/027], Loss: 0.780336\n",
      "Train Epoch: 8 [004/027], Loss: 0.771434\n",
      "Train Epoch: 8 [006/027], Loss: 0.771020\n",
      "Train Epoch: 8 [008/027], Loss: 0.759632\n",
      "Train Epoch: 8 [010/027], Loss: 0.770960\n",
      "Train Epoch: 8 [012/027], Loss: 0.777496\n",
      "Train Epoch: 8 [014/027], Loss: 0.779346\n",
      "Train Epoch: 8 [016/027], Loss: 0.809650\n",
      "Train Epoch: 8 [018/027], Loss: 0.758890\n",
      "Train Epoch: 8 [020/027], Loss: 0.785565\n",
      "Train Epoch: 8 [022/027], Loss: 0.739073\n",
      "Train Epoch: 8 [024/027], Loss: 0.781021\n",
      "Train Epoch: 8 [026/027], Loss: 0.758407\n",
      "Train Epoch: 9 [002/027], Loss: 0.771312\n",
      "Train Epoch: 9 [004/027], Loss: 0.762660\n",
      "Train Epoch: 9 [006/027], Loss: 0.742769\n",
      "Train Epoch: 9 [008/027], Loss: 0.758140\n",
      "Train Epoch: 9 [010/027], Loss: 0.757597\n",
      "Train Epoch: 9 [012/027], Loss: 0.760699\n",
      "Train Epoch: 9 [014/027], Loss: 0.768954\n",
      "Train Epoch: 9 [016/027], Loss: 0.742293\n",
      "Train Epoch: 9 [018/027], Loss: 0.746183\n",
      "Train Epoch: 9 [020/027], Loss: 0.758202\n",
      "Train Epoch: 9 [022/027], Loss: 0.771281\n",
      "Train Epoch: 9 [024/027], Loss: 0.744812\n",
      "Train Epoch: 9 [026/027], Loss: 0.768939\n",
      "Train Epoch: 10 [002/027], Loss: 0.757592\n",
      "Train Epoch: 10 [004/027], Loss: 0.766166\n",
      "Train Epoch: 10 [006/027], Loss: 0.753240\n",
      "Train Epoch: 10 [008/027], Loss: 0.758909\n",
      "Train Epoch: 10 [010/027], Loss: 0.761721\n",
      "Train Epoch: 10 [012/027], Loss: 0.739533\n",
      "Train Epoch: 10 [014/027], Loss: 0.751007\n",
      "Train Epoch: 10 [016/027], Loss: 0.760893\n",
      "Train Epoch: 10 [018/027], Loss: 0.764591\n",
      "Train Epoch: 10 [020/027], Loss: 0.746724\n",
      "Train Epoch: 10 [022/027], Loss: 0.760814\n",
      "Train Epoch: 10 [024/027], Loss: 0.748503\n",
      "Train Epoch: 10 [026/027], Loss: 0.747833\n",
      "Train Epoch: 11 [002/027], Loss: 0.753208\n",
      "Train Epoch: 11 [004/027], Loss: 0.736847\n",
      "Train Epoch: 11 [006/027], Loss: 0.735775\n",
      "Train Epoch: 11 [008/027], Loss: 0.765504\n",
      "Train Epoch: 11 [010/027], Loss: 0.733238\n",
      "Train Epoch: 11 [012/027], Loss: 0.749604\n",
      "Train Epoch: 11 [014/027], Loss: 0.752922\n",
      "Train Epoch: 11 [016/027], Loss: 0.740008\n",
      "Train Epoch: 11 [018/027], Loss: 0.765760\n",
      "Train Epoch: 11 [020/027], Loss: 0.749385\n",
      "Train Epoch: 11 [022/027], Loss: 0.733396\n",
      "Train Epoch: 11 [024/027], Loss: 0.741123\n",
      "Train Epoch: 11 [026/027], Loss: 0.741986\n",
      "Train Epoch: 12 [002/027], Loss: 0.759936\n",
      "Train Epoch: 12 [004/027], Loss: 0.741212\n",
      "Train Epoch: 12 [006/027], Loss: 0.742851\n",
      "Train Epoch: 12 [008/027], Loss: 0.765577\n",
      "Train Epoch: 12 [010/027], Loss: 0.736153\n",
      "Train Epoch: 12 [012/027], Loss: 0.785056\n",
      "Train Epoch: 12 [014/027], Loss: 0.767634\n",
      "Train Epoch: 12 [016/027], Loss: 0.745897\n",
      "Train Epoch: 12 [018/027], Loss: 0.742275\n",
      "Train Epoch: 12 [020/027], Loss: 0.740071\n",
      "Train Epoch: 12 [022/027], Loss: 0.733569\n",
      "Train Epoch: 12 [024/027], Loss: 0.734858\n",
      "Train Epoch: 12 [026/027], Loss: 0.733163\n",
      "Train Epoch: 13 [002/027], Loss: 0.740128\n",
      "Train Epoch: 13 [004/027], Loss: 0.740659\n",
      "Train Epoch: 13 [006/027], Loss: 0.741424\n",
      "Train Epoch: 13 [008/027], Loss: 0.743927\n",
      "Train Epoch: 13 [010/027], Loss: 0.739534\n",
      "Train Epoch: 13 [012/027], Loss: 0.747224\n",
      "Train Epoch: 13 [014/027], Loss: 0.739155\n",
      "Train Epoch: 13 [016/027], Loss: 0.728161\n",
      "Train Epoch: 13 [018/027], Loss: 0.727440\n",
      "Train Epoch: 13 [020/027], Loss: 0.729483\n",
      "Train Epoch: 13 [022/027], Loss: 0.755571\n",
      "Train Epoch: 13 [024/027], Loss: 0.719787\n",
      "Train Epoch: 13 [026/027], Loss: 0.726557\n",
      "Train Epoch: 14 [002/027], Loss: 0.752246\n",
      "Train Epoch: 14 [004/027], Loss: 0.726408\n",
      "Train Epoch: 14 [006/027], Loss: 0.728527\n",
      "Train Epoch: 14 [008/027], Loss: 0.718037\n",
      "Train Epoch: 14 [010/027], Loss: 0.734004\n",
      "Train Epoch: 14 [012/027], Loss: 0.719195\n",
      "Train Epoch: 14 [014/027], Loss: 0.734900\n",
      "Train Epoch: 14 [016/027], Loss: 0.726370\n",
      "Train Epoch: 14 [018/027], Loss: 0.733049\n",
      "Train Epoch: 14 [020/027], Loss: 0.728725\n",
      "Train Epoch: 14 [022/027], Loss: 0.756568\n",
      "Train Epoch: 14 [024/027], Loss: 0.744723\n",
      "Train Epoch: 14 [026/027], Loss: 0.718114\n",
      "Train Epoch: 15 [002/027], Loss: 0.727813\n",
      "Train Epoch: 15 [004/027], Loss: 0.740544\n",
      "Train Epoch: 15 [006/027], Loss: 0.724855\n",
      "Train Epoch: 15 [008/027], Loss: 0.720916\n",
      "Train Epoch: 15 [010/027], Loss: 0.720120\n",
      "Train Epoch: 15 [012/027], Loss: 0.716628\n",
      "Train Epoch: 15 [014/027], Loss: 0.713979\n",
      "Train Epoch: 15 [016/027], Loss: 0.721854\n",
      "Train Epoch: 15 [018/027], Loss: 0.717654\n",
      "Train Epoch: 15 [020/027], Loss: 0.727931\n",
      "Train Epoch: 15 [022/027], Loss: 0.711505\n",
      "Train Epoch: 15 [024/027], Loss: 0.729329\n",
      "Train Epoch: 15 [026/027], Loss: 0.714434\n",
      "Train Epoch: 16 [002/027], Loss: 0.717537\n",
      "Train Epoch: 16 [004/027], Loss: 0.726455\n",
      "Train Epoch: 16 [006/027], Loss: 0.743628\n",
      "Train Epoch: 16 [008/027], Loss: 0.720973\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 16 [010/027], Loss: 0.725159\n",
      "Train Epoch: 16 [012/027], Loss: 0.722996\n",
      "Train Epoch: 16 [014/027], Loss: 0.711511\n",
      "Train Epoch: 16 [016/027], Loss: 0.712018\n",
      "Train Epoch: 16 [018/027], Loss: 0.720741\n",
      "Train Epoch: 16 [020/027], Loss: 0.709539\n",
      "Train Epoch: 16 [022/027], Loss: 0.719532\n",
      "Train Epoch: 16 [024/027], Loss: 0.725175\n",
      "Train Epoch: 16 [026/027], Loss: 0.730467\n",
      "Train Epoch: 17 [002/027], Loss: 0.709272\n",
      "Train Epoch: 17 [004/027], Loss: 0.730999\n",
      "Train Epoch: 17 [006/027], Loss: 0.716975\n",
      "Train Epoch: 17 [008/027], Loss: 0.707642\n",
      "Train Epoch: 17 [010/027], Loss: 0.765721\n",
      "Train Epoch: 17 [012/027], Loss: 0.713549\n",
      "Train Epoch: 17 [014/027], Loss: 0.718043\n",
      "Train Epoch: 17 [016/027], Loss: 0.707577\n",
      "Train Epoch: 17 [018/027], Loss: 0.735463\n",
      "Train Epoch: 17 [020/027], Loss: 0.702964\n",
      "Train Epoch: 17 [022/027], Loss: 0.703486\n",
      "Train Epoch: 17 [024/027], Loss: 0.708759\n",
      "Train Epoch: 17 [026/027], Loss: 0.703087\n",
      "Train Epoch: 18 [002/027], Loss: 0.709483\n",
      "Train Epoch: 18 [004/027], Loss: 0.727625\n",
      "Train Epoch: 18 [006/027], Loss: 0.710716\n",
      "Train Epoch: 18 [008/027], Loss: 0.733659\n",
      "Train Epoch: 18 [010/027], Loss: 0.713183\n",
      "Train Epoch: 18 [012/027], Loss: 0.700308\n",
      "Train Epoch: 18 [014/027], Loss: 0.702950\n",
      "Train Epoch: 18 [016/027], Loss: 0.726901\n",
      "Train Epoch: 18 [018/027], Loss: 0.728131\n",
      "Train Epoch: 18 [020/027], Loss: 0.709803\n",
      "Train Epoch: 18 [022/027], Loss: 0.698702\n",
      "Train Epoch: 18 [024/027], Loss: 0.704355\n",
      "Train Epoch: 18 [026/027], Loss: 0.700590\n",
      "Train Epoch: 19 [002/027], Loss: 0.737374\n",
      "Train Epoch: 19 [004/027], Loss: 0.722445\n",
      "Train Epoch: 19 [006/027], Loss: 0.754988\n",
      "Train Epoch: 19 [008/027], Loss: 0.711948\n",
      "Train Epoch: 19 [010/027], Loss: 0.700929\n",
      "Train Epoch: 19 [012/027], Loss: 0.716481\n",
      "Train Epoch: 19 [014/027], Loss: 0.716111\n",
      "Train Epoch: 19 [016/027], Loss: 0.725043\n",
      "Train Epoch: 19 [018/027], Loss: 0.700719\n",
      "Train Epoch: 19 [020/027], Loss: 0.710845\n",
      "Train Epoch: 19 [022/027], Loss: 0.718460\n",
      "Train Epoch: 19 [024/027], Loss: 0.708762\n",
      "Train Epoch: 19 [026/027], Loss: 0.703240\n",
      "Train Epoch: 20 [002/027], Loss: 0.700561\n",
      "Train Epoch: 20 [004/027], Loss: 0.694303\n",
      "Train Epoch: 20 [006/027], Loss: 0.696016\n",
      "Train Epoch: 20 [008/027], Loss: 0.705317\n",
      "Train Epoch: 20 [010/027], Loss: 0.703408\n",
      "Train Epoch: 20 [012/027], Loss: 0.693043\n",
      "Train Epoch: 20 [014/027], Loss: 0.710128\n",
      "Train Epoch: 20 [016/027], Loss: 0.717671\n",
      "Train Epoch: 20 [018/027], Loss: 0.696983\n",
      "Train Epoch: 20 [020/027], Loss: 0.709968\n",
      "Train Epoch: 20 [022/027], Loss: 0.694526\n",
      "Train Epoch: 20 [024/027], Loss: 0.695837\n",
      "Train Epoch: 20 [026/027], Loss: 0.701459\n",
      "Train Epoch: 21 [002/027], Loss: 0.717574\n",
      "Train Epoch: 21 [004/027], Loss: 0.719413\n",
      "Train Epoch: 21 [006/027], Loss: 0.696447\n",
      "Train Epoch: 21 [008/027], Loss: 0.696688\n",
      "Train Epoch: 21 [010/027], Loss: 0.723864\n",
      "Train Epoch: 21 [012/027], Loss: 0.704917\n",
      "Train Epoch: 21 [014/027], Loss: 0.724617\n",
      "Train Epoch: 21 [016/027], Loss: 0.703499\n",
      "Train Epoch: 21 [018/027], Loss: 0.690584\n",
      "Train Epoch: 21 [020/027], Loss: 0.701556\n",
      "Train Epoch: 21 [022/027], Loss: 0.689268\n",
      "Train Epoch: 21 [024/027], Loss: 0.693251\n",
      "Train Epoch: 21 [026/027], Loss: 0.701082\n",
      "Train Epoch: 22 [002/027], Loss: 0.691009\n",
      "Train Epoch: 22 [004/027], Loss: 0.702443\n",
      "Train Epoch: 22 [006/027], Loss: 0.701862\n",
      "Train Epoch: 22 [008/027], Loss: 0.729804\n",
      "Train Epoch: 22 [010/027], Loss: 0.700882\n",
      "Train Epoch: 22 [012/027], Loss: 0.696293\n",
      "Train Epoch: 22 [014/027], Loss: 0.768786\n",
      "Train Epoch: 22 [016/027], Loss: 0.744995\n",
      "Train Epoch: 22 [018/027], Loss: 0.688898\n",
      "Train Epoch: 22 [020/027], Loss: 0.712952\n",
      "Train Epoch: 22 [022/027], Loss: 0.697936\n",
      "Train Epoch: 22 [024/027], Loss: 0.701586\n",
      "Train Epoch: 22 [026/027], Loss: 0.694628\n",
      "Train Epoch: 23 [002/027], Loss: 0.697671\n",
      "Train Epoch: 23 [004/027], Loss: 0.698729\n",
      "Train Epoch: 23 [006/027], Loss: 0.690442\n",
      "Train Epoch: 23 [008/027], Loss: 0.689235\n",
      "Train Epoch: 23 [010/027], Loss: 0.707772\n",
      "Train Epoch: 23 [012/027], Loss: 0.729487\n",
      "Train Epoch: 23 [014/027], Loss: 0.683597\n",
      "Train Epoch: 23 [016/027], Loss: 0.693657\n",
      "Train Epoch: 23 [018/027], Loss: 0.731825\n",
      "Train Epoch: 23 [020/027], Loss: 0.761132\n",
      "Train Epoch: 23 [022/027], Loss: 0.687194\n",
      "Train Epoch: 23 [024/027], Loss: 0.683808\n",
      "Train Epoch: 23 [026/027], Loss: 0.699109\n",
      "Train Epoch: 24 [002/027], Loss: 0.689745\n",
      "Train Epoch: 24 [004/027], Loss: 0.684278\n",
      "Train Epoch: 24 [006/027], Loss: 0.688980\n",
      "Train Epoch: 24 [008/027], Loss: 0.692301\n",
      "Train Epoch: 24 [010/027], Loss: 0.690221\n",
      "Train Epoch: 24 [012/027], Loss: 0.687500\n",
      "Train Epoch: 24 [014/027], Loss: 0.707717\n",
      "Train Epoch: 24 [016/027], Loss: 0.684510\n",
      "Train Epoch: 24 [018/027], Loss: 0.688644\n",
      "Train Epoch: 24 [020/027], Loss: 0.683233\n",
      "Train Epoch: 24 [022/027], Loss: 0.681545\n",
      "Train Epoch: 24 [024/027], Loss: 0.681917\n",
      "Train Epoch: 24 [026/027], Loss: 0.691145\n",
      "Train Epoch: 25 [002/027], Loss: 0.697585\n",
      "Train Epoch: 25 [004/027], Loss: 0.683600\n",
      "Train Epoch: 25 [006/027], Loss: 0.693124\n",
      "Train Epoch: 25 [008/027], Loss: 0.684146\n",
      "Train Epoch: 25 [010/027], Loss: 0.678775\n",
      "Train Epoch: 25 [012/027], Loss: 0.682013\n",
      "Train Epoch: 25 [014/027], Loss: 0.688612\n",
      "Train Epoch: 25 [016/027], Loss: 0.682240\n",
      "Train Epoch: 25 [018/027], Loss: 0.690583\n",
      "Train Epoch: 25 [020/027], Loss: 0.682230\n",
      "Train Epoch: 25 [022/027], Loss: 0.719515\n",
      "Train Epoch: 25 [024/027], Loss: 0.672682\n",
      "Train Epoch: 25 [026/027], Loss: 0.683456\n",
      "Train Epoch: 26 [002/027], Loss: 0.706014\n",
      "Train Epoch: 26 [004/027], Loss: 0.695807\n",
      "Train Epoch: 26 [006/027], Loss: 0.685837\n",
      "Train Epoch: 26 [008/027], Loss: 0.679239\n",
      "Train Epoch: 26 [010/027], Loss: 0.683873\n",
      "Train Epoch: 26 [012/027], Loss: 0.682219\n",
      "Train Epoch: 26 [014/027], Loss: 0.690785\n",
      "Train Epoch: 26 [016/027], Loss: 0.670065\n",
      "Train Epoch: 26 [018/027], Loss: 0.679362\n",
      "Train Epoch: 26 [020/027], Loss: 0.686716\n",
      "Train Epoch: 26 [022/027], Loss: 0.674266\n",
      "Train Epoch: 26 [024/027], Loss: 0.686816\n",
      "Train Epoch: 26 [026/027], Loss: 0.674800\n",
      "Train Epoch: 27 [002/027], Loss: 0.691457\n",
      "Train Epoch: 27 [004/027], Loss: 0.691028\n",
      "Train Epoch: 27 [006/027], Loss: 0.674005\n",
      "Train Epoch: 27 [008/027], Loss: 0.687626\n",
      "Train Epoch: 27 [010/027], Loss: 0.675516\n",
      "Train Epoch: 27 [012/027], Loss: 0.674270\n",
      "Train Epoch: 27 [014/027], Loss: 0.689653\n",
      "Train Epoch: 27 [016/027], Loss: 0.699510\n",
      "Train Epoch: 27 [018/027], Loss: 0.676202\n",
      "Train Epoch: 27 [020/027], Loss: 0.673946\n",
      "Train Epoch: 27 [022/027], Loss: 0.672680\n",
      "Train Epoch: 27 [024/027], Loss: 0.688582\n",
      "Train Epoch: 27 [026/027], Loss: 0.677021\n",
      "Train Epoch: 28 [002/027], Loss: 0.678852\n",
      "Train Epoch: 28 [004/027], Loss: 0.675034\n",
      "Train Epoch: 28 [006/027], Loss: 0.681633\n",
      "Train Epoch: 28 [008/027], Loss: 0.701115\n",
      "Train Epoch: 28 [010/027], Loss: 0.680083\n",
      "Train Epoch: 28 [012/027], Loss: 0.687772\n",
      "Train Epoch: 28 [014/027], Loss: 0.721926\n",
      "Train Epoch: 28 [016/027], Loss: 0.677866\n",
      "Train Epoch: 28 [018/027], Loss: 0.668016\n",
      "Train Epoch: 28 [020/027], Loss: 0.675421\n",
      "Train Epoch: 28 [022/027], Loss: 0.698367\n",
      "Train Epoch: 28 [024/027], Loss: 0.709035\n",
      "Train Epoch: 28 [026/027], Loss: 0.684577\n",
      "Train Epoch: 29 [002/027], Loss: 0.679006\n",
      "Train Epoch: 29 [004/027], Loss: 0.720779\n",
      "Train Epoch: 29 [006/027], Loss: 0.684765\n",
      "Train Epoch: 29 [008/027], Loss: 0.686268\n",
      "Train Epoch: 29 [010/027], Loss: 0.689631\n",
      "Train Epoch: 29 [012/027], Loss: 0.696856\n",
      "Train Epoch: 29 [014/027], Loss: 0.675389\n",
      "Train Epoch: 29 [016/027], Loss: 0.669500\n",
      "Train Epoch: 29 [018/027], Loss: 0.673706\n",
      "Train Epoch: 29 [020/027], Loss: 0.675456\n",
      "Train Epoch: 29 [022/027], Loss: 0.675951\n",
      "Train Epoch: 29 [024/027], Loss: 0.693814\n",
      "Train Epoch: 29 [026/027], Loss: 0.678517\n",
      "Train Epoch: 30 [002/027], Loss: 0.675425\n",
      "Train Epoch: 30 [004/027], Loss: 0.706413\n",
      "Train Epoch: 30 [006/027], Loss: 0.676336\n",
      "Train Epoch: 30 [008/027], Loss: 0.672510\n",
      "Train Epoch: 30 [010/027], Loss: 0.713685\n",
      "Train Epoch: 30 [012/027], Loss: 0.698357\n",
      "Train Epoch: 30 [014/027], Loss: 0.667398\n",
      "Train Epoch: 30 [016/027], Loss: 0.678425\n",
      "Train Epoch: 30 [018/027], Loss: 0.699091\n",
      "Train Epoch: 30 [020/027], Loss: 0.670957\n",
      "Train Epoch: 30 [022/027], Loss: 0.665863\n",
      "Train Epoch: 30 [024/027], Loss: 0.682381\n",
      "Train Epoch: 30 [026/027], Loss: 0.688524\n",
      "Train Epoch: 31 [002/027], Loss: 0.674341\n",
      "Train Epoch: 31 [004/027], Loss: 0.698617\n",
      "Train Epoch: 31 [006/027], Loss: 0.678217\n",
      "Train Epoch: 31 [008/027], Loss: 0.670318\n",
      "Train Epoch: 31 [010/027], Loss: 0.667228\n",
      "Train Epoch: 31 [012/027], Loss: 0.706629\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 31 [014/027], Loss: 0.671104\n",
      "Train Epoch: 31 [016/027], Loss: 0.665000\n",
      "Train Epoch: 31 [018/027], Loss: 0.672514\n",
      "Train Epoch: 31 [020/027], Loss: 0.668807\n",
      "Train Epoch: 31 [022/027], Loss: 0.681458\n",
      "Train Epoch: 31 [024/027], Loss: 0.683704\n",
      "Train Epoch: 31 [026/027], Loss: 0.673486\n",
      "Train Epoch: 32 [002/027], Loss: 0.670257\n",
      "Train Epoch: 32 [004/027], Loss: 0.665042\n",
      "Train Epoch: 32 [006/027], Loss: 0.674571\n",
      "Train Epoch: 32 [008/027], Loss: 0.679491\n",
      "Train Epoch: 32 [010/027], Loss: 0.665651\n",
      "Train Epoch: 32 [012/027], Loss: 0.669578\n",
      "Train Epoch: 32 [014/027], Loss: 0.672896\n",
      "Train Epoch: 32 [016/027], Loss: 0.669137\n",
      "Train Epoch: 32 [018/027], Loss: 0.676576\n",
      "Train Epoch: 32 [020/027], Loss: 0.673533\n",
      "Train Epoch: 32 [022/027], Loss: 0.672087\n",
      "Train Epoch: 32 [024/027], Loss: 0.666530\n",
      "Train Epoch: 32 [026/027], Loss: 0.669767\n",
      "Train Epoch: 33 [002/027], Loss: 0.679305\n",
      "Train Epoch: 33 [004/027], Loss: 0.665434\n",
      "Train Epoch: 33 [006/027], Loss: 0.678378\n",
      "Train Epoch: 33 [008/027], Loss: 0.672270\n",
      "Train Epoch: 33 [010/027], Loss: 0.686203\n",
      "Train Epoch: 33 [012/027], Loss: 0.668900\n",
      "Train Epoch: 33 [014/027], Loss: 0.668162\n",
      "Train Epoch: 33 [016/027], Loss: 0.658695\n",
      "Train Epoch: 33 [018/027], Loss: 0.661539\n",
      "Train Epoch: 33 [020/027], Loss: 0.673996\n",
      "Train Epoch: 33 [022/027], Loss: 0.667259\n",
      "Train Epoch: 33 [024/027], Loss: 0.688941\n",
      "Train Epoch: 33 [026/027], Loss: 0.686650\n",
      "Train Epoch: 34 [002/027], Loss: 0.666726\n",
      "Train Epoch: 34 [004/027], Loss: 0.737699\n",
      "Train Epoch: 34 [006/027], Loss: 0.659167\n",
      "Train Epoch: 34 [008/027], Loss: 0.672676\n",
      "Train Epoch: 34 [010/027], Loss: 0.670887\n",
      "Train Epoch: 34 [012/027], Loss: 0.673198\n",
      "Train Epoch: 34 [014/027], Loss: 0.680466\n",
      "Train Epoch: 34 [016/027], Loss: 0.669704\n",
      "Train Epoch: 34 [018/027], Loss: 0.651845\n",
      "Train Epoch: 34 [020/027], Loss: 0.670529\n",
      "Train Epoch: 34 [022/027], Loss: 0.673694\n",
      "Train Epoch: 34 [024/027], Loss: 0.688886\n",
      "Train Epoch: 34 [026/027], Loss: 0.663556\n",
      "Train Epoch: 35 [002/027], Loss: 0.688533\n",
      "Train Epoch: 35 [004/027], Loss: 0.675403\n",
      "Train Epoch: 35 [006/027], Loss: 0.747972\n",
      "Train Epoch: 35 [008/027], Loss: 0.660605\n",
      "Train Epoch: 35 [010/027], Loss: 0.671766\n",
      "Train Epoch: 35 [012/027], Loss: 0.668740\n",
      "Train Epoch: 35 [014/027], Loss: 0.661855\n",
      "Train Epoch: 35 [016/027], Loss: 0.660970\n",
      "Train Epoch: 35 [018/027], Loss: 0.660993\n",
      "Train Epoch: 35 [020/027], Loss: 0.665176\n",
      "Train Epoch: 35 [022/027], Loss: 0.661485\n",
      "Train Epoch: 35 [024/027], Loss: 0.671983\n",
      "Train Epoch: 35 [026/027], Loss: 0.670868\n",
      "Train Epoch: 36 [002/027], Loss: 0.665283\n",
      "Train Epoch: 36 [004/027], Loss: 0.660551\n",
      "Train Epoch: 36 [006/027], Loss: 0.669254\n",
      "Train Epoch: 36 [008/027], Loss: 0.680276\n",
      "Train Epoch: 36 [010/027], Loss: 0.668447\n",
      "Train Epoch: 36 [012/027], Loss: 0.668671\n",
      "Train Epoch: 36 [014/027], Loss: 0.661914\n",
      "Train Epoch: 36 [016/027], Loss: 0.693347\n",
      "Train Epoch: 36 [018/027], Loss: 0.647485\n",
      "Train Epoch: 36 [020/027], Loss: 0.674331\n",
      "Train Epoch: 36 [022/027], Loss: 0.661182\n",
      "Train Epoch: 36 [024/027], Loss: 0.663009\n",
      "Train Epoch: 36 [026/027], Loss: 0.681814\n",
      "Train Epoch: 37 [002/027], Loss: 0.652376\n",
      "Train Epoch: 37 [004/027], Loss: 0.662613\n",
      "Train Epoch: 37 [006/027], Loss: 0.667354\n",
      "Train Epoch: 37 [008/027], Loss: 0.661956\n",
      "Train Epoch: 37 [010/027], Loss: 0.657486\n",
      "Train Epoch: 37 [012/027], Loss: 0.668104\n",
      "Train Epoch: 37 [014/027], Loss: 0.662777\n",
      "Train Epoch: 37 [016/027], Loss: 0.667985\n",
      "Train Epoch: 37 [018/027], Loss: 0.650542\n",
      "Train Epoch: 37 [020/027], Loss: 0.653065\n",
      "Train Epoch: 37 [022/027], Loss: 0.661110\n",
      "Train Epoch: 37 [024/027], Loss: 0.663998\n",
      "Train Epoch: 37 [026/027], Loss: 0.658106\n",
      "Train Epoch: 38 [002/027], Loss: 0.660677\n",
      "Train Epoch: 38 [004/027], Loss: 0.675641\n",
      "Train Epoch: 38 [006/027], Loss: 0.685189\n",
      "Train Epoch: 38 [008/027], Loss: 0.664014\n",
      "Train Epoch: 38 [010/027], Loss: 0.645707\n",
      "Train Epoch: 38 [012/027], Loss: 0.662806\n",
      "Train Epoch: 38 [014/027], Loss: 0.663553\n",
      "Train Epoch: 38 [016/027], Loss: 0.672159\n",
      "Train Epoch: 38 [018/027], Loss: 0.669879\n",
      "Train Epoch: 38 [020/027], Loss: 0.678995\n",
      "Train Epoch: 38 [022/027], Loss: 0.658612\n",
      "Train Epoch: 38 [024/027], Loss: 0.664491\n",
      "Train Epoch: 38 [026/027], Loss: 0.659432\n",
      "Train Epoch: 39 [002/027], Loss: 0.676483\n",
      "Train Epoch: 39 [004/027], Loss: 0.679575\n",
      "Train Epoch: 39 [006/027], Loss: 0.672649\n",
      "Train Epoch: 39 [008/027], Loss: 0.662647\n",
      "Train Epoch: 39 [010/027], Loss: 0.658602\n",
      "Train Epoch: 39 [012/027], Loss: 0.676377\n",
      "Train Epoch: 39 [014/027], Loss: 0.730749\n",
      "Train Epoch: 39 [016/027], Loss: 0.663203\n",
      "Train Epoch: 39 [018/027], Loss: 0.653364\n",
      "Train Epoch: 39 [020/027], Loss: 0.663991\n",
      "Train Epoch: 39 [022/027], Loss: 0.656479\n",
      "Train Epoch: 39 [024/027], Loss: 0.692147\n",
      "Train Epoch: 39 [026/027], Loss: 0.663487\n",
      "Train Epoch: 40 [002/027], Loss: 0.660865\n",
      "Train Epoch: 40 [004/027], Loss: 0.671434\n",
      "Train Epoch: 40 [006/027], Loss: 0.670301\n",
      "Train Epoch: 40 [008/027], Loss: 0.652297\n",
      "Train Epoch: 40 [010/027], Loss: 0.665952\n",
      "Train Epoch: 40 [012/027], Loss: 0.666365\n",
      "Train Epoch: 40 [014/027], Loss: 0.658613\n",
      "Train Epoch: 40 [016/027], Loss: 0.654372\n",
      "Train Epoch: 40 [018/027], Loss: 0.650919\n",
      "Train Epoch: 40 [020/027], Loss: 0.670370\n",
      "Train Epoch: 40 [022/027], Loss: 0.657821\n",
      "Train Epoch: 40 [024/027], Loss: 0.650397\n",
      "Train Epoch: 40 [026/027], Loss: 0.655095\n",
      "Train Epoch: 41 [002/027], Loss: 0.657996\n",
      "Train Epoch: 41 [004/027], Loss: 0.657093\n",
      "Train Epoch: 41 [006/027], Loss: 0.655675\n",
      "Train Epoch: 41 [008/027], Loss: 0.652166\n",
      "Train Epoch: 41 [010/027], Loss: 0.652774\n",
      "Train Epoch: 41 [012/027], Loss: 0.658987\n",
      "Train Epoch: 41 [014/027], Loss: 0.658416\n",
      "Train Epoch: 41 [016/027], Loss: 0.652990\n",
      "Train Epoch: 41 [018/027], Loss: 0.660535\n",
      "Train Epoch: 41 [020/027], Loss: 0.659740\n",
      "Train Epoch: 41 [022/027], Loss: 0.659678\n",
      "Train Epoch: 41 [024/027], Loss: 0.653389\n",
      "Train Epoch: 41 [026/027], Loss: 0.654580\n",
      "Train Epoch: 42 [002/027], Loss: 0.661861\n",
      "Train Epoch: 42 [004/027], Loss: 0.658619\n",
      "Train Epoch: 42 [006/027], Loss: 0.653345\n",
      "Train Epoch: 42 [008/027], Loss: 0.660436\n",
      "Train Epoch: 42 [010/027], Loss: 0.689003\n",
      "Train Epoch: 42 [012/027], Loss: 0.648736\n",
      "Train Epoch: 42 [014/027], Loss: 0.661561\n",
      "Train Epoch: 42 [016/027], Loss: 0.656157\n",
      "Train Epoch: 42 [018/027], Loss: 0.650452\n",
      "Train Epoch: 42 [020/027], Loss: 0.669497\n",
      "Train Epoch: 42 [022/027], Loss: 0.654884\n",
      "Train Epoch: 42 [024/027], Loss: 0.645697\n",
      "Train Epoch: 42 [026/027], Loss: 0.656667\n",
      "Train Epoch: 43 [002/027], Loss: 0.649239\n",
      "Train Epoch: 43 [004/027], Loss: 0.664287\n",
      "Train Epoch: 43 [006/027], Loss: 0.643197\n",
      "Train Epoch: 43 [008/027], Loss: 0.650545\n",
      "Train Epoch: 43 [010/027], Loss: 0.650185\n",
      "Train Epoch: 43 [012/027], Loss: 0.668103\n",
      "Train Epoch: 43 [014/027], Loss: 0.656967\n",
      "Train Epoch: 43 [016/027], Loss: 0.655515\n",
      "Train Epoch: 43 [018/027], Loss: 0.654299\n",
      "Train Epoch: 43 [020/027], Loss: 0.652375\n",
      "Train Epoch: 43 [022/027], Loss: 0.655775\n",
      "Train Epoch: 43 [024/027], Loss: 0.671030\n",
      "Train Epoch: 43 [026/027], Loss: 0.655626\n",
      "Train Epoch: 44 [002/027], Loss: 0.639589\n",
      "Train Epoch: 44 [004/027], Loss: 0.654186\n",
      "Train Epoch: 44 [006/027], Loss: 0.657885\n",
      "Train Epoch: 44 [008/027], Loss: 0.650904\n",
      "Train Epoch: 44 [010/027], Loss: 0.646198\n",
      "Train Epoch: 44 [012/027], Loss: 0.655804\n",
      "Train Epoch: 44 [014/027], Loss: 0.654930\n",
      "Train Epoch: 44 [016/027], Loss: 0.671091\n",
      "Train Epoch: 44 [018/027], Loss: 0.669572\n",
      "Train Epoch: 44 [020/027], Loss: 0.656010\n",
      "Train Epoch: 44 [022/027], Loss: 0.650232\n",
      "Train Epoch: 44 [024/027], Loss: 0.655765\n",
      "Train Epoch: 44 [026/027], Loss: 0.657016\n",
      "Train Epoch: 45 [002/027], Loss: 0.663762\n",
      "Train Epoch: 45 [004/027], Loss: 0.656487\n",
      "Train Epoch: 45 [006/027], Loss: 0.671153\n",
      "Train Epoch: 45 [008/027], Loss: 0.646929\n",
      "Train Epoch: 45 [010/027], Loss: 0.648134\n",
      "Train Epoch: 45 [012/027], Loss: 0.653509\n",
      "Train Epoch: 45 [014/027], Loss: 0.647330\n",
      "Train Epoch: 45 [016/027], Loss: 0.658536\n",
      "Train Epoch: 45 [018/027], Loss: 0.664450\n",
      "Train Epoch: 45 [020/027], Loss: 0.642286\n",
      "Train Epoch: 45 [022/027], Loss: 0.650220\n",
      "Train Epoch: 45 [024/027], Loss: 0.670319\n",
      "Train Epoch: 45 [026/027], Loss: 0.655096\n",
      "Train Epoch: 46 [002/027], Loss: 0.653007\n",
      "Train Epoch: 46 [004/027], Loss: 0.655969\n",
      "Train Epoch: 46 [006/027], Loss: 0.688086\n",
      "Train Epoch: 46 [008/027], Loss: 0.659355\n",
      "Train Epoch: 46 [010/027], Loss: 0.652209\n",
      "Train Epoch: 46 [012/027], Loss: 0.658095\n",
      "Train Epoch: 46 [014/027], Loss: 0.653922\n",
      "Train Epoch: 46 [016/027], Loss: 0.661226\n",
      "Train Epoch: 46 [018/027], Loss: 0.650074\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 46 [020/027], Loss: 0.659871\n",
      "Train Epoch: 46 [022/027], Loss: 0.653581\n",
      "Train Epoch: 46 [024/027], Loss: 0.643858\n",
      "Train Epoch: 46 [026/027], Loss: 0.654985\n",
      "Train Epoch: 47 [002/027], Loss: 0.660136\n",
      "Train Epoch: 47 [004/027], Loss: 0.656751\n",
      "Train Epoch: 47 [006/027], Loss: 0.640330\n",
      "Train Epoch: 47 [008/027], Loss: 0.649694\n",
      "Train Epoch: 47 [010/027], Loss: 0.650954\n",
      "Train Epoch: 47 [012/027], Loss: 0.660453\n",
      "Train Epoch: 47 [014/027], Loss: 0.650705\n",
      "Train Epoch: 47 [016/027], Loss: 0.633317\n",
      "Train Epoch: 47 [018/027], Loss: 0.668475\n",
      "Train Epoch: 47 [020/027], Loss: 0.649769\n",
      "Train Epoch: 47 [022/027], Loss: 0.655783\n",
      "Train Epoch: 47 [024/027], Loss: 0.663486\n",
      "Train Epoch: 47 [026/027], Loss: 0.641228\n",
      "Train Epoch: 48 [002/027], Loss: 0.641729\n",
      "Train Epoch: 48 [004/027], Loss: 0.659379\n",
      "Train Epoch: 48 [006/027], Loss: 0.647614\n",
      "Train Epoch: 48 [008/027], Loss: 0.646560\n",
      "Train Epoch: 48 [010/027], Loss: 0.646394\n",
      "Train Epoch: 48 [012/027], Loss: 0.659078\n",
      "Train Epoch: 48 [014/027], Loss: 0.661831\n",
      "Train Epoch: 48 [016/027], Loss: 0.657532\n",
      "Train Epoch: 48 [018/027], Loss: 0.655568\n",
      "Train Epoch: 48 [020/027], Loss: 0.639623\n",
      "Train Epoch: 48 [022/027], Loss: 0.651166\n",
      "Train Epoch: 48 [024/027], Loss: 0.640045\n",
      "Train Epoch: 48 [026/027], Loss: 0.663787\n",
      "Train Epoch: 49 [002/027], Loss: 0.648930\n",
      "Train Epoch: 49 [004/027], Loss: 0.646734\n",
      "Train Epoch: 49 [006/027], Loss: 0.659560\n",
      "Train Epoch: 49 [008/027], Loss: 0.653550\n",
      "Train Epoch: 49 [010/027], Loss: 0.668315\n",
      "Train Epoch: 49 [012/027], Loss: 0.660142\n",
      "Train Epoch: 49 [014/027], Loss: 0.648699\n",
      "Train Epoch: 49 [016/027], Loss: 0.648606\n",
      "Train Epoch: 49 [018/027], Loss: 0.643280\n",
      "Train Epoch: 49 [020/027], Loss: 0.650704\n",
      "Train Epoch: 49 [022/027], Loss: 0.660715\n",
      "Train Epoch: 49 [024/027], Loss: 0.649906\n",
      "Train Epoch: 49 [026/027], Loss: 0.647290\n",
      "Train Epoch: 50 [002/027], Loss: 0.656617\n",
      "Train Epoch: 50 [004/027], Loss: 0.650601\n",
      "Train Epoch: 50 [006/027], Loss: 0.643367\n",
      "Train Epoch: 50 [008/027], Loss: 0.644346\n",
      "Train Epoch: 50 [010/027], Loss: 0.645791\n",
      "Train Epoch: 50 [012/027], Loss: 0.649547\n",
      "Train Epoch: 50 [014/027], Loss: 0.653644\n",
      "Train Epoch: 50 [016/027], Loss: 0.651958\n",
      "Train Epoch: 50 [018/027], Loss: 0.653001\n",
      "Train Epoch: 50 [020/027], Loss: 0.660148\n",
      "Train Epoch: 50 [022/027], Loss: 0.676054\n",
      "Train Epoch: 50 [024/027], Loss: 0.643513\n",
      "Train Epoch: 50 [026/027], Loss: 0.647698\n",
      "Train Epoch: 51 [002/027], Loss: 0.636267\n",
      "Train Epoch: 51 [004/027], Loss: 0.644055\n",
      "Train Epoch: 51 [006/027], Loss: 0.654064\n",
      "Train Epoch: 51 [008/027], Loss: 0.651768\n",
      "Train Epoch: 51 [010/027], Loss: 0.640781\n",
      "Train Epoch: 51 [012/027], Loss: 0.641835\n",
      "Train Epoch: 51 [014/027], Loss: 0.635838\n",
      "Train Epoch: 51 [016/027], Loss: 0.638004\n",
      "Train Epoch: 51 [018/027], Loss: 0.670465\n",
      "Train Epoch: 51 [020/027], Loss: 0.646612\n",
      "Train Epoch: 51 [022/027], Loss: 0.642948\n",
      "Train Epoch: 51 [024/027], Loss: 0.662357\n",
      "Train Epoch: 51 [026/027], Loss: 0.642242\n",
      "Train Epoch: 52 [002/027], Loss: 0.651380\n",
      "Train Epoch: 52 [004/027], Loss: 0.652042\n",
      "Train Epoch: 52 [006/027], Loss: 0.649584\n",
      "Train Epoch: 52 [008/027], Loss: 0.644092\n",
      "Train Epoch: 52 [010/027], Loss: 0.649285\n",
      "Train Epoch: 52 [012/027], Loss: 0.662565\n",
      "Train Epoch: 52 [014/027], Loss: 0.654382\n",
      "Train Epoch: 52 [016/027], Loss: 0.645160\n",
      "Train Epoch: 52 [018/027], Loss: 0.653976\n",
      "Train Epoch: 52 [020/027], Loss: 0.656424\n",
      "Train Epoch: 52 [022/027], Loss: 0.648757\n",
      "Train Epoch: 52 [024/027], Loss: 0.655784\n",
      "Train Epoch: 52 [026/027], Loss: 0.650145\n",
      "Train Epoch: 53 [002/027], Loss: 0.649861\n",
      "Train Epoch: 53 [004/027], Loss: 0.660659\n",
      "Train Epoch: 53 [006/027], Loss: 0.652039\n",
      "Train Epoch: 53 [008/027], Loss: 0.652307\n",
      "Train Epoch: 53 [010/027], Loss: 0.649737\n",
      "Train Epoch: 53 [012/027], Loss: 0.646226\n",
      "Train Epoch: 53 [014/027], Loss: 0.651950\n",
      "Train Epoch: 53 [016/027], Loss: 0.648089\n",
      "Train Epoch: 53 [018/027], Loss: 0.639431\n",
      "Train Epoch: 53 [020/027], Loss: 0.655849\n",
      "Train Epoch: 53 [022/027], Loss: 0.639852\n",
      "Train Epoch: 53 [024/027], Loss: 0.640245\n",
      "Train Epoch: 53 [026/027], Loss: 0.643737\n",
      "Train Epoch: 54 [002/027], Loss: 0.654631\n",
      "Train Epoch: 54 [004/027], Loss: 0.647952\n",
      "Train Epoch: 54 [006/027], Loss: 0.650892\n",
      "Train Epoch: 54 [008/027], Loss: 0.640587\n",
      "Train Epoch: 54 [010/027], Loss: 0.643520\n",
      "Train Epoch: 54 [012/027], Loss: 0.646535\n",
      "Train Epoch: 54 [014/027], Loss: 0.645583\n",
      "Train Epoch: 54 [016/027], Loss: 0.652716\n",
      "Train Epoch: 54 [018/027], Loss: 0.648576\n",
      "Train Epoch: 54 [020/027], Loss: 0.643095\n",
      "Train Epoch: 54 [022/027], Loss: 0.652334\n",
      "Train Epoch: 54 [024/027], Loss: 0.646398\n",
      "Train Epoch: 54 [026/027], Loss: 0.639376\n",
      "Train Epoch: 55 [002/027], Loss: 0.657354\n",
      "Train Epoch: 55 [004/027], Loss: 0.654433\n",
      "Train Epoch: 55 [006/027], Loss: 0.652392\n",
      "Train Epoch: 55 [008/027], Loss: 0.649658\n",
      "Train Epoch: 55 [010/027], Loss: 0.643586\n",
      "Train Epoch: 55 [012/027], Loss: 0.647258\n",
      "Train Epoch: 55 [014/027], Loss: 0.640159\n",
      "Train Epoch: 55 [016/027], Loss: 0.643973\n",
      "Train Epoch: 55 [018/027], Loss: 0.657215\n",
      "Train Epoch: 55 [020/027], Loss: 0.640667\n",
      "Train Epoch: 55 [022/027], Loss: 0.646689\n",
      "Train Epoch: 55 [024/027], Loss: 0.642004\n",
      "Train Epoch: 55 [026/027], Loss: 0.646779\n",
      "Train Epoch: 56 [002/027], Loss: 0.629255\n",
      "Train Epoch: 56 [004/027], Loss: 0.649914\n",
      "Train Epoch: 56 [006/027], Loss: 0.657127\n",
      "Train Epoch: 56 [008/027], Loss: 0.652816\n",
      "Train Epoch: 56 [010/027], Loss: 0.653596\n",
      "Train Epoch: 56 [012/027], Loss: 0.651875\n",
      "Train Epoch: 56 [014/027], Loss: 0.656614\n",
      "Train Epoch: 56 [016/027], Loss: 0.643337\n",
      "Train Epoch: 56 [018/027], Loss: 0.628038\n",
      "Train Epoch: 56 [020/027], Loss: 0.643187\n",
      "Train Epoch: 56 [022/027], Loss: 0.643247\n",
      "Train Epoch: 56 [024/027], Loss: 0.647507\n",
      "Train Epoch: 56 [026/027], Loss: 0.642198\n",
      "Train Epoch: 57 [002/027], Loss: 0.662770\n",
      "Train Epoch: 57 [004/027], Loss: 0.644825\n",
      "Train Epoch: 57 [006/027], Loss: 0.637253\n",
      "Train Epoch: 57 [008/027], Loss: 0.641224\n",
      "Train Epoch: 57 [010/027], Loss: 0.646510\n",
      "Train Epoch: 57 [012/027], Loss: 0.654375\n",
      "Train Epoch: 57 [014/027], Loss: 0.646411\n",
      "Train Epoch: 57 [016/027], Loss: 0.643708\n",
      "Train Epoch: 57 [018/027], Loss: 0.651491\n",
      "Train Epoch: 57 [020/027], Loss: 0.648848\n",
      "Train Epoch: 57 [022/027], Loss: 0.649913\n",
      "Train Epoch: 57 [024/027], Loss: 0.645934\n",
      "Train Epoch: 57 [026/027], Loss: 0.643967\n",
      "Train Epoch: 58 [002/027], Loss: 0.651517\n",
      "Train Epoch: 58 [004/027], Loss: 0.647849\n",
      "Train Epoch: 58 [006/027], Loss: 0.647191\n",
      "Train Epoch: 58 [008/027], Loss: 0.631215\n",
      "Train Epoch: 58 [010/027], Loss: 0.649456\n",
      "Train Epoch: 58 [012/027], Loss: 0.647819\n",
      "Train Epoch: 58 [014/027], Loss: 0.662340\n",
      "Train Epoch: 58 [016/027], Loss: 0.637507\n",
      "Train Epoch: 58 [018/027], Loss: 0.640996\n",
      "Train Epoch: 58 [020/027], Loss: 0.647484\n",
      "Train Epoch: 58 [022/027], Loss: 0.647362\n",
      "Train Epoch: 58 [024/027], Loss: 0.653372\n",
      "Train Epoch: 58 [026/027], Loss: 0.651152\n",
      "Train Epoch: 59 [002/027], Loss: 0.637856\n",
      "Train Epoch: 59 [004/027], Loss: 0.639017\n",
      "Train Epoch: 59 [006/027], Loss: 0.647322\n",
      "Train Epoch: 59 [008/027], Loss: 0.642984\n",
      "Train Epoch: 59 [010/027], Loss: 0.647249\n",
      "Train Epoch: 59 [012/027], Loss: 0.646628\n",
      "Train Epoch: 59 [014/027], Loss: 0.651045\n",
      "Train Epoch: 59 [016/027], Loss: 0.646463\n",
      "Train Epoch: 59 [018/027], Loss: 0.645270\n",
      "Train Epoch: 59 [020/027], Loss: 0.632908\n",
      "Train Epoch: 59 [022/027], Loss: 0.646445\n",
      "Train Epoch: 59 [024/027], Loss: 0.654078\n",
      "Train Epoch: 59 [026/027], Loss: 0.637992\n",
      "Train Epoch: 60 [002/027], Loss: 0.647242\n",
      "Train Epoch: 60 [004/027], Loss: 0.641908\n",
      "Train Epoch: 60 [006/027], Loss: 0.638906\n",
      "Train Epoch: 60 [008/027], Loss: 0.643445\n",
      "Train Epoch: 60 [010/027], Loss: 0.651975\n",
      "Train Epoch: 60 [012/027], Loss: 0.638244\n",
      "Train Epoch: 60 [014/027], Loss: 0.660839\n",
      "Train Epoch: 60 [016/027], Loss: 0.642921\n",
      "Train Epoch: 60 [018/027], Loss: 0.645554\n",
      "Train Epoch: 60 [020/027], Loss: 0.646651\n",
      "Train Epoch: 60 [022/027], Loss: 0.645431\n",
      "Train Epoch: 60 [024/027], Loss: 0.636284\n",
      "Train Epoch: 60 [026/027], Loss: 0.633377\n",
      "Train Epoch: 61 [002/027], Loss: 0.643749\n",
      "Train Epoch: 61 [004/027], Loss: 0.643686\n",
      "Train Epoch: 61 [006/027], Loss: 0.648161\n",
      "Train Epoch: 61 [008/027], Loss: 0.651760\n",
      "Train Epoch: 61 [010/027], Loss: 0.644425\n",
      "Train Epoch: 61 [012/027], Loss: 0.650717\n",
      "Train Epoch: 61 [014/027], Loss: 0.659840\n",
      "Train Epoch: 61 [016/027], Loss: 0.637326\n",
      "Train Epoch: 61 [018/027], Loss: 0.642388\n",
      "Train Epoch: 61 [020/027], Loss: 0.631170\n",
      "Train Epoch: 61 [022/027], Loss: 0.657156\n",
      "Train Epoch: 61 [024/027], Loss: 0.642708\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 61 [026/027], Loss: 0.648604\n",
      "Train Epoch: 62 [002/027], Loss: 0.650374\n",
      "Train Epoch: 62 [004/027], Loss: 0.644846\n",
      "Train Epoch: 62 [006/027], Loss: 0.646954\n",
      "Train Epoch: 62 [008/027], Loss: 0.647229\n",
      "Train Epoch: 62 [010/027], Loss: 0.652276\n",
      "Train Epoch: 62 [012/027], Loss: 0.635392\n",
      "Train Epoch: 62 [014/027], Loss: 0.649534\n",
      "Train Epoch: 62 [016/027], Loss: 0.624678\n",
      "Train Epoch: 62 [018/027], Loss: 0.649989\n",
      "Train Epoch: 62 [020/027], Loss: 0.641158\n",
      "Train Epoch: 62 [022/027], Loss: 0.632937\n",
      "Train Epoch: 62 [024/027], Loss: 0.639763\n",
      "Train Epoch: 62 [026/027], Loss: 0.639071\n",
      "Train Epoch: 63 [002/027], Loss: 0.646186\n",
      "Train Epoch: 63 [004/027], Loss: 0.631354\n",
      "Train Epoch: 63 [006/027], Loss: 0.631647\n",
      "Train Epoch: 63 [008/027], Loss: 0.640175\n",
      "Train Epoch: 63 [010/027], Loss: 0.654483\n",
      "Train Epoch: 63 [012/027], Loss: 0.643237\n",
      "Train Epoch: 63 [014/027], Loss: 0.645863\n",
      "Train Epoch: 63 [016/027], Loss: 0.641694\n",
      "Train Epoch: 63 [018/027], Loss: 0.649331\n",
      "Train Epoch: 63 [020/027], Loss: 0.643809\n",
      "Train Epoch: 63 [022/027], Loss: 0.650921\n",
      "Train Epoch: 63 [024/027], Loss: 0.657504\n",
      "Train Epoch: 63 [026/027], Loss: 0.632111\n",
      "Train Epoch: 64 [002/027], Loss: 0.635361\n",
      "Train Epoch: 64 [004/027], Loss: 0.641628\n",
      "Train Epoch: 64 [006/027], Loss: 0.631657\n",
      "Train Epoch: 64 [008/027], Loss: 0.642369\n",
      "Train Epoch: 64 [010/027], Loss: 0.634922\n",
      "Train Epoch: 64 [012/027], Loss: 0.654143\n",
      "Train Epoch: 64 [014/027], Loss: 0.652349\n",
      "Train Epoch: 64 [016/027], Loss: 0.649857\n",
      "Train Epoch: 64 [018/027], Loss: 0.642590\n",
      "Train Epoch: 64 [020/027], Loss: 0.639451\n",
      "Train Epoch: 64 [022/027], Loss: 0.630543\n",
      "Train Epoch: 64 [024/027], Loss: 0.635890\n",
      "Train Epoch: 64 [026/027], Loss: 0.642586\n",
      "Train Epoch: 65 [002/027], Loss: 0.636789\n",
      "Train Epoch: 65 [004/027], Loss: 0.655522\n",
      "Train Epoch: 65 [006/027], Loss: 0.636717\n",
      "Train Epoch: 65 [008/027], Loss: 0.643609\n",
      "Train Epoch: 65 [010/027], Loss: 0.630566\n",
      "Train Epoch: 65 [012/027], Loss: 0.644448\n",
      "Train Epoch: 65 [014/027], Loss: 0.658159\n",
      "Train Epoch: 65 [016/027], Loss: 0.639554\n",
      "Train Epoch: 65 [018/027], Loss: 0.650308\n",
      "Train Epoch: 65 [020/027], Loss: 0.642051\n",
      "Train Epoch: 65 [022/027], Loss: 0.636310\n",
      "Train Epoch: 65 [024/027], Loss: 0.655137\n",
      "Train Epoch: 65 [026/027], Loss: 0.635727\n",
      "Train Epoch: 66 [002/027], Loss: 0.633920\n",
      "Train Epoch: 66 [004/027], Loss: 0.642991\n",
      "Train Epoch: 66 [006/027], Loss: 0.650797\n",
      "Train Epoch: 66 [008/027], Loss: 0.641259\n",
      "Train Epoch: 66 [010/027], Loss: 0.646958\n",
      "Train Epoch: 66 [012/027], Loss: 0.647785\n",
      "Train Epoch: 66 [014/027], Loss: 0.634320\n",
      "Train Epoch: 66 [016/027], Loss: 0.631192\n",
      "Train Epoch: 66 [018/027], Loss: 0.650061\n",
      "Train Epoch: 66 [020/027], Loss: 0.642809\n",
      "Train Epoch: 66 [022/027], Loss: 0.636028\n",
      "Train Epoch: 66 [024/027], Loss: 0.641744\n",
      "Train Epoch: 66 [026/027], Loss: 0.653286\n",
      "Train Epoch: 67 [002/027], Loss: 0.649688\n",
      "Train Epoch: 67 [004/027], Loss: 0.641473\n",
      "Train Epoch: 67 [006/027], Loss: 0.652586\n",
      "Train Epoch: 67 [008/027], Loss: 0.630972\n",
      "Train Epoch: 67 [010/027], Loss: 0.650574\n",
      "Train Epoch: 67 [012/027], Loss: 0.650219\n",
      "Train Epoch: 67 [014/027], Loss: 0.648046\n",
      "Train Epoch: 67 [016/027], Loss: 0.650318\n",
      "Train Epoch: 67 [018/027], Loss: 0.639772\n",
      "Train Epoch: 67 [020/027], Loss: 0.642092\n",
      "Train Epoch: 67 [022/027], Loss: 0.630115\n",
      "Train Epoch: 67 [024/027], Loss: 0.640452\n",
      "Train Epoch: 67 [026/027], Loss: 0.643208\n",
      "Train Epoch: 68 [002/027], Loss: 0.637436\n",
      "Train Epoch: 68 [004/027], Loss: 0.642811\n",
      "Train Epoch: 68 [006/027], Loss: 0.630760\n",
      "Train Epoch: 68 [008/027], Loss: 0.637560\n",
      "Train Epoch: 68 [010/027], Loss: 0.650104\n",
      "Train Epoch: 68 [012/027], Loss: 0.645440\n",
      "Train Epoch: 68 [014/027], Loss: 0.638849\n",
      "Train Epoch: 68 [016/027], Loss: 0.632019\n",
      "Train Epoch: 68 [018/027], Loss: 0.655027\n",
      "Train Epoch: 68 [020/027], Loss: 0.640211\n",
      "Train Epoch: 68 [022/027], Loss: 0.642510\n",
      "Train Epoch: 68 [024/027], Loss: 0.651128\n",
      "Train Epoch: 68 [026/027], Loss: 0.647075\n",
      "Train Epoch: 69 [002/027], Loss: 0.645174\n",
      "Train Epoch: 69 [004/027], Loss: 0.633027\n",
      "Train Epoch: 69 [006/027], Loss: 0.646118\n",
      "Train Epoch: 69 [008/027], Loss: 0.639502\n",
      "Train Epoch: 69 [010/027], Loss: 0.633186\n",
      "Train Epoch: 69 [012/027], Loss: 0.659088\n",
      "Train Epoch: 69 [014/027], Loss: 0.643204\n",
      "Train Epoch: 69 [016/027], Loss: 0.635445\n",
      "Train Epoch: 69 [018/027], Loss: 0.630126\n",
      "Train Epoch: 69 [020/027], Loss: 0.639863\n",
      "Train Epoch: 69 [022/027], Loss: 0.649827\n",
      "Train Epoch: 69 [024/027], Loss: 0.638785\n",
      "Train Epoch: 69 [026/027], Loss: 0.635131\n",
      "Train Epoch: 70 [002/027], Loss: 0.640317\n",
      "Train Epoch: 70 [004/027], Loss: 0.656773\n",
      "Train Epoch: 70 [006/027], Loss: 0.655086\n",
      "Train Epoch: 70 [008/027], Loss: 0.646147\n",
      "Train Epoch: 70 [010/027], Loss: 0.628010\n",
      "Train Epoch: 70 [012/027], Loss: 0.639974\n",
      "Train Epoch: 70 [014/027], Loss: 0.638669\n",
      "Train Epoch: 70 [016/027], Loss: 0.632726\n",
      "Train Epoch: 70 [018/027], Loss: 0.661808\n",
      "Train Epoch: 70 [020/027], Loss: 0.639773\n",
      "Train Epoch: 70 [022/027], Loss: 0.639675\n",
      "Train Epoch: 70 [024/027], Loss: 0.645764\n",
      "Train Epoch: 70 [026/027], Loss: 0.634618\n",
      "Train Epoch: 71 [002/027], Loss: 0.639510\n",
      "Train Epoch: 71 [004/027], Loss: 0.644593\n",
      "Train Epoch: 71 [006/027], Loss: 0.634550\n",
      "Train Epoch: 71 [008/027], Loss: 0.639300\n",
      "Train Epoch: 71 [010/027], Loss: 0.643539\n",
      "Train Epoch: 71 [012/027], Loss: 0.642778\n",
      "Train Epoch: 71 [014/027], Loss: 0.635141\n",
      "Train Epoch: 71 [016/027], Loss: 0.649830\n",
      "Train Epoch: 71 [018/027], Loss: 0.636595\n",
      "Train Epoch: 71 [020/027], Loss: 0.626382\n",
      "Train Epoch: 71 [022/027], Loss: 0.633269\n",
      "Train Epoch: 71 [024/027], Loss: 0.647997\n",
      "Train Epoch: 71 [026/027], Loss: 0.650589\n",
      "Train Epoch: 72 [002/027], Loss: 0.636787\n",
      "Train Epoch: 72 [004/027], Loss: 0.635846\n",
      "Train Epoch: 72 [006/027], Loss: 0.648129\n",
      "Train Epoch: 72 [008/027], Loss: 0.639004\n",
      "Train Epoch: 72 [010/027], Loss: 0.653293\n",
      "Train Epoch: 72 [012/027], Loss: 0.645553\n",
      "Train Epoch: 72 [014/027], Loss: 0.645781\n",
      "Train Epoch: 72 [016/027], Loss: 0.624678\n",
      "Train Epoch: 72 [018/027], Loss: 0.648898\n",
      "Train Epoch: 72 [020/027], Loss: 0.643830\n",
      "Train Epoch: 72 [022/027], Loss: 0.642183\n",
      "Train Epoch: 72 [024/027], Loss: 0.641454\n",
      "Train Epoch: 72 [026/027], Loss: 0.635898\n",
      "Train Epoch: 73 [002/027], Loss: 0.638046\n",
      "Train Epoch: 73 [004/027], Loss: 0.641772\n",
      "Train Epoch: 73 [006/027], Loss: 0.648572\n",
      "Train Epoch: 73 [008/027], Loss: 0.638540\n",
      "Train Epoch: 73 [010/027], Loss: 0.632843\n",
      "Train Epoch: 73 [012/027], Loss: 0.640900\n",
      "Train Epoch: 73 [014/027], Loss: 0.656976\n",
      "Train Epoch: 73 [016/027], Loss: 0.634889\n",
      "Train Epoch: 73 [018/027], Loss: 0.634450\n",
      "Train Epoch: 73 [020/027], Loss: 0.638321\n",
      "Train Epoch: 73 [022/027], Loss: 0.645513\n",
      "Train Epoch: 73 [024/027], Loss: 0.630728\n",
      "Train Epoch: 73 [026/027], Loss: 0.645223\n",
      "Train Epoch: 74 [002/027], Loss: 0.646487\n",
      "Train Epoch: 74 [004/027], Loss: 0.645443\n",
      "Train Epoch: 74 [006/027], Loss: 0.637685\n",
      "Train Epoch: 74 [008/027], Loss: 0.649799\n",
      "Train Epoch: 74 [010/027], Loss: 0.636844\n",
      "Train Epoch: 74 [012/027], Loss: 0.643821\n",
      "Train Epoch: 74 [014/027], Loss: 0.640664\n",
      "Train Epoch: 74 [016/027], Loss: 0.636092\n",
      "Train Epoch: 74 [018/027], Loss: 0.643753\n",
      "Train Epoch: 74 [020/027], Loss: 0.640779\n",
      "Train Epoch: 74 [022/027], Loss: 0.635903\n",
      "Train Epoch: 74 [024/027], Loss: 0.643717\n",
      "Train Epoch: 74 [026/027], Loss: 0.634778\n",
      "Train Epoch: 75 [002/027], Loss: 0.657006\n",
      "Train Epoch: 75 [004/027], Loss: 0.648599\n",
      "Train Epoch: 75 [006/027], Loss: 0.644812\n",
      "Train Epoch: 75 [008/027], Loss: 0.654156\n",
      "Train Epoch: 75 [010/027], Loss: 0.626596\n",
      "Train Epoch: 75 [012/027], Loss: 0.630955\n",
      "Train Epoch: 75 [014/027], Loss: 0.650145\n",
      "Train Epoch: 75 [016/027], Loss: 0.632935\n",
      "Train Epoch: 75 [018/027], Loss: 0.638618\n",
      "Train Epoch: 75 [020/027], Loss: 0.619060\n",
      "Train Epoch: 75 [022/027], Loss: 0.655576\n",
      "Train Epoch: 75 [024/027], Loss: 0.654441\n",
      "Train Epoch: 75 [026/027], Loss: 0.626948\n",
      "Train Epoch: 76 [002/027], Loss: 0.647672\n",
      "Train Epoch: 76 [004/027], Loss: 0.641354\n",
      "Train Epoch: 76 [006/027], Loss: 0.640030\n",
      "Train Epoch: 76 [008/027], Loss: 0.629034\n",
      "Train Epoch: 76 [010/027], Loss: 0.639893\n",
      "Train Epoch: 76 [012/027], Loss: 0.646881\n",
      "Train Epoch: 76 [014/027], Loss: 0.646020\n",
      "Train Epoch: 76 [016/027], Loss: 0.641858\n",
      "Train Epoch: 76 [018/027], Loss: 0.639352\n",
      "Train Epoch: 76 [020/027], Loss: 0.634215\n",
      "Train Epoch: 76 [022/027], Loss: 0.644286\n",
      "Train Epoch: 76 [024/027], Loss: 0.634917\n",
      "Train Epoch: 76 [026/027], Loss: 0.635520\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 77 [002/027], Loss: 0.644890\n",
      "Train Epoch: 77 [004/027], Loss: 0.632405\n",
      "Train Epoch: 77 [006/027], Loss: 0.650054\n",
      "Train Epoch: 77 [008/027], Loss: 0.641529\n",
      "Train Epoch: 77 [010/027], Loss: 0.636625\n",
      "Train Epoch: 77 [012/027], Loss: 0.644008\n",
      "Train Epoch: 77 [014/027], Loss: 0.631955\n",
      "Train Epoch: 77 [016/027], Loss: 0.639724\n",
      "Train Epoch: 77 [018/027], Loss: 0.649914\n",
      "Train Epoch: 77 [020/027], Loss: 0.645597\n",
      "Train Epoch: 77 [022/027], Loss: 0.651081\n",
      "Train Epoch: 77 [024/027], Loss: 0.637848\n",
      "Train Epoch: 77 [026/027], Loss: 0.641049\n",
      "Train Epoch: 78 [002/027], Loss: 0.644282\n",
      "Train Epoch: 78 [004/027], Loss: 0.635793\n",
      "Train Epoch: 78 [006/027], Loss: 0.627586\n",
      "Train Epoch: 78 [008/027], Loss: 0.644187\n",
      "Train Epoch: 78 [010/027], Loss: 0.641842\n",
      "Train Epoch: 78 [012/027], Loss: 0.640036\n",
      "Train Epoch: 78 [014/027], Loss: 0.634766\n",
      "Train Epoch: 78 [016/027], Loss: 0.638232\n",
      "Train Epoch: 78 [018/027], Loss: 0.636457\n",
      "Train Epoch: 78 [020/027], Loss: 0.646230\n",
      "Train Epoch: 78 [022/027], Loss: 0.654807\n",
      "Train Epoch: 78 [024/027], Loss: 0.636002\n",
      "Train Epoch: 78 [026/027], Loss: 0.645947\n",
      "Train Epoch: 79 [002/027], Loss: 0.647186\n",
      "Train Epoch: 79 [004/027], Loss: 0.627651\n",
      "Train Epoch: 79 [006/027], Loss: 0.637794\n",
      "Train Epoch: 79 [008/027], Loss: 0.642898\n",
      "Train Epoch: 79 [010/027], Loss: 0.639825\n",
      "Train Epoch: 79 [012/027], Loss: 0.639204\n",
      "Train Epoch: 79 [014/027], Loss: 0.637486\n",
      "Train Epoch: 79 [016/027], Loss: 0.631280\n",
      "Train Epoch: 79 [018/027], Loss: 0.625279\n",
      "Train Epoch: 79 [020/027], Loss: 0.635732\n",
      "Train Epoch: 79 [022/027], Loss: 0.621457\n",
      "Train Epoch: 79 [024/027], Loss: 0.637155\n",
      "Train Epoch: 79 [026/027], Loss: 0.638047\n",
      "Train Epoch: 80 [002/027], Loss: 0.640391\n",
      "Train Epoch: 80 [004/027], Loss: 0.650001\n",
      "Train Epoch: 80 [006/027], Loss: 0.637133\n",
      "Train Epoch: 80 [008/027], Loss: 0.631465\n",
      "Train Epoch: 80 [010/027], Loss: 0.629207\n",
      "Train Epoch: 80 [012/027], Loss: 0.631469\n",
      "Train Epoch: 80 [014/027], Loss: 0.657559\n",
      "Train Epoch: 80 [016/027], Loss: 0.646773\n",
      "Train Epoch: 80 [018/027], Loss: 0.643637\n",
      "Train Epoch: 80 [020/027], Loss: 0.637185\n",
      "Train Epoch: 80 [022/027], Loss: 0.643603\n",
      "Train Epoch: 80 [024/027], Loss: 0.644706\n",
      "Train Epoch: 80 [026/027], Loss: 0.637378\n",
      "Train Epoch: 81 [002/027], Loss: 0.649155\n",
      "Train Epoch: 81 [004/027], Loss: 0.647940\n",
      "Train Epoch: 81 [006/027], Loss: 0.648405\n",
      "Train Epoch: 81 [008/027], Loss: 0.621021\n",
      "Train Epoch: 81 [010/027], Loss: 0.633409\n",
      "Train Epoch: 81 [012/027], Loss: 0.637319\n",
      "Train Epoch: 81 [014/027], Loss: 0.636140\n",
      "Train Epoch: 81 [016/027], Loss: 0.648551\n",
      "Train Epoch: 81 [018/027], Loss: 0.637064\n",
      "Train Epoch: 81 [020/027], Loss: 0.628784\n",
      "Train Epoch: 81 [022/027], Loss: 0.646356\n",
      "Train Epoch: 81 [024/027], Loss: 0.636000\n",
      "Train Epoch: 81 [026/027], Loss: 0.639482\n",
      "Train Epoch: 82 [002/027], Loss: 0.631351\n",
      "Train Epoch: 82 [004/027], Loss: 0.628069\n",
      "Train Epoch: 82 [006/027], Loss: 0.640445\n",
      "Train Epoch: 82 [008/027], Loss: 0.630294\n",
      "Train Epoch: 82 [010/027], Loss: 0.639304\n",
      "Train Epoch: 82 [012/027], Loss: 0.645313\n",
      "Train Epoch: 82 [014/027], Loss: 0.638738\n",
      "Train Epoch: 82 [016/027], Loss: 0.621145\n",
      "Train Epoch: 82 [018/027], Loss: 0.654366\n",
      "Train Epoch: 82 [020/027], Loss: 0.640676\n",
      "Train Epoch: 82 [022/027], Loss: 0.643170\n",
      "Train Epoch: 82 [024/027], Loss: 0.630035\n",
      "Train Epoch: 82 [026/027], Loss: 0.644443\n",
      "Train Epoch: 83 [002/027], Loss: 0.631941\n",
      "Train Epoch: 83 [004/027], Loss: 0.652184\n",
      "Train Epoch: 83 [006/027], Loss: 0.640437\n",
      "Train Epoch: 83 [008/027], Loss: 0.631460\n",
      "Train Epoch: 83 [010/027], Loss: 0.648975\n",
      "Train Epoch: 83 [012/027], Loss: 0.641324\n",
      "Train Epoch: 83 [014/027], Loss: 0.635393\n",
      "Train Epoch: 83 [016/027], Loss: 0.637174\n",
      "Train Epoch: 83 [018/027], Loss: 0.647022\n",
      "Train Epoch: 83 [020/027], Loss: 0.634077\n",
      "Train Epoch: 83 [022/027], Loss: 0.652073\n",
      "Train Epoch: 83 [024/027], Loss: 0.635779\n",
      "Train Epoch: 83 [026/027], Loss: 0.631945\n",
      "Train Epoch: 84 [002/027], Loss: 0.635159\n",
      "Train Epoch: 84 [004/027], Loss: 0.651281\n",
      "Train Epoch: 84 [006/027], Loss: 0.624990\n",
      "Train Epoch: 84 [008/027], Loss: 0.631829\n",
      "Train Epoch: 84 [010/027], Loss: 0.645183\n",
      "Train Epoch: 84 [012/027], Loss: 0.642631\n",
      "Train Epoch: 84 [014/027], Loss: 0.637209\n",
      "Train Epoch: 84 [016/027], Loss: 0.646550\n",
      "Train Epoch: 84 [018/027], Loss: 0.638273\n",
      "Train Epoch: 84 [020/027], Loss: 0.639152\n",
      "Train Epoch: 84 [022/027], Loss: 0.638228\n",
      "Train Epoch: 84 [024/027], Loss: 0.650819\n",
      "Train Epoch: 84 [026/027], Loss: 0.619404\n",
      "Train Epoch: 85 [002/027], Loss: 0.649025\n",
      "Train Epoch: 85 [004/027], Loss: 0.629354\n",
      "Train Epoch: 85 [006/027], Loss: 0.641009\n",
      "Train Epoch: 85 [008/027], Loss: 0.628627\n",
      "Train Epoch: 85 [010/027], Loss: 0.637795\n",
      "Train Epoch: 85 [012/027], Loss: 0.640898\n",
      "Train Epoch: 85 [014/027], Loss: 0.633254\n",
      "Train Epoch: 85 [016/027], Loss: 0.637417\n",
      "Train Epoch: 85 [018/027], Loss: 0.633599\n",
      "Train Epoch: 85 [020/027], Loss: 0.633605\n",
      "Train Epoch: 85 [022/027], Loss: 0.646468\n",
      "Train Epoch: 85 [024/027], Loss: 0.649594\n",
      "Train Epoch: 85 [026/027], Loss: 0.641854\n",
      "Train Epoch: 86 [002/027], Loss: 0.636674\n",
      "Train Epoch: 86 [004/027], Loss: 0.647157\n",
      "Train Epoch: 86 [006/027], Loss: 0.637892\n",
      "Train Epoch: 86 [008/027], Loss: 0.630992\n",
      "Train Epoch: 86 [010/027], Loss: 0.643724\n",
      "Train Epoch: 86 [012/027], Loss: 0.632943\n",
      "Train Epoch: 86 [014/027], Loss: 0.640076\n",
      "Train Epoch: 86 [016/027], Loss: 0.635088\n",
      "Train Epoch: 86 [018/027], Loss: 0.630279\n",
      "Train Epoch: 86 [020/027], Loss: 0.632753\n",
      "Train Epoch: 86 [022/027], Loss: 0.636799\n",
      "Train Epoch: 86 [024/027], Loss: 0.633486\n",
      "Train Epoch: 86 [026/027], Loss: 0.636752\n",
      "Train Epoch: 87 [002/027], Loss: 0.632562\n",
      "Train Epoch: 87 [004/027], Loss: 0.634678\n",
      "Train Epoch: 87 [006/027], Loss: 0.631567\n",
      "Train Epoch: 87 [008/027], Loss: 0.631818\n",
      "Train Epoch: 87 [010/027], Loss: 0.637284\n",
      "Train Epoch: 87 [012/027], Loss: 0.620706\n",
      "Train Epoch: 87 [014/027], Loss: 0.647028\n",
      "Train Epoch: 87 [016/027], Loss: 0.639566\n",
      "Train Epoch: 87 [018/027], Loss: 0.636807\n",
      "Train Epoch: 87 [020/027], Loss: 0.630102\n",
      "Train Epoch: 87 [022/027], Loss: 0.643108\n",
      "Train Epoch: 87 [024/027], Loss: 0.635607\n",
      "Train Epoch: 87 [026/027], Loss: 0.634867\n",
      "Train Epoch: 88 [002/027], Loss: 0.627374\n",
      "Train Epoch: 88 [004/027], Loss: 0.637303\n",
      "Train Epoch: 88 [006/027], Loss: 0.638299\n",
      "Train Epoch: 88 [008/027], Loss: 0.646618\n",
      "Train Epoch: 88 [010/027], Loss: 0.645345\n",
      "Train Epoch: 88 [012/027], Loss: 0.646629\n",
      "Train Epoch: 88 [014/027], Loss: 0.629860\n",
      "Train Epoch: 88 [016/027], Loss: 0.634629\n",
      "Train Epoch: 88 [018/027], Loss: 0.628724\n",
      "Train Epoch: 88 [020/027], Loss: 0.622973\n",
      "Train Epoch: 88 [022/027], Loss: 0.637273\n",
      "Train Epoch: 88 [024/027], Loss: 0.627676\n",
      "Train Epoch: 88 [026/027], Loss: 0.635801\n",
      "Train Epoch: 89 [002/027], Loss: 0.634965\n",
      "Train Epoch: 89 [004/027], Loss: 0.641041\n",
      "Train Epoch: 89 [006/027], Loss: 0.639424\n",
      "Train Epoch: 89 [008/027], Loss: 0.646604\n",
      "Train Epoch: 89 [010/027], Loss: 0.628941\n",
      "Train Epoch: 89 [012/027], Loss: 0.640024\n",
      "Train Epoch: 89 [014/027], Loss: 0.628220\n",
      "Train Epoch: 89 [016/027], Loss: 0.634717\n",
      "Train Epoch: 89 [018/027], Loss: 0.640736\n",
      "Train Epoch: 89 [020/027], Loss: 0.642598\n",
      "Train Epoch: 89 [022/027], Loss: 0.641457\n",
      "Train Epoch: 89 [024/027], Loss: 0.642649\n",
      "Train Epoch: 89 [026/027], Loss: 0.642507\n",
      "Train Epoch: 90 [002/027], Loss: 0.642650\n",
      "Train Epoch: 90 [004/027], Loss: 0.642410\n",
      "Train Epoch: 90 [006/027], Loss: 0.622021\n",
      "Train Epoch: 90 [008/027], Loss: 0.629836\n",
      "Train Epoch: 90 [010/027], Loss: 0.636985\n",
      "Train Epoch: 90 [012/027], Loss: 0.645151\n",
      "Train Epoch: 90 [014/027], Loss: 0.628963\n",
      "Train Epoch: 90 [016/027], Loss: 0.640965\n",
      "Train Epoch: 90 [018/027], Loss: 0.639374\n",
      "Train Epoch: 90 [020/027], Loss: 0.642945\n",
      "Train Epoch: 90 [022/027], Loss: 0.633035\n",
      "Train Epoch: 90 [024/027], Loss: 0.639119\n",
      "Train Epoch: 90 [026/027], Loss: 0.626883\n",
      "Train Epoch: 91 [002/027], Loss: 0.641431\n",
      "Train Epoch: 91 [004/027], Loss: 0.650220\n",
      "Train Epoch: 91 [006/027], Loss: 0.637047\n",
      "Train Epoch: 91 [008/027], Loss: 0.630212\n",
      "Train Epoch: 91 [010/027], Loss: 0.637683\n",
      "Train Epoch: 91 [012/027], Loss: 0.635589\n",
      "Train Epoch: 91 [014/027], Loss: 0.649103\n",
      "Train Epoch: 91 [016/027], Loss: 0.634619\n",
      "Train Epoch: 91 [018/027], Loss: 0.622220\n",
      "Train Epoch: 91 [020/027], Loss: 0.641508\n",
      "Train Epoch: 91 [022/027], Loss: 0.644827\n",
      "Train Epoch: 91 [024/027], Loss: 0.628546\n",
      "Train Epoch: 91 [026/027], Loss: 0.632355\n",
      "Train Epoch: 92 [002/027], Loss: 0.636570\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 92 [004/027], Loss: 0.640232\n",
      "Train Epoch: 92 [006/027], Loss: 0.637473\n",
      "Train Epoch: 92 [008/027], Loss: 0.627480\n",
      "Train Epoch: 92 [010/027], Loss: 0.638964\n",
      "Train Epoch: 92 [012/027], Loss: 0.634375\n",
      "Train Epoch: 92 [014/027], Loss: 0.639766\n",
      "Train Epoch: 92 [016/027], Loss: 0.639874\n",
      "Train Epoch: 92 [018/027], Loss: 0.643250\n",
      "Train Epoch: 92 [020/027], Loss: 0.621811\n",
      "Train Epoch: 92 [022/027], Loss: 0.651253\n",
      "Train Epoch: 92 [024/027], Loss: 0.630713\n",
      "Train Epoch: 92 [026/027], Loss: 0.637717\n",
      "Train Epoch: 93 [002/027], Loss: 0.631923\n",
      "Train Epoch: 93 [004/027], Loss: 0.631982\n",
      "Train Epoch: 93 [006/027], Loss: 0.632821\n",
      "Train Epoch: 93 [008/027], Loss: 0.635054\n",
      "Train Epoch: 93 [010/027], Loss: 0.640029\n",
      "Train Epoch: 93 [012/027], Loss: 0.640283\n",
      "Train Epoch: 93 [014/027], Loss: 0.649390\n",
      "Train Epoch: 93 [016/027], Loss: 0.629728\n",
      "Train Epoch: 93 [018/027], Loss: 0.646465\n",
      "Train Epoch: 93 [020/027], Loss: 0.628983\n",
      "Train Epoch: 93 [022/027], Loss: 0.636158\n",
      "Train Epoch: 93 [024/027], Loss: 0.623107\n",
      "Train Epoch: 93 [026/027], Loss: 0.627387\n",
      "Train Epoch: 94 [002/027], Loss: 0.627508\n",
      "Train Epoch: 94 [004/027], Loss: 0.635651\n",
      "Train Epoch: 94 [006/027], Loss: 0.641852\n",
      "Train Epoch: 94 [008/027], Loss: 0.632297\n",
      "Train Epoch: 94 [010/027], Loss: 0.625743\n",
      "Train Epoch: 94 [012/027], Loss: 0.647689\n",
      "Train Epoch: 94 [014/027], Loss: 0.631644\n",
      "Train Epoch: 94 [016/027], Loss: 0.642046\n",
      "Train Epoch: 94 [018/027], Loss: 0.637570\n",
      "Train Epoch: 94 [020/027], Loss: 0.641371\n",
      "Train Epoch: 94 [022/027], Loss: 0.651942\n",
      "Train Epoch: 94 [024/027], Loss: 0.639616\n",
      "Train Epoch: 94 [026/027], Loss: 0.636350\n",
      "Train Epoch: 95 [002/027], Loss: 0.652522\n",
      "Train Epoch: 95 [004/027], Loss: 0.628953\n",
      "Train Epoch: 95 [006/027], Loss: 0.631242\n",
      "Train Epoch: 95 [008/027], Loss: 0.631213\n",
      "Train Epoch: 95 [010/027], Loss: 0.629780\n",
      "Train Epoch: 95 [012/027], Loss: 0.641688\n",
      "Train Epoch: 95 [014/027], Loss: 0.627672\n",
      "Train Epoch: 95 [016/027], Loss: 0.629073\n",
      "Train Epoch: 95 [018/027], Loss: 0.640910\n",
      "Train Epoch: 95 [020/027], Loss: 0.635811\n",
      "Train Epoch: 95 [022/027], Loss: 0.630037\n",
      "Train Epoch: 95 [024/027], Loss: 0.637049\n",
      "Train Epoch: 95 [026/027], Loss: 0.637402\n",
      "Train Epoch: 96 [002/027], Loss: 0.647071\n",
      "Train Epoch: 96 [004/027], Loss: 0.640401\n",
      "Train Epoch: 96 [006/027], Loss: 0.654224\n",
      "Train Epoch: 96 [008/027], Loss: 0.638307\n",
      "Train Epoch: 96 [010/027], Loss: 0.644648\n",
      "Train Epoch: 96 [012/027], Loss: 0.629062\n",
      "Train Epoch: 96 [014/027], Loss: 0.634916\n",
      "Train Epoch: 96 [016/027], Loss: 0.641993\n",
      "Train Epoch: 96 [018/027], Loss: 0.643194\n",
      "Train Epoch: 96 [020/027], Loss: 0.636631\n",
      "Train Epoch: 96 [022/027], Loss: 0.632435\n",
      "Train Epoch: 96 [024/027], Loss: 0.644256\n",
      "Train Epoch: 96 [026/027], Loss: 0.651513\n",
      "Train Epoch: 97 [002/027], Loss: 0.636169\n",
      "Train Epoch: 97 [004/027], Loss: 0.629069\n",
      "Train Epoch: 97 [006/027], Loss: 0.635141\n",
      "Train Epoch: 97 [008/027], Loss: 0.631415\n",
      "Train Epoch: 97 [010/027], Loss: 0.628459\n",
      "Train Epoch: 97 [012/027], Loss: 0.631294\n",
      "Train Epoch: 97 [014/027], Loss: 0.652746\n",
      "Train Epoch: 97 [016/027], Loss: 0.636453\n",
      "Train Epoch: 97 [018/027], Loss: 0.628350\n",
      "Train Epoch: 97 [020/027], Loss: 0.640198\n",
      "Train Epoch: 97 [022/027], Loss: 0.632669\n",
      "Train Epoch: 97 [024/027], Loss: 0.629394\n",
      "Train Epoch: 97 [026/027], Loss: 0.640383\n",
      "Train Epoch: 98 [002/027], Loss: 0.635504\n",
      "Train Epoch: 98 [004/027], Loss: 0.647449\n",
      "Train Epoch: 98 [006/027], Loss: 0.632077\n",
      "Train Epoch: 98 [008/027], Loss: 0.633794\n",
      "Train Epoch: 98 [010/027], Loss: 0.649624\n",
      "Train Epoch: 98 [012/027], Loss: 0.644599\n",
      "Train Epoch: 98 [014/027], Loss: 0.644227\n",
      "Train Epoch: 98 [016/027], Loss: 0.633987\n",
      "Train Epoch: 98 [018/027], Loss: 0.640833\n",
      "Train Epoch: 98 [020/027], Loss: 0.645468\n",
      "Train Epoch: 98 [022/027], Loss: 0.628800\n",
      "Train Epoch: 98 [024/027], Loss: 0.639934\n",
      "Train Epoch: 98 [026/027], Loss: 0.638870\n",
      "Train Epoch: 99 [002/027], Loss: 0.636946\n",
      "Train Epoch: 99 [004/027], Loss: 0.629310\n",
      "Train Epoch: 99 [006/027], Loss: 0.646098\n",
      "Train Epoch: 99 [008/027], Loss: 0.643766\n",
      "Train Epoch: 99 [010/027], Loss: 0.647764\n",
      "Train Epoch: 99 [012/027], Loss: 0.644129\n",
      "Train Epoch: 99 [014/027], Loss: 0.623217\n",
      "Train Epoch: 99 [016/027], Loss: 0.639577\n",
      "Train Epoch: 99 [018/027], Loss: 0.645866\n",
      "Train Epoch: 99 [020/027], Loss: 0.630577\n",
      "Train Epoch: 99 [022/027], Loss: 0.642437\n",
      "Train Epoch: 99 [024/027], Loss: 0.617728\n",
      "Train Epoch: 99 [026/027], Loss: 0.654930\n",
      "Train Epoch: 100 [002/027], Loss: 0.638515\n",
      "Train Epoch: 100 [004/027], Loss: 0.637535\n",
      "Train Epoch: 100 [006/027], Loss: 0.641185\n",
      "Train Epoch: 100 [008/027], Loss: 0.639172\n",
      "Train Epoch: 100 [010/027], Loss: 0.650758\n",
      "Train Epoch: 100 [012/027], Loss: 0.626523\n",
      "Train Epoch: 100 [014/027], Loss: 0.634586\n",
      "Train Epoch: 100 [016/027], Loss: 0.634712\n",
      "Train Epoch: 100 [018/027], Loss: 0.638276\n",
      "Train Epoch: 100 [020/027], Loss: 0.642136\n",
      "Train Epoch: 100 [022/027], Loss: 0.635141\n",
      "Train Epoch: 100 [024/027], Loss: 0.626898\n",
      "Train Epoch: 100 [026/027], Loss: 0.640106\n"
     ]
    }
   ],
   "source": [
    "trainer.fit(dataloader)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([6755, 88])\n"
     ]
    }
   ],
   "source": [
    "X_test,y_test = torch.FloatTensor(X_test).to(trainer.device), torch.FloatTensor(y_test).to(trainer.device)\n",
    "print(X_test.shape)\n",
    "batch_size = 512\n",
    "dataset = TensorDataset(X_test,y_test)\n",
    "test_dataloader = DataLoader(dataset, \n",
    "                              batch_size = batch_size,\n",
    "                              shuffle = False)  # evaluation data should not be shuffled"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC score:  0.51\n",
      "Accuracy score:  0.11\n"
     ]
    }
   ],
   "source": [
    "trainer.eval(test_dataloader)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 218,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/extdrive1/anaconda3/envs/amlenv/lib/python3.6/site-packages/torch/nn/functional.py:2016: UserWarning: Using a target size (torch.Size([512])) that is different to the input size (torch.Size([512, 1])) is deprecated. Please ensure they have the same size.\n",
      "  \"Please ensure they have the same size.\".format(target.size(), input.size()))\n",
      "/extdrive1/anaconda3/envs/amlenv/lib/python3.6/site-packages/torch/nn/functional.py:2016: UserWarning: Using a target size (torch.Size([401])) that is different to the input size (torch.Size([401, 1])) is deprecated. Please ensure they have the same size.\n",
      "  \"Please ensure they have the same size.\".format(target.size(), input.size()))\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x7fc3bc0b1898>]"
      ]
     },
     "execution_count": 218,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJztnXmYFNXV/7+nt9lXmGGHGXbZBBxRQBAMKkIibkk0idHEJW/UNzEaDe5bYlwSl+T1l4REE5O4xF1UAiKiiIpsyrDLAAMM2+z7TK/390fVra6qrupuhhlmujmf55mnu27drr53GL516txzzyEhBBiGYZjkwtHdA2AYhmE6HxZ3hmGYJITFnWEYJglhcWcYhklCWNwZhmGSEBZ3hmGYJITFnWEYJglhcWcYhklCWNwZhmGSEFd3fXHv3r1FUVFRd309wzBMQrJhw4ZqIURBrH7dJu5FRUVYv359d309wzBMQkJE++Lpx24ZhmGYJITFnWEYJglhcWcYhklCWNwZhmGSEBZ3hmGYJITFnWEYJglhcWcYhklCEk7c15XX4on3d8IXCHX3UBiGYXosCSfuG/bV4Q8fliEQYnFnGIaxI+HEndRXruvNMAxjT+KJu6rurO0MwzD2JJy4O1R1F2y6MwzD2JJw4i4JsbYzDMPYknDiTuyXYRiGiUniibv6KljdGYZhbEk8cZeGO2s7wzCMLYkn7uorazvDMIw9iSfuHC3DMAwTk4QTdwevpzIMw8Qk4cRdOt1DbLkzDMPYknDiLn3ubLozDMPYk3jizm4ZhmGYmCSeuEMuqHbzQBiGYXowiSfumuXO6s4wDGNHXOJORHOJaCcRlRHRQps+3yGibUS0lYhe7Nxh6r5HfWXLnWEYxh5XrA5E5ATwDIBzAVQAWEdEi4UQ23R9RgC4A8B0IUQdERV21YC1rJBd9QUMwzBJQDyW+xQAZUKIPUIIH4CXASww9bkOwDNCiDoAEEJUdu4wdaime4jTQjIMw9gSj7gPAHBAd1yhtukZCWAkEX1KRGuIaK7VhYjoeiJaT0Trq6qqOjRgit2FYRjmpCcecbfSU7PZ7AIwAsAsAFcA+BsR5UZ8SIhFQogSIURJQUHBsY5VGQxxtAzDMEws4hH3CgCDdMcDARyy6PO2EMIvhNgLYCcUse90OOUvwzBMbOIR93UARhBRMRF5AFwOYLGpz1sAZgMAEfWG4qbZ05kDlXDKX4ZhmNjEFHchRADATQCWAdgO4BUhxFYiepCILlS7LQNQQ0TbAKwEcJsQoqYrBsw7VBmGYWITMxQSAIQQSwAsMbXdq3svANyi/nQpXCCbYRgmNgm3Q1XCkZAMwzD2JJy4awWy2THDMAxjS+KJu/rKXhmGYRh7Ek/ceUGVYRgmJokn7pzyl2EYJiaJJ+6c8pdhGCYmCSfuDt7ExDAME5OEE3e5pMoFshmGYexJOHHn9AMMwzCxSTxx7+4BMAzDJACJJ+6c8pdhGCYmiSfu6itHyzAMw9iTeOLOPneGYZiYJK64d+8wGIZhejQJKO6c8pdhGCYWiSfu6iun/GUYhrEn8cSdU/4yDMPEJPHEXX1lrwzDMIw9iSfuvKDKMAwTk8QTd075yzAME5PEE3ctzp3VnWEYxo7EFffuHQbDMEyPJvHEnVP+MgzDxCTxxJ0jIRmGYWISl7gT0Vwi2klEZUS00OL81URURURfqT/Xdv5Q1e9SX1nbGYZh7HHF6kBETgDPADgXQAWAdUS0WAixzdT1P0KIm7pgjObxAOBoGYZhmGjEY7lPAVAmhNgjhPABeBnAgq4dlj1cIJthGCY28Yj7AAAHdMcVapuZS4molIheI6JBVhciouuJaD0Rra+qqurAcHmHKsMwTDzEI+5Wle3M0voOgCIhxAQAHwB43upCQohFQogSIURJQUHBsY1UDka6ZTr0aYZhmJODeMS9AoDeEh8I4JC+gxCiRgjh
VQ//CuC0zhleJNItw6GQDMMw9sQj7usAjCCiYiLyALgcwGJ9ByLqpzu8EMD2zhuiEe0xgrWdYRjGlpjRMkKIABHdBGAZACeA54QQW4noQQDrhRCLAfyMiC4EEABQC+Dqrhpw2C3D6s4wDGNHTHEHACHEEgBLTG336t7fAeCOzh2aNbygyjAME5uE3aHK4s4wDGNP4ok7OFqGYRgmFokn7pzyl2EYJiYJK+5cIJthGMaexBN3Th3GMAwTk8QTd15QZRiGiUniinv3DoNhGKZHk3jizgWyGYZhYpJ44s4pfxmGYWKScOLuYJ87wzBMTBJO3MEFshmGYWKScOJOVtnlGYZhGAOJJ+7qKxvuDMMw9iSeuHPKX4ZhmJgknrirr2y5MwzD2JN44s7RMgzDMDFJPHHnlL8MwzAxSTxx55S/DMMwMUlgce/ecTAMw/RkElDcOVqGYRgmFokn7uorW+4MwzD2JJ64c8pfhmGYmCSeuHPKX4ZhmJgknrhzyl+GYZiYxCXuRDSXiHYSURkRLYzS7zIiEkRU0nlDNH+H8sqWO8MwjD0xxZ2InACeAXABgDEAriCiMRb9sgD8DMAXnT1Iw/dobhlWd4ZhGDvisdynACgTQuwRQvgAvAxggUW/hwA8BqC9E8cXAS+oMgzDxCYecR8A4IDuuEJt0yCiSQAGCSHejXYhIrqeiNYT0fqqqqpjHizAoZAMwzDxEI+4W5XH0KSViBwAngRwa6wLCSEWCSFKhBAlBQUF8Y9SPxhitwzDMEws4hH3CgCDdMcDARzSHWcBGAfgIyIqB3AmgMVdtaiqWe5dcXGGYZgkIR5xXwdgBBEVE5EHwOUAFsuTQogGIURvIUSREKIIwBoAFwoh1nfFgDlahmEYJjYxxV0IEQBwE4BlALYDeEUIsZWIHiSiC7t6gGbCuWUYhmEYO1zxdBJCLAGwxNR2r03fWcc/LHvMKX/3VrdgQG4aPK6E24/FMAzTZSScIuqjZWqavZj9u49w3+It3TomhmGYnkbiibsu5W+LNwgAWF1W3Z1DYhiG6XEknrirr0KEXTShULcNh2EYpkeSeOKu26HqcCgHIQ6dYRiGMZB44q5L+RsKKaIeDIXFvd0fRE2zt1vGxjAM01NIPHHXpfyVoq633G94YSNO+/UH3TE0hmGYHkPiirsAApq4h89/uKMSANDsDZzooTEMw/QYEk/cdSl/pcWud8uke5wAgMrGLk1OyTAM06NJPHHXWe5Wbpn8DA8AoLKJ/e4Mw5y8JJ64q68COnEPsbgzDMPoSTxxp3C0TNDC566JO7tlGIY5iUk8cVdfBYS2oBoUkT73+lb/iR4awzBMjyHxxF1V99W7qjVfu94tEwgq731B3rbKMMzJSwKKu6Lu6/fV4b3SwwCMC6rSmvcFWNwZhjl5SThx17NypxLTrve5+1WL3c+WO8MwJzEJLe77aloj2qRbhsWdYZiTmYQWdytkBI0/yMnEGIY5eUkacfcHQwgEQ/Cr+X99gRAO1rd186gYhmG6h6QR9xF3/Rfz/vCJ5pZ5b/NhTH/kQ3y+uwYAUNXkRZsvaPv53VXN+O2S7YbIG4ZhmEQlacQdAL4+2hzha//qQD0A4PTffIBL//SZ7Wd/+u8N+MuqPdhXG+nH9waCaOFEZAzDJBBJJe6AMYkYADR7/Vix/SgAYNvhRtvPyWhKbyDSup/39CcYe9+yzhskwzBMF+Pq7gF0NgGTuD+zcndcn/O4lPucletmd1XL8Q+MYRjmBJJ0lnusEEg794oU96b2E+9+OdrYjseX7WB/P8MwnUbSibs3xs7Uf36+z7I9pRvFfeHrpXhm5W6s31d3wr+bYZjkJC5xJ6K5RLSTiMqIaKHF+f8hos1E9BURrSaiMZ0/1PioipHq97lP90b45QEgxaUkHGtqt084JrqoELfMg8MpExiG6SxiijsROQE8A+ACAGMAXGEh3i8KIcYLISYCeAzAE50+0k7giimDUNXk1SJo9MTjlumq
jVFOh/LdgRCLO8MwnUM8lvsUAGVCiD1CCB+AlwEs0HcQQujDUDKg1NLodtLcTsPxpEF5ABQfN2BcPHU5lIRk0Sz3Nr99nPzx4Fa/2+qJgmEYpiPEI+4DABzQHVeobQaI6EYi2g3Fcv+Z1YWI6HoiWk9E66uqqjoyXktkDnczKW7j9PrkpAJQimf/9r/bccq9S1HTrLhx5EJsYxTL3Wsj7qGQwPOflUfdJBUNl5MMY2AYhjle4hF3smiLMDGFEM8IIYYB+BWAu60uJIRYJIQoEUKUFBQUHNtIo2Ar7i7j9Ppmq+LeHsBfPt4DANhySHnokAux0dwydpb7ql1VuG/xVvz6vW3HNnAVl+qW6aonA4ZhTj7iEfcKAIN0xwMBHIrS/2UAFx3PoI6VdI91uL5cJJVIcd+v24W684hR3NstNjFJ2v1Gy7qyqR1vbKyAQ80xv/lgQ1zj3VPVjLoWn3bsVN0yrR20/BmGYczEI+7rAIwgomIi8gC4HMBifQciGqE7nA9gV+cNMTbxWO7/umYKctLdcDsJuyqbtPadR5oBhMXdzvUCRFrWsx//CLe8skmL0DlY14YWbwCLNx2KGllzzu8/xrlPrtKOpb/f7NYJhgQO1bdhS5w3DYZhGEnMHapCiAAR3QRgGQAngOeEEFuJ6EEA64UQiwHcRERzAPgB1AG4qisHbWZgXjp2HGmKaPfoxH3asN4AFCv/66OKoLschNoWRZilqJvj5PWLnO06cQ8EQ2hRxVg+CdS0+PDUB1/jr5/sRW6aGzNH2rueqpu9WLzpEHYdbYLDxnKf8eiHONTQjgG5afh04TnRfgUMwzAG4ko/IIRYAmCJqe1e3fufd/K4jonff/tUzH16FQ43tBvapbtD/z4zxaWlAh5emIlWXxC1Lb6wW8Zknetjz/WWe6vu/dMrwg8qzeoO2K2HGqOKOwD87KUvAQCXTFbWp83ifkidT0Nb9GLfje1+1Db7UNQ7I2o/hmFOHpJih2pOuhvXnFUc0S7rrerJSFFcOGluJ/rmpGJ3VTMmP7Qce6uV/DFmy12fSEzvsmm38Y/LWPhNFrH0gGLxm5E3kDafcmNYV16LWp1Pvs0fjOrmuexPn2HW7z6yPc8wzMlH0iQOsxLyYCiE5b+YaSjHJxdf8zM8SPc4Ud3sM3zGbLl7TZa7EAJbDzUiM8X6V7frqOIe+nBHJb4+2oSRfbKM17fYhSq/s9UXhC8Qwrf//DnGD8jRzUPAHxTwuKwCl6C5mRiGYSRJYbkDxtQAWamK8G452IgRfbIwZ0wf7ZwU5V6ZHqS5IwXaHBGjF/snln+N1zcexDf/uBrvbLIOGNqpirsvGMJ5T66KsNTNNw9A8dUDirhXNimuGHPkTTwx9FZPBdFYvava8ITAMEzykDTiHtKJ+9KbZwIActLcEf2kW0Za7mb0bpiKulbc9mqpdnygtg0rd1YCALYfsc4N3+4PGXbGfrG31nDeSqRr1KeHyqZ2nPXoSsvrxhMDb/VUYIcvEMIPnv0CVz23Nu7PMAyTOCSkuL95w7SI6BFptF4/cygG5Kbh9Z9OxVs3To/4bEFWCgAgL91a3PWW+y/+8xXWlivi/NNZwwCEfelev72Q/mh6ERZM7A8A2G4qEGJVDERaz9Hyxscj7seyQ1bG88snDYZhkouEFPdJg/MwIDfN0CYtd+l6P21IPootokfG9FN82a2+ANJUcXc7Ca/+z1Rcfvogg/huPRQW5rOG94aDgIo6JdLGF8UFMqwgE099dyI8Lgeqmo1ZKtt8kZ+TETZR89r4gnhtQwXeKz0MQHFDtfqMu2nb/UE0tfvRGOU6Enlzcli78RmGSXASUtytkD53p8XCqp5hBYrg17b4kKEurqa5nTi9KB+FWSlo94fwyroDaGr3G0ITpw7thQzdIqrZcr9z3mjtfd+cVBARCjJTUHqgweBnj7YDNlrWyTZ/AL98dRNufHEjWrwB/Oa97Rhz7zItCRqgiPupD7yPCfe/H/V3
IPsCAFlml2AYJtFJGnGXhrQzhik6eUgeFkzsj/svHKtZ7nKRNUX1ld/+einKq5UIm5+dMxwf3DITDgchVedLN4v0D6cWae/7qgnKstPc+HxPDX756ibtXDyuk/nj+0W06S3+hW9sxt9W7wUAbK4IL7y2+YOIN7GkfEKJcS9kGCZBSRpxnzVK2TB0zujCqP3cTgeevnwSxvbP0Xzu/VQXjz5dwQ51wXTehH4YXqiEM+oXSs0JxlLdTrjV7I4yh02lalW/v/Wo1s8qWsbMdTOHomRInqFN73P/cn+4YpO+6Lf5xtHY7scLX+yzLN8n1xZY2xkmOUlocR+cn669P3VQLsofmY9Jg/OifMKITC3QXxV3tzP867jtNSVKZkh+2G+fqkshXNcaGUL4wS1n45FLxmvuG+n79gVD+L8PlV2s8SyMprmdhrEAMPjXh/QKz3unLu2C/tqhkMBfV+3BXW9uwTul4bDNsspmHGloD7tliHCkoR3Fd7yH0gplsfhgfRvOePgDlFd3bmHweG5sDMN0Dgkt7ktvnoEv7zm3w5+Xi6Kj+mQCiFzQdDtJc90AMLhl6lsjFy2H9MrA5VMGa8f/+NEU7f3v3v8aQPQom/D3OLQc7xJ9+cC6Fj/OKM7H5MG5qGkJt2/U1WBtag9o4/1kV7XWPueJj3Hmb1dosfUEYOXOSggBvLBmPwDgnU2HcLTRixfX7rcc3+aKBpT8ermWCz8eXttQgdH3LNV2AjMM07UktLine1zIy/B0+POXnTYQd807BdfPVMIczYKdaqrkZD6OxfThvXHhqUpIZLa6scpsucvQzCzdYm2a2xmxdqBfOK1q9qK4dwZ6ZaagvtWv9f3Dh2Van9pWn/ZksvVQI46Y8u785F8bACg+d9lPJjCT7im7DJl//ng3qpt9WF1WbXneiqVbjgAAvubQS4Y5ISS0uB8vKS4nrps5VMseKV06M0YoGSTNZfrM4h5r8RYA7vmmUm62sT2A//dRWYS491JvTjJ5GKAs7Jqjfo42hq3kqiYvememIDfNjT3VLZbl+WpbfNqTyPbDjTjztyssx0dEWhip06G4eWSUkDnPjkQ+VXSkLGAX1RhnGMbESS3uZuZP6IeN95yLqcN6AYgU71RTZadXfjI15jULslIwdahyvceW7kSjLsMjEfD05ZNw17xTcOqgXK09luUOAL0zPcjL8BiyVurZdKAetS3GJxGrhdWGNj+qVZePkwjnP7UKjy/bCSCKuGsFva2VesO+Onz/b2sMYwtPJ/Iz5dUt+GDb0Yh2hmE6Dou7ifwMD9Jt3C9pph2tVukNrNDnlden781P92BU3yxcN3OoYbes20kxxT0r1Y3cdPvvf/DdbXh9Y4WhrcVnXUJQunPMydesdtMC4eIiATUuv6KuFfe9vUXLbfOr10vxaVkNyirDCc2ihVzOfXoVrv3nevsODMMcMyzuFsjMkWYXQqqpbF9uuhtv3jANq381O+r19CGW9TpxP0t1/wBAmq5UIFGkuB+qN4p7RorTMjPlDWqaBCvkTlg7zAW67RZ/pVtGRvDc9mopnv98HzaoC7ryRlVvEVFk5ZaRYZkcTcMwnQeLuwVmC12iD4UEFMt90uA8DMxLt+wvSdE9CdQ2+zCkVzr+95zh+O0l47X23pnGhWGXSdzN6Q4yUlyWxbz7qDH2VtQ0R88AaV4PsHPLyBuP/H7pe5deGinu1zy/Xou9j2cnrAwv3XKwQVuAXbOnxjY3vhVFC9/Dr14rjd2xk6lt8UXcHBmmO2Fxt0CKkzD5h80uZnMsuh0eXb8Dda0o6pWBW88bZSjsPbww0/AZR4zF2nSPC9PUtQE9MvrGCrNrx8wbGw8aju38+dKi154E1KHK35dciG7zB7FqV5Xhs1ZueumykTefb/5xNf7n30o0z+WL1mDBM59GHbeZ/6w/cEz9j5dAMITJDy3HXW9uPqHfyzDRYHG3QFruZhdCtMRe0UjRWfwVdW2WvvoUk8vHbLmbyUxx
YdLgPNz3rTGGdpnL3gp9xE08yBQLZZXN2m5bIGzhy9+HHKr0weufMuQCshRwK+tW5vipa/XhDdM6wbEQrVqVZMvBBtz79paIvs3eAO57ewtaYriurKhVnzjeVZO6MUxPIGkqMXUmeotaT6OFGyQePM5Id44VP5pehAY11t4qzHJIr3StqpR8ujCHZ2aY/PAZHqdWyFsWAokX6QOf88THAICHFozFxZMHaqGS0i3jIGOBb/2isaxrK8Xd6mkgzeNEszeA2hYfbnllU8T5eImWqVNy1XNrUdPiw81zRiI/w4MtBxuwYnslgqEQnv98H/rnpuEnZ9uvW1ghnzjMobMM052wuFsQdssYkVbowxePx6mDchAvKa74xP2+b43V3t90zgi8tNboXhhRmKmJu1xMNa8DmBdZhxdmYpOaXOxYLfejjV5DpaZ73t6KpVuP4NOyGgDAkcZ2VDV5NeFu8yti39Dmx0UT+2N1WTUONygpkqXPXS/AtS0+uJ2EDI8TVQDqWnzwOB1an3gs8e2HG/HTf2/Aoh+WGNYb9te0YnCvyLUQ6e6SkUDf/ONqw3lXnK42PZq426zVxEOLN4Cx9y3DQxeNw5VnDunwdRhGwm4ZC+wssFvOG4n+Oam4cGJ/jO0fv7ib/eenF+fH/MyA3DQ8/+MphjaZbRIA0tWKUmZ3jrkAyTCdL/8lXTqBV34yNSInvpmGNj8mP7Tc0CaFHQC+3F+P03/zAdbsUQqaaJZ7qx85aW70z03T8t9L9Jb75IeWY8ZjK7Wnj9pWvyFsNFoKZMlnu2tQXtOKha+XGkI3Zz5uXdHKrf5btHitI3M6Yn3LFBDHY7lXqnsNnv1kT4evwTB6WNwtkAJjNhynDeuNz+74hm1xbDvkdU4dmIOHLhqHs0cWxPW5mSN647FLJ2C2mvGyX05YjKWom58KslKMTwXFvSILluz89VxMKc7Hry8aF/ccoiGjZVq9SgHxJm8A2WlujO2fgw376hT3jo3Pvb7Vr7VVN3vR7A1o8fvxrHFIH/nB+ra48vY4TWGcZsy/T8muo0229WZlkXWryl7xEgypxVO4egrTSbC4W5CX7kGf7BQ8uGBs7M5xIKNI5o7rd0yP3ESE75w+SHNT9LUIcxxWoFjmd1wwGh/fNgs5uo1NG+85F98uGRTxGXljmD26EOvvnhP/RGLQ6gui1ReEEIp7aP74fmj1BbFs6xGtj5XPXZYX3FejvMqbmFXmTTNyHaGyyWvw9Uv2VrfggXe2artz5e5aO8vdLqXCuU+uwjf/8InlOZlALV6XTkVdK4oWvod3ddk65XhiLaQzTLzE9ddIRHOJaCcRlRHRQovztxDRNiIqJaIVRJTQTkOPy4Ev7pyDeRZFMzqCtNw7WhhDJjTrlxMp7kW9M7D+7jm4fuZQDDFZ6fkZHvTNScWu31yAPtnWIZK9M+1DJ/Xc960xuPas4qh9Wv0BzZLOSFFCNYcXZuLZ1Xs1/7m00q1EVBZI6a/O05w+wQqZLVMI4BWLEMgbXtiIv39ajt1Vym5Zp+aWsbbcrXblyrEfarBekJapneOtYbtNLd/41pfh0FM5HqeD7S2mc4j5l0RETgDPALgAwBgAVxDRGFO3LwGUCCEmAHgNwGOdPdBERopDR40yKe59c1Lx/74/GbfPHWU43zszJSJ1gB6304E3bogsFi558dozdNdSNlP1zU7F3fNP0drTPU70Um8EHqcDP54eKfRtvqAW+56Z4oLDQThreG/srW7R/OeLPtmDO94otdwte7C+TZsnEJ/lXtXkRZ76tPLPz/dFnJfuDnkvkZaxXSoGq41bdpu5tPOqO6iirhV3vbk55k5beWPTR0TJiKYOrOcyjCXx/ClNAVAmhNgjhPABeBnAAn0HIcRKIUSrergGwMDOHWZiI4Wlo/VKz1AXYPtkp2Le+H64YdbwqP3f/8VM/P3q0w1t0qUzbkB2RP9pw8NpEKQbJyiEYTE21e3UMlgOzEvD4HzFdaJff2j1
BTX3ggzJzEp1odkb0Nwx7f4QXlp7wDYVQn6GBxPVJGp1Oh/3BU9/grV7a7XjirpWfFZWjapmL8YNsF/clr9z+cQgBbXVxsq2EvJYBcflZxrbA3jhi/14Z9OhqP0DVuLOljvTycTzlzQAgP55t0Jts+MaAP89nkElG8frlnn4kvH46JezImLY7RjZJwuzTeUGnQ7C4pum418/PsPmUwr56YqAB0PCINzpHhd6qVa9LxjSUiroI25afQFNtDPUaJ6sVBeEMObUAcKLpZMG5xrafzStCL3VXba1Ost9++FG3Pv2Fu34rEdX4nt/+wINrX4U945cNJbjkr9zKebSL27rlrGwuptj7G8wW+rtphtEmy9oWA9o06z08H8/+Xtzssud6STiUQurPzfLVSci+gGAEgBn25y/HsD1ADB48GCrLkmJzJcezXUSjVS3E0UWAnasTBiYG7NPoeqbb/UFTOLu1KJBfIGQdsMakJeGnWoBjo376lHVpKQLlrtOs1IVl0mdKdJEboDqk2VcRyguyNAiVsyfkQvL+s1Yzd4AslONEULjB+TgaGM7PtlVpX1GumFcJsudKHzzTXE54LXYCGWVw0eP2dr3m47Pe+pjHKhtQ/kj8wGEnwT0Qt6qjY8td6ZziOcvqQKAPuRiIICI504imgPgLgAXCiEsd8sIIRYJIUqEECUFBfGFAyYD180cismDc3HxpGgPPD2DInVRtt0fMoh7mseJXhmK8PuDIe2GVaBbkD3S2I515UpmSL1bBgD217ZCzyHVv25e6B2cn65F89SZKmPJ9AZHG8J/XiEBZJpSLmSnuVDZ5MWVz67FHjUSR1rL0iUiLXf97TbF5bAMp7RyIb22oQJFC99DbYsvYhE2EDJe40CtMda/0bSzV/kO9RoJbLl/ub/OEJnEdC/xiPs6ACOIqJiIPAAuB7BY34GIJgH4CxRhr+z8YSY2A3LT8MYN05F/HCUBTxTFBeEnBLPlnq+6ZUb0ycKZagGSBZP64+8/Oh0f3DLTsNCbmWK03M38/OWvAACFpvDOoQWZmuUubwCS/bWtOFjfhiavUfTN+XTMsf4AcKC2FcPvXKJlmJSCrX+a8ric2FRRj1ZfAG2+IJ5dvRfBkIjIoQMAz39Wrl03wnJQ8x9BAAAdhUlEQVSPsflKXk/m6Pn3mn346ypl81KsxVs9Ww42oN0fjJkQ7kTxfx+W4e+fluPdzZxjpycQ0y0jhAgQ0U0AlgFwAnhOCLGViB4EsF4IsRjA4wAyAbyq/mfZL4S4sAvHzXQyfbJTcLTRa3Bx6H386W4XMlNc+Pc1Z2Bs/2zkZXg0N4NkeGEWHluqumV0PvdoyMXQa88qxt1qSUKZUsFs7QPA7Mc/wh+/N8nQlpniwl3zTsFvlmy3/c6PdlYZKkfJHPIOAqTdXd/qQ3WzFzMeXYnLThuIv6zag8KsFM2nrt89K/cuCN21JN5ACKGQgDcQMqQkaPcHkep2orFNubG0+IK49vl1+GB72B6yq1trZtnWI1odXADY8/C8bt8AJUNxN5TXarWDme4jrhU6IcQSAEtMbffq3nfeThimW3j/F2drrorvlgzC4F7p8LgccDkIgZDQ8uHoC4xEQyZfy44h7mePLMC/rzkDpxfnaW3SLWNOXQAofndzXvqsVBcWTBygE/dIy93sWpGCrUTTKEItxb+mxYcjqjXsDYQ0n7s+xbP0vLR6AxFumeb2AJ5esQtPr9iFbQ+er7U3tPmR6nZqlnuLN4BVaoET87gkP/7HOnynZBDmjusLIQTufHMLvnv6IHy00/iA3NjuR2569z4ZyoikeBK4fbSzEi6HI+6/J+bY4cRhDAAlmZkU8Ecvm6C1f3XfeThc32bY+RqNwqwUVDZ5tTA/O7eMHvN/8BR3dG+hvnyf1XdYWe7muHatMImNsStF1kHh3DEpLgfuenMz1u6t1aJumrwBeP0h5Gd4tPQEzV4/Vqri+8WecPhmQ5sffbJTtQXVDSZhV743nDTt
uU/L8eGOSny4oxLlj8xHkzeAl9bux7ulh3BKX2NIa22LL6q4v1d6GMMLMzGqb5ah/UhDO/zBEAblRy84Ew8y3DUe19LVf18HABFPf0znwUvzTFQyU1wY0ScrdkeV9342A2/eME07Nkey6Dl3TB/Ldn0yNLdFbODBeqO7Rvr337xhGq49q9gyx4tcWJWELXdrpOukqT2AXUeVm0kwJPDCF/uxq7JZ25jW3B6ANxDSNlLJzxSpGSk/3xNOtCbDIaNF38gc+qUVDXjo3W2Gc3JBORQS2g1HIm8s+2qsi43f+OJGnP/Uqoj2M3+7AjMes06ydqzIJ5hjWTeobj62TKVM/LC4M51KQVYKJg0Ou1jSPE58cMtMy77/Z/KdS/TibFXCcLOawlgixX3S4Dzc/c0xBt+4HdJyt4tOlULc0ObHLvVJQS9actdws+qWydNZzU3tAe3zR3QpC2Su/mjiLqN6rDZZyRuSVTCKFPc5T3wctdi4lfBLapq9uOetLR2uZSt/P3YVvKz4eGdV7E4qDa3+DhfMORlhcWe6nOGFWSi9/zxcMSUcUds/JzUiXbHE7XRoETMFFrlvzDlezG6YeMRduj/sdg1vO6zkf6lp9moJzfTiLn3yzapbJk8XCdXiC6BKtUj1kSz/+Kwce6qa0djut31q8QZCqG3xRYRT6r8/JETEIq4U91iROtf+cz1CIWEp4H/8sAz/WrMvotyimSsWrcGMxz60HV884i7LQb71VfTv0nPqg+9j0oPLY3dkALC4MyeI7FS3ocJVLAGWPvs+umRpdsEg5uInDpM5PrKPsT4toHPL6Lpa+eoP1rcjJJScO1bJzhrb/PAFQ9rOXkCxvivVwigHdeGcq8uq8e0/f46m9gBG9smMqNA1tr/iR39twwHLcoTS7SFEZDHzWlMenoDu8+Zx//LVTRh9z9KIYihyrSNWTp/P99Rosfsrd1SiaOF72F/Tqo0vHnGXUUHlNS0xehoJcAx93LC4MycMvaDPnxA946bUHZnDBgDW3PkN7b1+z4B5569ZAGaOiNwwJ8Vd33f1r87BI5eM145HFGbiSKMiYnaLldJi1lvu1c1ezYo9aIrVr2nxIRgSyE51R5RSHNUnC32zU1FW2WyZklhusAoJEZGPvtYUQaR/yjCL7RtqNkrzDULeJK1SJ1vhD4a0TJybDzZo47PKrGlGPnn4AyzWXQWLO3PCkKGE3z9jMG49d1TUvjKOfLAuikPukAWAL1Shv3paUcRnAyar95R+xsiSrBQX2vxKYREpfPPG90VOmtvg41dqrCrumVyb0og1Utx1C6pS8FNcjoiCL9oYLMQdBOSmu1Hb4rfMfaO/IUW4ZVrjF3fJ4q/CG80DwZDmJmtojU/cjza2a24gl5PCbpkYoZDBkND6WD2hMJ0DiztzwpB+9KxUd8wNN1IU9XVR9WLodjqw46G5uO9b5uzT4agSiTn8ryArBfWtfq1k4E2zh+OPV0wGYKxJe9lp4eSmesv91nNHYs0d38DEQbk4oG600qdAkA8D/aOUMcxOc0W4mQiE/AwP6lt9likPokWh1Lb48PQHu3R9w9azN2htSS98Y7P2vs0f1G4e8VruB+vaNHH2OB1xu2X0Y4snJp7pGCzuzAlDhjUei7WWbWMxA0pCNatkbGa3jLlQtsw6+YNnvwCg7MSVN45UXR1Uffy93jLPVYugDOmVrkXSpFosDltVzpL4AqGIZHAOUqqA1XZQ3J/84GvtWG/Zmzd9WdHmC2p+8Pq22P0BZVFZ/lsaLPcY4i7Hlup2HJfl/sq6A/hkVxW2HGzAf9btj/2BkwzexMScML57+mCsK6/DT84eGrOvlGfzDtfrZw7FoLzohb1lzvpTB+Vi++HGiFh7cwSO3lLVpwvQb44arXPtyDEN0bmMrG5C+qRoY/tnY6tagQlQXEV/u6oEU36zQmsjAvIy3KhvtXbLRPNll5rCQ72BIO5fvBXeQBAvrY2sUGVmysMrtIyZbXHUogWU
G4J8SgqGhOZz14v7df9cj0snD8DcceE1Funrz0p129aljUWzN4DbXy81tH339JMn02w8sLgzJ4ycNDf++sOS+Dqr6m7efXrnvFMsOhuZMaIAa+74hlbRyYzMe3PNWcUYnJ+OueP6aufSdJZ7hk7oZ4zojexUFxrbA9rC42BdWcO8dDfeuGEa3t10GM99uhdAONwPAJ753mTc+OJGbD3UiIcvHq+tA8yf0A/vlSqJtqYP742yymbUt/oiYuE/2VWF/24+YmjT74o14/WH8A81uVm8yCeedjXGfn9NK55YvhOPXDpBe6LRR960+YOaWyUQFBGbmLyBIJZvO4rl244adqK2a+LuQlWTF8GQiFx/iIFMv5Dmdmo3CyGE4UnOFwjB6aBjvnaywG4ZpkcS9s93zP6wE3YgbFkWZKXgqmlFBr++3i2jF4o0txMrbp2F33/7VExXK1fpnyBy092YPDjPEHapF/ecNLe2oJxrkcph4QWjsWDiAOSlexASkbH8Vz67Fu+Zsi1a7QGQHMsuUTPtgSACwRB++eomvPXVIXyk22ikj7Bp8we1eHx/MBThlrHbrNWus9zlZ/Ws+roK3/j9R4YnFXPYZnm1EkI5Qvf7Ns955N3/xY0vbDS0fbKrSqtheyL4Yk+NbdH1robFnemR/OcnU/G/5wxHmtuJj2+bhXduOuu4rqcPnZRRIVYFx/WWu550jxMFWSm49LSBmkjrF0zlgqverVOoK0SSmerSBMoqJYN04cgxlVbUx5yT/uZhpral49v623xB3P5aKdaWK3lxKupaUbTwPbxbesgQgvna+gotmsinE3dZ8CSWuEv3llnc71u8FburWgx58PWbs4IhgaON4cLoEqtdvUu3Gp92rnx2Leb94RM8s7IM9XHU6D0eVu+qxncXrcHfPtnTpd9jB4s70yMZ0z8bt543CkSEIb0yMH6gfZ3UePhs4Tna+4UXjMYNs4Zh3vjIWHv5xHDakDxDu34DlqQw22iZA8abg1583U4HgqoS6SNr5LOB3Hg1dVgvOCic3iAa8vqXnTYQZw7NN5yLtct0qpqP34p2f1CLhQeATao//88f7zaUHNxTHd6A5PWHEAwJECmWe0iXBx8IW94Vda14bnU5gPBNzryr1mMqhbhi+1FDGgdfIKTt/NU/SejXKYKG9M6Rov/4sp34/ftfR7QfL7e+sglFC98DAOyqVCqUmesSnChY3JmTAr27JS/Dg9vnjjak8JU4HIQlP5uBf/zIWGA81SJTpTHBmXJefxMwW9bSQHVZ+IClCyg33YPRpoyPo/tGJm7LTHFhpJrQLd3jxLj+xpvf+1FyyADQ6uFaYY6hlyIdCgHn/P5jy880qcKapeb5mfn4Smw/HHZ/FN+xBHurWwzupSwby13ulK1tUaKGrnl+PWY+Hk5u1u4PorJJsdzbdNb6/Yu34s0vKwDA8IQx+p6lWL2rOsK1Y+WLF0LYFm8XQsR0sby+sUJ7L5PPRYv46kpY3JmThhW3no2/m0TbijH9syMWcuOtf6t3y5gLml9zVjEAYJBuo5SUCr3wmCt2mcW9/JH52PLA+Rik7t71OB1aCuJ4iSY45thzuWgbstuRBWj5d+RaR0VdG371+mZDnxXbj2r7AvRjMIdOSsu9psVnmSjspXX7sUut26u3ylfsqMQv/rMJgFH0AeCTsiq0mNqs1j6e/6wc4+5bFrGzGABueulLDLtzCRra/Fijy/ZphRBCi8LKTnVj5c5KbZ3gRMHizpw0DCvIxOxRhZ16zaG9wwW9AWNpwlRT/pzLThuI8kfmx8yNb86Vk2bhEgLCNw+Py4HxakWrX180Tjsf7X5kFZdvx0G1aEq0+PXPdytiVxylkHu7P2jYg1CYFa7Jq0emqaht8VqmYXhs6U5NqM0pFAAlJbLZ/y6EkgdIj9Vn31UjlyosqoDJqKYzH16Byxet0W5oVviDYXH3uBz40d/XYdbvlEXif63ZhzLVZdOVsLgzTBSW3jwDf7ny
tCjnZ6L0/vO0Y727I8VmcVaPlf6arWq7SD55I3E7HZg/oR+W3TwT35oQLm/XO0o0jZWbyQ6ZYqFKdYVkpkTebHZVNqNXhkdzdTx66fiIPr8z+bhl/Vx/UODfa/Zp6YjlA0JNi88y3l8ysk+mpT+9vKYlQtxDIaEVSZE0Wyz4yqeTaDuo2+LYyesLhrTv098Ua1t8uOetLVi7N7JQS2fD4s4wURjdNxvnj+1re97jchh87/q87ikuB+6ad4ql0EVDxtcPL8zEnFP6aBb4zXNGGHLjZ3jCljugpFnQV7GKLu5O28ggO6Rf/cnvTtTabjs/nCNoSK90XD2tGNmpLpw7xv53JpHfX9Xkxd1vbdHy0MuqWTXN0cW9pCjfMrf9gbq2iMRq/1l3AHOf+sTQpvetV9S1YnNFg+X1QiER4a8HwmsT9a0+/GvNPkMfrz+oPSnos2zWtShteXFWNjseWNwZphMx57+5bubQqDsnx6oLoQN0YZXSf37xpAH421UlWs75vHQPhheG/e/SgtanDta7iHpHWTRNdTssF1Vj1bw1X/ebuuyefXNSMaU4H6X3n4/8DI9tznqJTEexfJsSrihvatKirmry2i5uOkhxs1mx7VAjvj5qLMXYpLvOzXNGYEy/bIPl/uTyXfjpCxs0gf72nz/HloNKlNDwu5bgmucjC6DIp4Z3Sg/jnre2GPYm6END9dWmZAWtE1HvlsWdYbqRn8wcisU3TUdJUTiUUWp1SDUjL5k8AAAwc6QxdXFehhselwO9s6zTH8ey3K3OR0t2ZnXd7FS3Fv3TN9v42T99fzJevPYM2+vIm5IURemmkYJe2eSNqH0rWXrzTMtyigDw6NIduPPNzZbnAGDBxAHISnUZBL+2xYvDDe2GNYF3Sg9BCIGQAD7cURlxnTa/stmrTnVb6V1EXn9I2+Clz+1z5bNrASj/dl0Npx9gmG7E4SBMGJhraHOqAi3j4icNzrMsJJ2V6saHt55tm6AsmuU+ojBLC9W85qxiPLtaSZnQLycVO440wekgy7A/j8th2PyVmerSBLFvjvFm4XI6kB9lDG5XOOQRCK8/SHGvamq3XFAFlDDKY3UrSbJTXchKdeFQfdjSbvYGEAwJg5Xt9Yei7jc40tCO4Xf9V7u56X3r3kBIy7VjVSc2jy13hkk84inzF4101d1iFQ9vZmBeum0YpN7CvmHWMMO5SYNztfNDeqVjhpoBU363ORxTMrJPpuH79HsF+ljcZKyicm6fOwr//PEU7bNS3Nv8QfiDIbT6giACqpt9touWmSmuY6rVqic33YPMFJfB5SN308qdr4DiWjnUYL8BaccRJY5f3tx8phz6sth5tWq5z9BlGbUKw+xsWNwZppNZ/avZeP8X1kXB4+GqqUW4fuZQ/FiNiz9WpLtDH5Z4+9zRKH9kPh65ZDyumDIIqW4nbpw9DJMH52L++H5adInMVzPcxp89qo+ywep/zxluWCfQf1aPPu7/vDF9sOm+83DDrOGYObJA87lLy7bVF9QicmRop124YYbHZVlyz6qkohmng5Cb7kF1s1dzfVmlSthysMGQAiHF5dBKISrjNqYv0O8P8AaC2oKr9LM/tCAcpmpXP7gziUvciWguEe0kojIiWmhxfiYRbSSiABFd1vnDZJjEoTArVds92hHSPE7cOe8Uy5QH8SAjZoYWRNZpvXzKYPz2kgkAFKv/jRumo1dmihaVcsnkAXjgwrG478LIIigAcP5YZZH01vNG4VNdSgfAWGpQorfcF/2wxBDDL8cmhbWx3Y9fvqpsQhqnivueqhZDdk6Jw0H4dslAlJjSRAwvjC3ugLJRrdUX1FIoWG2WKq1owP2Ltxqurb+hyfh/SaRbRrlhSpGP5qLqCmKKOxE5ATwD4AIAYwBcQUTmf/n9AK4G8GJnD5BhmGND+qJT3Q6su3sOVt02O+ZnpOWemeLCVdOKIjZSAcBjl03AeVHCQq38yClR4unN6R+EAD5TN0PJdAq7q5qR
merCVVOHYJTphul2OnDLuSMNbXNOUW4+Dy4Yi4cvtg9BPVVd5yitqI+acuBIY9gvf0q/bEO20QOmjU6GClOBENp1Yu8gINPjwt9/dLqhTm9XEo/lPgVAmRBijxDCB+BlAAv0HYQQ5UKIUgBcM4thuhnpX89L9yAnzR1RicqKX5w7AkA4WsZs8QPAd0oGRb2GlR85Jcr6gzvKOVlwpa7Vjz7ZqXhgwTgs+8VM3DBrmCHEcuqwXvjdt081HK+/ew5+OLUIV0wJj/dP359suP6wggwQKamDW3xBy/h2MxdNHIDhhZlaCGqT6YawfFs4ombHkSaDJS9LS84eVYjLp5yYoiLxPPcNAKAv5VIBwD6+iWGYbuXq6cW4evqx+esvnjQQF08K14w17661suTNpFpEr0TLySN97laM6psFIsWa10fn3D53dMT1LzttIF5ZfwBr99Yize3UYsiJCCVD8jC0IANzTDH3LqcDOWlu7Ktt1UIZ7ZhzSh88fPE4FGan4oyh+Zg7ti/mPPExGk1++pfWhkv9Pbp0h+FcdtqJD0yM5xut/gU6lH2eiK4HcD0ADB7MJbEYpqdittxvnzvKpmfHydCtKeSlu1Gnhh1eOnkgUlxO9MpIQXWzF/1yYsfeL7ryNKwvr4vYHPTaT6dp788eWWAoep6X7sHbXx3C218dsrzmqQNzsKmiAUN6pWsx+G6nA4XZqUh1OyPEPRrx3Bw7m3jEvQKA/nlsIADr30YMhBCLACwCgJKSku4pT8IwTEz0VvVnC8+Ja3PTsZLuccLtJPiDAgVZKahr9SM33Y3ff0dxs2SkOFHdbF1UxUxuuifCOjfz/I+nGI5jVfkyR+7oSbPZQGWHVYGWriYen/s6ACOIqJiIPAAuB7C4a4fFMEx3onenxBL2Mf2yMaZftu35rFQX5losxOq/Y9aoQvTO9GDRleEau2cUK7t2B+XHXjPoCOYiIfNNxVvkztkpxcZCKID1mkQ0ukPcY1ruQogAEd0EYBkAJ4DnhBBbiehBAOuFEIuJ6HQAbwLIA/AtInpACDG2S0fOMEyXUxillJ9kyc9nRD2/+f7zbc/JWPXThuRFFD9/5JIJ+Nap/XFGsX3VqONBH90CAE9dPhEPXTQOi786iLwMDzI8LizbesTy5mZV0i8aPdUtAyHEEgBLTG336t6vg+KuYRgmSXj3f8+KWmi8M5CJFK12tzochBkjCiLaOwuvruIUkeJPz8/wGBaj7Vw9V08rwm+WbEe/nFQcNhUzt6KnLqgyDHMSMs7C19xVxPOE0Nn0zkrBwfo2LL15hqGYeTxcN3Mo5o7riz9/vBsvfLHfcC471RWx2Noj3TIMwzBdxeD8dOyvbY2awbKr+PMPJmPV11URNWvjZVB+umVmygvG9UOLL6BVdQLiy7bZ2XBuGYZhuo2Xrz8Tf7nytONOttYR+uWkRc21Hw9Th/XC/AnGhVh/MKQtBktGWRQ572pY3BmG6Tb656ZFrXTV0zlndB888z3j7ld/SOAHZw7Bcl3yuHhz3nQmLO4MwzCdSCAYAhFhRJ8sPHDhWJw1vLfl7t2uhn3uDMMwnYg+fv6qaUW4alpRt4yDLXeGYZjj5MbZwzB7lBK2KcvrdTcs7gzDMMfJbeePxnUzhgIAXI6eIavslmEYhukEzhzaCzfOHtZtbhgzLO4MwzCdgMNBuO380bE7niB6xvMDwzAM06mwuDMMwyQhLO4MwzBJCIs7wzBMEsLizjAMk4SwuDMMwyQhLO4MwzBJCIs7wzBMEkJCiNi9uuKLiaoA7Ovgx3sDqO7E4fREkn2OyT4/IPnnmOzzA3rmHIcIIWLWH+w2cT8eiGi9EKIkds/EJdnnmOzzA5J/jsk+PyCx58huGYZhmCSExZ1hGCYJSVRxX9TdAzgBJPsck31+QPLPMdnnByTwHBPS584wDMNEJ1Etd4ZhGCYKCSfuRDSXiHYSURkRLezu8XQEInqOiCqJaIuuLZ+IlhPRLvU1T20nIvqD
Ot9SIppsf+WeAxENIqKVRLSdiLYS0c/V9qSYJxGlEtFaItqkzu8Btb2YiL5Q5/cfIvKo7SnqcZl6vqg7xx8vROQkoi+J6F31ONnmV05Em4noKyJar7Ylxd9oQok7ETkBPAPgAgBjAFxBRGO6d1Qd4h8A5praFgJYIYQYAWCFegwocx2h/lwP4E8naIzHSwDArUKIUwCcCeBG9d8qWebpBXCOEOJUABMBzCWiMwE8CuBJdX51AK5R+18DoE4IMRzAk2q/RODnALbrjpNtfgAwWwgxURfymBx/o0KIhPkBMBXAMt3xHQDu6O5xdXAuRQC26I53Auinvu8HYKf6/i8ArrDql0g/AN4GcG4yzhNAOoCNAM6AsuHFpbZrf68AlgGYqr53qf2ou8ceY14DoYjbOQDeBUDJND91rOUAepvakuJvNKEsdwADABzQHVeobclAHyHEYQBQXwvV9oSfs/qIPgnAF0iieaoui68AVAJYDmA3gHohREDtop+DNj/1fAOAXid2xMfMUwBuBxBSj3shueYHAALA+0S0gYiuV9uS4m800WqokkVbsof7JPSciSgTwOsAbhZCNBJZTUfpatHWo+cphAgCmEhEuQDeBHCKVTf1NaHmR0TfBFAphNhARLNks0XXhJyfjulCiENEVAhgORHtiNI3oeaYaJZ7BYBBuuOBAA5101g6m6NE1A8A1NdKtT1h50xEbijC/oIQ4g21OenmKYSoB/ARlLWFXCKSRpN+Dtr81PM5AGpP7EiPiekALiSicgAvQ3HNPIXkmR8AQAhxSH2thHKDnoIk+RtNNHFfB2CEumLvAXA5gMXdPKbOYjGAq9T3V0HxUcv2H6or9WcCaJCPjD0ZUkz0ZwFsF0I8oTuVFPMkogLVYgcRpQGYA2XhcSWAy9Ru5vnJeV8G4EOhOm57IkKIO4QQA4UQRVD+n30ohPg+kmR+AEBEGUSUJd8DOA/AFiTJ32i3O/07sAAyD8DXUPybd3X3eDo4h5cAHAbgh2INXAPFP7kCwC71NV/tS1AihHYD2AygpLvHH+ccz4LyyFoK4Cv1Z16yzBPABABfqvPbAuBetX0ogLUAygC8CiBFbU9Vj8vU80O7ew7HMNdZAN5Ntvmpc9mk/myVepIsf6O8Q5VhGCYJSTS3DMMwDBMHLO4MwzBJCIs7wzBMEsLizjAMk4SwuDMMwyQhLO4MwzBJCIs7wzBMEsLizjAMk4T8f4EKZWp5UBiaAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# plt and %matplotlib inline are already set up in the imports cell at the top\n",
    "\n",
    "e_losses = []\n",
    "num_epochs = 20\n",
    "for e in range(num_epochs):\n",
    "    e_losses += train_epoch(net, opt, criterion, dataloader)\n",
    "plt.plot(e_losses)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [],
   "source": [
    "import seaborn as sns  # TODO: move this import to the top-level imports cell\n",
    "\n",
    "y_pred_lst = []\n",
    "y_truth_lst = []\n",
    "mean_lst=[]\n",
    "with torch.no_grad():\n",
    "    for i, (X, y) in enumerate(test_dataloader):\n",
    "        \n",
    "        output = trainer.likelihood(trainer.model(X.to(trainer.device)))\n",
    "        \n",
    "        mean_lst.append(output.mean.float().cpu().numpy())\n",
    "        y_pred = output.mean.ge(0.5).float().cpu().numpy()\n",
    "        \n",
    "        y_pred_lst.append(y_pred)\n",
    "        y_truth_lst.append(y.cpu().numpy())\n",
    "        \n",
    "    truth = np.concatenate(y_truth_lst)\n",
    "    pred = np.concatenate(y_pred_lst)\n",
    "    mean = np.concatenate(mean_lst)\n",
    "    # TODO: re-enable once roc_auc_score/accuracy_score are imported at the top:\n",
    "    # auc = roc_auc_score(truth, pred)\n",
    "    # accuracy = accuracy_score(truth, pred)\n",
    "\n",
    "# print(\"AUC score: \", round(auc, 2))\n",
    "# print(\"Accuracy score: \", round(accuracy, 2))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(1, 6755)"
      ]
     },
     "execution_count": 51,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mean.reshape(1,-1).shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [
    {
     "ename": "AttributeError",
     "evalue": "'list' object has no attribute 'shape'",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mAttributeError\u001b[0m                            Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-54-4ab80bed8057>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmeshgrid\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmean\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;31m#grid.shape\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;31mAttributeError\u001b[0m: 'list' object has no attribute 'shape'"
     ]
    }
   ],
   "source": [
    "np.meshgrid(mean)[0].shape  # np.meshgrid returns a list of arrays; index before .shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "cmap = sns.diverging_palette(250, 12, s=85, l=25, as_cmap=True)\n",
    "fig, ax = plt.subplots(figsize=(16, 9))\n",
    "# NOTE(review): `grid` is never defined in this notebook — this cell fails under Restart & Run All; confirm where `grid` was built\n",
    "contour = ax.tricontour(grid[0].ravel(), grid[1].ravel(), mean, cmap=cmap)\n",
    "#ax.scatter(X_test[pred==0, 0], X_test[pred==0, 1])\n",
    "#ax.scatter(X_test[pred==1, 0], X_test[pred==1, 1], color='r')\n",
    "cbar = plt.colorbar(contour, ax=ax)\n",
    "_ = ax.set(xlim=(-3, 3), ylim=(-3, 3), xlabel='X', ylabel='Y');\n",
    "cbar.ax.set_ylabel('Posterior predictive mean probability of class label = 0')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "cmap = sns.cubehelix_palette(light=1, as_cmap=True)\n",
    "fig, ax = plt.subplots(figsize=(16, 9))\n",
    "# NOTE(review): `grid`, `ppc`, and `X_test` are not defined anywhere in this notebook — cell fails on a fresh kernel\n",
    "contour = ax.contourf(grid[0], grid[1], ppc.std(axis=0).reshape(100, 100), cmap=cmap)\n",
    "ax.scatter(X_test[pred==0, 0], X_test[pred==0, 1])\n",
    "ax.scatter(X_test[pred==1, 0], X_test[pred==1, 1], color='r')\n",
    "cbar = plt.colorbar(contour, ax=ax)\n",
    "_ = ax.set(xlim=(-3, 3), ylim=(-3, 3), xlabel='X', ylabel='Y');\n",
    "cbar.ax.set_ylabel('Uncertainty (posterior predictive standard deviation)');"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:amlenv]",
   "language": "python",
   "name": "conda-env-amlenv-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
