{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "0uS5M7jfP-Al",
    "outputId": "38920e86-6d2a-4135-dd12-491a38db4fab"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "bengin_traffic.csv  sample_data\n"
     ]
    }
   ],
   "source": [
    "!ls"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "x2qbnjrWP-As"
   },
   "outputs": [],
   "source": [
    "# Core data stack.\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from scipy import stats\n",
    "import tensorflow as tf\n",
    "import pickle\n",
    "# Evaluation metrics (used when scoring reconstruction-error anomaly detection).\n",
    "from sklearn.metrics import confusion_matrix, precision_recall_curve\n",
    "from sklearn.metrics import recall_score, classification_report, auc, roc_curve\n",
    "from sklearn.metrics import precision_recall_fscore_support, f1_score\n",
    "# Preprocessing — only StandardScaler is used in the visible cells; the others\n",
    "# may be exercised further down the notebook. NOTE(review): standalone `keras`\n",
    "# is mixed with `tensorflow` — consider `tensorflow.keras` for consistency.\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from sklearn.preprocessing import scale\n",
    "from keras.models import Model, load_model, Sequential\n",
    "from keras.layers import Input, Dense\n",
    "from keras.callbacks import ModelCheckpoint, TensorBoard"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "27VMlgfZP-Av"
   },
   "outputs": [],
   "source": [
    "# Load the benign (normal) traffic samples used to train the autoencoder.\n",
    "# NOTE: 'bengin'/'bening' are typos of 'benign', but the spelling matches the\n",
    "# actual file name on disk (see the `!ls` output above) — do not \"fix\" it\n",
    "# without renaming the file as well.\n",
    "df_bening = pd.read_csv('bengin_traffic.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "kjSLFbAhP-Ay",
    "outputId": "0c7287da-45be-42ab-acdf-62aadf6e6674"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(40395, 115)"
      ]
     },
     "execution_count": 4,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_bening.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "VGZGzJEi9VGR"
   },
   "outputs": [],
   "source": [
    "#df_bening = (df_bening - df_bening.mean()) / df_bening.std()\n",
    "#df_bening = scale(df_bening)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "gh3tg77_AiY_"
   },
   "outputs": [],
   "source": [
    "# Standardize features to zero mean / unit variance (tanh-activated layers\n",
    "# below expect roughly centered inputs).\n",
    "# FIX: fit the scaler ONLY on the first third of the rows — the training\n",
    "# portion per the np.split boundaries in the next cell — instead of\n",
    "# fit_transform on the full dataset, which leaked validation/test statistics\n",
    "# into the normalization (data leakage). The transformed array keeps the same\n",
    "# shape, so the downstream split is unaffected.\n",
    "scaler = StandardScaler()\n",
    "n_train = int((1/3) * len(df_bening))\n",
    "scaler.fit(df_bening.iloc[:n_train])\n",
    "df_bening = scaler.transform(df_bening)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "ORF702HoP-A2"
   },
   "outputs": [],
   "source": [
    "# Three equal chronological (row-order) partitions: train on the first third,\n",
    "# use the middle third (x_opt) for threshold tuning / validation, and hold out\n",
    "# the final third for testing (each 13465 x 115 per the printout below).\n",
    "x_train, x_opt, x_test = np.split(df_bening, [int((1/3)*len(df_bening)), int((2/3)*len(df_bening))])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 68
    },
    "colab_type": "code",
    "id": "i1G9vonQP-A4",
    "outputId": "b22223a3-22fa-42ac-c8ad-be50164ac255"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(13465, 115)\n",
      "(13465, 115)\n",
      "(13465, 115)\n"
     ]
    }
   ],
   "source": [
    "print(x_train.shape)\n",
    "print(x_opt.shape)\n",
    "print(x_test.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "s38Jxq73P-A7"
   },
   "outputs": [],
   "source": [
    "# Training hyperparameters — presumably consumed by the compile/fit cell\n",
    "# further down (not visible here); verify they are actually passed through.\n",
    "nb_epoch = 800\n",
    "input_dim = x_train.shape[1]  # 115 features\n",
    "learning_rate = 0.012\n",
    "\n",
    "# Symmetric deep autoencoder: 115 -> 86 -> 57 -> 37 -> 28 -> 37 -> 57 -> 86 -> 115.\n",
    "# tanh hidden activations suit the standardized (zero-mean) inputs; the final\n",
    "# Dense layer has no activation (linear) so reconstructions can span the full\n",
    "# real-valued range of the scaled features.\n",
    "autoencoder = Sequential()\n",
    "autoencoder.add(Dense(int(0.75 * input_dim), activation=\"tanh\", input_dim=input_dim))\n",
    "autoencoder.add(Dense(int(0.5 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.33 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.25 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.33 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.5 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.75 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(input_dim))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 27234
    },
    "colab_type": "code",
    "id": "Rd-G5V6gP-A9",
    "outputId": "cd436274-7c4d-4ee1-b398-82e38729400b"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 13465 samples, validate on 13465 samples\n",
      "Epoch 1/800\n",
      "13465/13465 [==============================] - 1s 109us/step - loss: 1.0663 - val_loss: 0.7341\n",
      "Epoch 2/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.9301 - val_loss: 0.6533\n",
      "Epoch 3/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.8739 - val_loss: 0.6169\n",
      "Epoch 4/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.8442 - val_loss: 0.5960\n",
      "Epoch 5/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.8254 - val_loss: 0.5825\n",
      "Epoch 6/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.8120 - val_loss: 0.5724\n",
      "Epoch 7/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.8010 - val_loss: 0.5639\n",
      "Epoch 8/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7911 - val_loss: 0.5560\n",
      "Epoch 9/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.7818 - val_loss: 0.5485\n",
      "Epoch 10/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7729 - val_loss: 0.5416\n",
      "Epoch 11/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.7645 - val_loss: 0.5350\n",
      "Epoch 12/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.7566 - val_loss: 0.5289\n",
      "Epoch 13/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7491 - val_loss: 0.5231\n",
      "Epoch 14/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7420 - val_loss: 0.5177\n",
      "Epoch 15/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7353 - val_loss: 0.5127\n",
      "Epoch 16/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7289 - val_loss: 0.5080\n",
      "Epoch 17/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7229 - val_loss: 0.5035\n",
      "Epoch 18/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.7172 - val_loss: 0.4995\n",
      "Epoch 19/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7118 - val_loss: 0.4956\n",
      "Epoch 20/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.7066 - val_loss: 0.4919\n",
      "Epoch 21/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.7016 - val_loss: 0.4885\n",
      "Epoch 22/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6968 - val_loss: 0.4852\n",
      "Epoch 23/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.6922 - val_loss: 0.4820\n",
      "Epoch 24/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6877 - val_loss: 0.4790\n",
      "Epoch 25/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.6833 - val_loss: 0.4761\n",
      "Epoch 26/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6790 - val_loss: 0.4734\n",
      "Epoch 27/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.6749 - val_loss: 0.4707\n",
      "Epoch 28/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6708 - val_loss: 0.4681\n",
      "Epoch 29/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6668 - val_loss: 0.4658\n",
      "Epoch 30/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6629 - val_loss: 0.4635\n",
      "Epoch 31/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6591 - val_loss: 0.4612\n",
      "Epoch 32/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6554 - val_loss: 0.4591\n",
      "Epoch 33/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6518 - val_loss: 0.4570\n",
      "Epoch 34/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.6482 - val_loss: 0.4565\n",
      "Epoch 35/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6456 - val_loss: 0.4578\n",
      "Epoch 36/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6432 - val_loss: 0.4513\n",
      "Epoch 37/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6395 - val_loss: 0.4503\n",
      "Epoch 38/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6370 - val_loss: 0.4483\n",
      "Epoch 39/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6334 - val_loss: 0.4467\n",
      "Epoch 40/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6308 - val_loss: 0.4448\n",
      "Epoch 41/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6285 - val_loss: 0.4449\n",
      "Epoch 42/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6254 - val_loss: 0.4420\n",
      "Epoch 43/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6220 - val_loss: 0.4467\n",
      "Epoch 44/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6195 - val_loss: 0.4390\n",
      "Epoch 45/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6168 - val_loss: 0.4378\n",
      "Epoch 46/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6150 - val_loss: 0.4358\n",
      "Epoch 47/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6117 - val_loss: 0.4447\n",
      "Epoch 48/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.6099 - val_loss: 0.4332\n",
      "Epoch 49/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.6068 - val_loss: 0.4317\n",
      "Epoch 50/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6043 - val_loss: 0.4306\n",
      "Epoch 51/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.6022 - val_loss: 0.4328\n",
      "Epoch 52/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.6008 - val_loss: 0.4281\n",
      "Epoch 53/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5975 - val_loss: 0.4265\n",
      "Epoch 54/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5958 - val_loss: 0.4255\n",
      "Epoch 55/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5936 - val_loss: 0.4245\n",
      "Epoch 56/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5911 - val_loss: 0.4236\n",
      "Epoch 57/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5900 - val_loss: 0.4222\n",
      "Epoch 58/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5874 - val_loss: 0.4209\n",
      "Epoch 59/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5862 - val_loss: 0.4199\n",
      "Epoch 60/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5834 - val_loss: 0.4215\n",
      "Epoch 61/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5811 - val_loss: 0.4178\n",
      "Epoch 62/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5804 - val_loss: 0.4220\n",
      "Epoch 63/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5785 - val_loss: 0.4209\n",
      "Epoch 64/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5762 - val_loss: 0.4160\n",
      "Epoch 65/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5758 - val_loss: 0.4160\n",
      "Epoch 66/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5727 - val_loss: 0.4132\n",
      "Epoch 67/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5718 - val_loss: 0.4117\n",
      "Epoch 68/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5688 - val_loss: 0.4131\n",
      "Epoch 69/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5697 - val_loss: 0.4099\n",
      "Epoch 70/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5656 - val_loss: 0.4127\n",
      "Epoch 71/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5661 - val_loss: 0.4114\n",
      "Epoch 72/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5642 - val_loss: 0.4071\n",
      "Epoch 73/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5626 - val_loss: 0.4063\n",
      "Epoch 74/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5599 - val_loss: 0.4061\n",
      "Epoch 75/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.5592 - val_loss: 0.4050\n",
      "Epoch 76/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5592 - val_loss: 0.4044\n",
      "Epoch 77/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5563 - val_loss: 0.4036\n",
      "Epoch 78/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5552 - val_loss: 0.4024\n",
      "Epoch 79/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5539 - val_loss: 0.4016\n",
      "Epoch 80/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5539 - val_loss: 0.4012\n",
      "Epoch 81/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5512 - val_loss: 0.4002\n",
      "Epoch 82/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5508 - val_loss: 0.3991\n",
      "Epoch 83/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.5486 - val_loss: 0.4006\n",
      "Epoch 84/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5486 - val_loss: 0.3976\n",
      "Epoch 85/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5452 - val_loss: 0.4052\n",
      "Epoch 86/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5460 - val_loss: 0.3966\n",
      "Epoch 87/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5443 - val_loss: 0.3990\n",
      "Epoch 88/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5431 - val_loss: 0.3952\n",
      "Epoch 89/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.5426 - val_loss: 0.3948\n",
      "Epoch 90/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5402 - val_loss: 0.3934\n",
      "Epoch 91/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5408 - val_loss: 0.3929\n",
      "Epoch 92/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5372 - val_loss: 0.3932\n",
      "Epoch 93/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5393 - val_loss: 0.3961\n",
      "Epoch 94/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5368 - val_loss: 0.3912\n",
      "Epoch 95/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5350 - val_loss: 0.3931\n",
      "Epoch 96/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5346 - val_loss: 0.3919\n",
      "Epoch 97/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5337 - val_loss: 0.3892\n",
      "Epoch 98/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.5327 - val_loss: 0.3887\n",
      "Epoch 99/800\n",
      "13465/13465 [==============================] - 1s 86us/step - loss: 0.5306 - val_loss: 0.3878\n",
      "Epoch 100/800\n",
      "13465/13465 [==============================] - 1s 85us/step - loss: 0.5310 - val_loss: 0.3908\n",
      "Epoch 101/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5289 - val_loss: 0.3962\n",
      "Epoch 102/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5292 - val_loss: 0.3856\n",
      "Epoch 103/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5273 - val_loss: 0.3862\n",
      "Epoch 104/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5259 - val_loss: 0.3849\n",
      "Epoch 105/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5252 - val_loss: 0.3854\n",
      "Epoch 106/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5252 - val_loss: 0.3860\n",
      "Epoch 107/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5239 - val_loss: 0.3857\n",
      "Epoch 108/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5229 - val_loss: 0.3858\n",
      "Epoch 109/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5208 - val_loss: 0.3853\n",
      "Epoch 110/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5212 - val_loss: 0.3873\n",
      "Epoch 111/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5197 - val_loss: 0.3820\n",
      "Epoch 112/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5193 - val_loss: 0.3805\n",
      "Epoch 113/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5173 - val_loss: 0.3838\n",
      "Epoch 114/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5181 - val_loss: 0.3793\n",
      "Epoch 115/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5157 - val_loss: 0.3792\n",
      "Epoch 116/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5159 - val_loss: 0.3783\n",
      "Epoch 117/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5133 - val_loss: 0.3773\n",
      "Epoch 118/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5142 - val_loss: 0.3766\n",
      "Epoch 119/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5122 - val_loss: 0.3761\n",
      "Epoch 120/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5119 - val_loss: 0.3765\n",
      "Epoch 121/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5111 - val_loss: 0.3747\n",
      "Epoch 122/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.5089 - val_loss: 0.3748\n",
      "Epoch 123/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5095 - val_loss: 0.3734\n",
      "Epoch 124/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5080 - val_loss: 0.3735\n",
      "Epoch 125/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5081 - val_loss: 0.3729\n",
      "Epoch 126/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.5050 - val_loss: 0.3737\n",
      "Epoch 127/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5062 - val_loss: 0.3717\n",
      "Epoch 128/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5028 - val_loss: 0.3751\n",
      "Epoch 129/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5053 - val_loss: 0.3700\n",
      "Epoch 130/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5039 - val_loss: 0.3696\n",
      "Epoch 131/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5009 - val_loss: 0.3691\n",
      "Epoch 132/800\n",
      "13465/13465 [==============================] - 1s 97us/step - loss: 0.5007 - val_loss: 0.3685\n",
      "Epoch 133/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.5007 - val_loss: 0.3691\n",
      "Epoch 134/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4998 - val_loss: 0.3673\n",
      "Epoch 135/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4974 - val_loss: 0.3711\n",
      "Epoch 136/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4987 - val_loss: 0.3662\n",
      "Epoch 137/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4969 - val_loss: 0.3659\n",
      "Epoch 138/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4963 - val_loss: 0.3666\n",
      "Epoch 139/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4948 - val_loss: 0.3653\n",
      "Epoch 140/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4947 - val_loss: 0.3641\n",
      "Epoch 141/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4945 - val_loss: 0.3638\n",
      "Epoch 142/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4932 - val_loss: 0.3631\n",
      "Epoch 143/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4912 - val_loss: 0.3628\n",
      "Epoch 144/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4904 - val_loss: 0.3650\n",
      "Epoch 145/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4910 - val_loss: 0.3643\n",
      "Epoch 146/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4886 - val_loss: 0.3609\n",
      "Epoch 147/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4900 - val_loss: 0.3619\n",
      "Epoch 148/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4865 - val_loss: 0.3602\n",
      "Epoch 149/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.4872 - val_loss: 0.3600\n",
      "Epoch 150/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4876 - val_loss: 0.3588\n",
      "Epoch 151/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4851 - val_loss: 0.3594\n",
      "Epoch 152/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4831 - val_loss: 0.3575\n",
      "Epoch 153/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4846 - val_loss: 0.3593\n",
      "Epoch 154/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4824 - val_loss: 0.3586\n",
      "Epoch 155/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4834 - val_loss: 0.3558\n",
      "Epoch 156/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4811 - val_loss: 0.3589\n",
      "Epoch 157/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4808 - val_loss: 0.3577\n",
      "Epoch 158/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4796 - val_loss: 0.3682\n",
      "Epoch 159/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4796 - val_loss: 0.3586\n",
      "Epoch 160/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4784 - val_loss: 0.3530\n",
      "Epoch 161/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4771 - val_loss: 0.3579\n",
      "Epoch 162/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4758 - val_loss: 0.3523\n",
      "Epoch 163/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4772 - val_loss: 0.3516\n",
      "Epoch 164/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4741 - val_loss: 0.3512\n",
      "Epoch 165/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4753 - val_loss: 0.3547\n",
      "Epoch 166/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4724 - val_loss: 0.3616\n",
      "Epoch 167/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4745 - val_loss: 0.3539\n",
      "Epoch 168/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4718 - val_loss: 0.3486\n",
      "Epoch 169/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4713 - val_loss: 0.3515\n",
      "Epoch 170/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4712 - val_loss: 0.3480\n",
      "Epoch 171/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4697 - val_loss: 0.3472\n",
      "Epoch 172/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4697 - val_loss: 0.3480\n",
      "Epoch 173/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4681 - val_loss: 0.3463\n",
      "Epoch 174/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.4670 - val_loss: 0.3471\n",
      "Epoch 175/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.4664 - val_loss: 0.3452\n",
      "Epoch 176/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4677 - val_loss: 0.3460\n",
      "Epoch 177/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4648 - val_loss: 0.3455\n",
      "Epoch 178/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4644 - val_loss: 0.3439\n",
      "Epoch 179/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4642 - val_loss: 0.3440\n",
      "Epoch 180/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4622 - val_loss: 0.3446\n",
      "Epoch 181/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.4627 - val_loss: 0.3446\n",
      "Epoch 182/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.4612 - val_loss: 0.3431\n",
      "Epoch 183/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4616 - val_loss: 0.3428\n",
      "Epoch 184/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4592 - val_loss: 0.3442\n",
      "Epoch 185/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4605 - val_loss: 0.3408\n",
      "Epoch 186/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4576 - val_loss: 0.3398\n",
      "Epoch 187/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.4577 - val_loss: 0.3398\n",
      "Epoch 188/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4572 - val_loss: 0.3396\n",
      "Epoch 189/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4569 - val_loss: 0.3416\n",
      "Epoch 190/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4553 - val_loss: 0.3378\n",
      "Epoch 191/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4544 - val_loss: 0.3382\n",
      "Epoch 192/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4560 - val_loss: 0.3419\n",
      "Epoch 193/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4533 - val_loss: 0.3444\n",
      "Epoch 194/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4529 - val_loss: 0.3364\n",
      "Epoch 195/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4528 - val_loss: 0.3359\n",
      "Epoch 196/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4501 - val_loss: 0.3369\n",
      "Epoch 197/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4509 - val_loss: 0.3381\n",
      "Epoch 198/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4505 - val_loss: 0.3355\n",
      "Epoch 199/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4483 - val_loss: 0.3344\n",
      "Epoch 200/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4490 - val_loss: 0.3339\n",
      "Epoch 201/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4477 - val_loss: 0.3336\n",
      "Epoch 202/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4472 - val_loss: 0.3336\n",
      "Epoch 203/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4458 - val_loss: 0.3328\n",
      "Epoch 204/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4465 - val_loss: 0.3337\n",
      "Epoch 205/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4463 - val_loss: 0.3310\n",
      "Epoch 206/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4438 - val_loss: 0.3331\n",
      "Epoch 207/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4436 - val_loss: 0.3327\n",
      "Epoch 208/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4431 - val_loss: 0.3299\n",
      "Epoch 209/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4431 - val_loss: 0.3293\n",
      "Epoch 210/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4415 - val_loss: 0.3293\n",
      "Epoch 211/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4398 - val_loss: 0.3290\n",
      "Epoch 212/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4408 - val_loss: 0.3293\n",
      "Epoch 213/800\n",
      "13465/13465 [==============================] - 1s 99us/step - loss: 0.4384 - val_loss: 0.3276\n",
      "Epoch 214/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 0.4391 - val_loss: 0.3295\n",
      "Epoch 215/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 0.4398 - val_loss: 0.3269\n",
      "Epoch 216/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.4367 - val_loss: 0.3276\n",
      "Epoch 217/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 0.4373 - val_loss: 0.3276\n",
      "Epoch 218/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 0.4360 - val_loss: 0.3421\n",
      "Epoch 219/800\n",
      "13465/13465 [==============================] - 1s 103us/step - loss: 0.4359 - val_loss: 0.3259\n",
      "Epoch 220/800\n",
      "13465/13465 [==============================] - 1s 99us/step - loss: 0.4344 - val_loss: 0.3247\n",
      "Epoch 221/800\n",
      "13465/13465 [==============================] - 1s 96us/step - loss: 0.4352 - val_loss: 0.3243\n",
      "Epoch 222/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 0.4333 - val_loss: 0.3239\n",
      "Epoch 223/800\n",
      "13465/13465 [==============================] - 1s 97us/step - loss: 0.4324 - val_loss: 0.3244\n",
      "Epoch 224/800\n",
      "13465/13465 [==============================] - 1s 97us/step - loss: 0.4321 - val_loss: 0.3233\n",
      "Epoch 225/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4316 - val_loss: 0.3222\n",
      "Epoch 226/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4310 - val_loss: 0.3222\n",
      "Epoch 227/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4296 - val_loss: 0.3306\n",
      "Epoch 228/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4294 - val_loss: 0.3211\n",
      "Epoch 229/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.4291 - val_loss: 0.3215\n",
      "Epoch 230/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4289 - val_loss: 0.3253\n",
      "Epoch 231/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4273 - val_loss: 0.3250\n",
      "Epoch 232/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4273 - val_loss: 0.3196\n",
      "Epoch 233/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4264 - val_loss: 0.3200\n",
      "Epoch 234/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4254 - val_loss: 0.3199\n",
      "Epoch 235/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.4250 - val_loss: 0.3184\n",
      "Epoch 236/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4246 - val_loss: 0.3179\n",
      "Epoch 237/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4230 - val_loss: 0.3172\n",
      "Epoch 238/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.4242 - val_loss: 0.3172\n",
      "Epoch 239/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.4217 - val_loss: 0.3164\n",
      "Epoch 240/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4220 - val_loss: 0.3166\n",
      "Epoch 241/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4219 - val_loss: 0.3711\n",
      "Epoch 242/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4213 - val_loss: 0.3155\n",
      "Epoch 243/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4191 - val_loss: 0.3148\n",
      "Epoch 244/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4192 - val_loss: 0.3150\n",
      "Epoch 245/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4192 - val_loss: 0.3160\n",
      "Epoch 246/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4187 - val_loss: 0.3191\n",
      "Epoch 247/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4169 - val_loss: 0.3133\n",
      "Epoch 248/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4176 - val_loss: 0.3134\n",
      "Epoch 249/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4154 - val_loss: 0.3128\n",
      "Epoch 250/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4155 - val_loss: 0.3158\n",
      "Epoch 251/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4139 - val_loss: 0.3122\n",
      "Epoch 252/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4164 - val_loss: 0.3120\n",
      "Epoch 253/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4129 - val_loss: 0.3120\n",
      "Epoch 254/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4132 - val_loss: 0.3109\n",
      "Epoch 255/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4119 - val_loss: 0.3112\n",
      "Epoch 256/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4117 - val_loss: 0.3121\n",
      "Epoch 257/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4105 - val_loss: 0.3109\n",
      "Epoch 258/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4127 - val_loss: 0.3091\n",
      "Epoch 259/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4096 - val_loss: 0.3099\n",
      "Epoch 260/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4087 - val_loss: 0.3136\n",
      "Epoch 261/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4087 - val_loss: 0.3099\n",
      "Epoch 262/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4095 - val_loss: 0.3090\n",
      "Epoch 263/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4074 - val_loss: 0.3083\n",
      "Epoch 264/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4077 - val_loss: 0.3081\n",
      "Epoch 265/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4058 - val_loss: 0.3097\n",
      "Epoch 266/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4059 - val_loss: 0.3071\n",
      "Epoch 267/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.4060 - val_loss: 0.3061\n",
      "Epoch 268/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4040 - val_loss: 0.3072\n",
      "Epoch 269/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4039 - val_loss: 0.3055\n",
      "Epoch 270/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4048 - val_loss: 0.3049\n",
      "Epoch 271/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4018 - val_loss: 0.3096\n",
      "Epoch 272/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4025 - val_loss: 0.3049\n",
      "Epoch 273/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.4021 - val_loss: 0.3045\n",
      "Epoch 274/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4014 - val_loss: 0.3063\n",
      "Epoch 275/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.4003 - val_loss: 0.3038\n",
      "Epoch 276/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3996 - val_loss: 0.3029\n",
      "Epoch 277/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.4002 - val_loss: 0.3025\n",
      "Epoch 278/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3988 - val_loss: 0.3049\n",
      "Epoch 279/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.3989 - val_loss: 0.3021\n",
      "Epoch 280/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3992 - val_loss: 0.3027\n",
      "Epoch 281/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3959 - val_loss: 0.3015\n",
      "Epoch 282/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3965 - val_loss: 0.3021\n",
      "Epoch 283/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3950 - val_loss: 0.3020\n",
      "Epoch 284/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3957 - val_loss: 0.3007\n",
      "Epoch 285/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3961 - val_loss: 0.3002\n",
      "Epoch 286/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3932 - val_loss: 0.3010\n",
      "Epoch 287/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3931 - val_loss: 0.2996\n",
      "Epoch 288/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3930 - val_loss: 0.3066\n",
      "Epoch 289/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3923 - val_loss: 0.2995\n",
      "Epoch 290/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3912 - val_loss: 0.2993\n",
      "Epoch 291/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3917 - val_loss: 0.3064\n",
      "Epoch 292/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3921 - val_loss: 0.2999\n",
      "Epoch 293/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3900 - val_loss: 0.2992\n",
      "Epoch 294/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3902 - val_loss: 0.2971\n",
      "Epoch 295/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3881 - val_loss: 0.2988\n",
      "Epoch 296/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3896 - val_loss: 0.2972\n",
      "Epoch 297/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3877 - val_loss: 0.2990\n",
      "Epoch 298/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3880 - val_loss: 0.2973\n",
      "Epoch 299/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3869 - val_loss: 0.2960\n",
      "Epoch 300/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3868 - val_loss: 0.2958\n",
      "Epoch 301/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3850 - val_loss: 0.2965\n",
      "Epoch 302/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3854 - val_loss: 0.2958\n",
      "Epoch 303/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3838 - val_loss: 0.2947\n",
      "Epoch 304/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3846 - val_loss: 0.2940\n",
      "Epoch 305/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3847 - val_loss: 0.2952\n",
      "Epoch 306/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3830 - val_loss: 0.2941\n",
      "Epoch 307/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3817 - val_loss: 0.2939\n",
      "Epoch 308/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3827 - val_loss: 0.2931\n",
      "Epoch 309/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3808 - val_loss: 0.2937\n",
      "Epoch 310/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3809 - val_loss: 0.2943\n",
      "Epoch 311/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3802 - val_loss: 0.3039\n",
      "Epoch 312/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3800 - val_loss: 0.2927\n",
      "Epoch 313/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3791 - val_loss: 0.2930\n",
      "Epoch 314/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3779 - val_loss: 0.2918\n",
      "Epoch 315/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3797 - val_loss: 0.2916\n",
      "Epoch 316/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3774 - val_loss: 0.2907\n",
      "Epoch 317/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3767 - val_loss: 0.2909\n",
      "Epoch 318/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3773 - val_loss: 0.2921\n",
      "Epoch 319/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3759 - val_loss: 0.2958\n",
      "Epoch 320/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3755 - val_loss: 0.2955\n",
      "Epoch 321/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3747 - val_loss: 0.2905\n",
      "Epoch 322/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3742 - val_loss: 0.2891\n",
      "Epoch 323/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3745 - val_loss: 0.2915\n",
      "Epoch 324/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3727 - val_loss: 0.2885\n",
      "Epoch 325/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3733 - val_loss: 0.2906\n",
      "Epoch 326/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3721 - val_loss: 0.2897\n",
      "Epoch 327/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3718 - val_loss: 0.2892\n",
      "Epoch 328/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.3713 - val_loss: 0.2903\n",
      "Epoch 329/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3708 - val_loss: 0.2872\n",
      "Epoch 330/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3703 - val_loss: 0.2869\n",
      "Epoch 331/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3699 - val_loss: 0.2879\n",
      "Epoch 332/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3690 - val_loss: 0.2880\n",
      "Epoch 333/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3687 - val_loss: 0.2877\n",
      "Epoch 334/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3690 - val_loss: 0.2866\n",
      "Epoch 335/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3679 - val_loss: 0.2879\n",
      "Epoch 336/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3662 - val_loss: 0.2853\n",
      "Epoch 337/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3674 - val_loss: 0.2851\n",
      "Epoch 338/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3652 - val_loss: 0.2848\n",
      "Epoch 339/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3671 - val_loss: 0.2851\n",
      "Epoch 340/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3648 - val_loss: 0.2851\n",
      "Epoch 341/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3647 - val_loss: 0.2841\n",
      "Epoch 342/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3645 - val_loss: 0.2837\n",
      "Epoch 343/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3639 - val_loss: 0.2837\n",
      "Epoch 344/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3635 - val_loss: 0.2934\n",
      "Epoch 345/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3621 - val_loss: 0.2829\n",
      "Epoch 346/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3617 - val_loss: 0.2833\n",
      "Epoch 347/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3621 - val_loss: 0.2834\n",
      "Epoch 348/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3609 - val_loss: 0.2843\n",
      "Epoch 349/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3609 - val_loss: 0.2831\n",
      "Epoch 350/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3599 - val_loss: 0.2813\n",
      "Epoch 351/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3607 - val_loss: 0.2811\n",
      "Epoch 352/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3587 - val_loss: 0.2817\n",
      "Epoch 353/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3581 - val_loss: 0.2812\n",
      "Epoch 354/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3583 - val_loss: 0.2843\n",
      "Epoch 355/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3581 - val_loss: 0.2807\n",
      "Epoch 356/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3564 - val_loss: 0.2813\n",
      "Epoch 357/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3570 - val_loss: 0.2809\n",
      "Epoch 358/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3567 - val_loss: 0.2798\n",
      "Epoch 359/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3560 - val_loss: 0.2794\n",
      "Epoch 360/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3552 - val_loss: 0.2814\n",
      "Epoch 361/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3549 - val_loss: 0.2825\n",
      "Epoch 362/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3544 - val_loss: 0.2797\n",
      "Epoch 363/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3537 - val_loss: 0.2783\n",
      "Epoch 364/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3537 - val_loss: 0.2815\n",
      "Epoch 365/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3529 - val_loss: 0.2782\n",
      "Epoch 366/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3534 - val_loss: 0.2778\n",
      "Epoch 367/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3523 - val_loss: 0.2789\n",
      "Epoch 368/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3521 - val_loss: 0.2782\n",
      "Epoch 369/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3508 - val_loss: 0.2804\n",
      "Epoch 370/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3510 - val_loss: 0.2788\n",
      "Epoch 371/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3497 - val_loss: 0.2764\n",
      "Epoch 372/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3492 - val_loss: 0.2775\n",
      "Epoch 373/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3492 - val_loss: 0.2763\n",
      "Epoch 374/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3508 - val_loss: 0.2763\n",
      "Epoch 375/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3483 - val_loss: 0.2781\n",
      "Epoch 376/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3475 - val_loss: 0.2798\n",
      "Epoch 377/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.3475 - val_loss: 0.2748\n",
      "Epoch 378/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3478 - val_loss: 0.2763\n",
      "Epoch 379/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3466 - val_loss: 0.2747\n",
      "Epoch 380/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3460 - val_loss: 0.2759\n",
      "Epoch 381/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3459 - val_loss: 0.2741\n",
      "Epoch 382/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3453 - val_loss: 0.2738\n",
      "Epoch 383/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3447 - val_loss: 0.2737\n",
      "Epoch 384/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3444 - val_loss: 0.2730\n",
      "Epoch 385/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3434 - val_loss: 0.2733\n",
      "Epoch 386/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3428 - val_loss: 0.2731\n",
      "Epoch 387/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3428 - val_loss: 0.2763\n",
      "Epoch 388/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3423 - val_loss: 0.2727\n",
      "Epoch 389/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3426 - val_loss: 0.2765\n",
      "Epoch 390/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3420 - val_loss: 0.2726\n",
      "Epoch 391/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3412 - val_loss: 0.2721\n",
      "Epoch 392/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3405 - val_loss: 0.2717\n",
      "Epoch 393/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3411 - val_loss: 0.2709\n",
      "Epoch 394/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3396 - val_loss: 0.2704\n",
      "Epoch 395/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3391 - val_loss: 0.2705\n",
      "Epoch 396/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3390 - val_loss: 0.2702\n",
      "Epoch 397/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3385 - val_loss: 0.2702\n",
      "Epoch 398/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3379 - val_loss: 0.2698\n",
      "Epoch 399/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3383 - val_loss: 0.2732\n",
      "Epoch 400/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3372 - val_loss: 0.2715\n",
      "Epoch 401/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3385 - val_loss: 0.2724\n",
      "Epoch 402/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3359 - val_loss: 0.2695\n",
      "Epoch 403/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3365 - val_loss: 0.2688\n",
      "Epoch 404/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3356 - val_loss: 0.2696\n",
      "Epoch 405/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3351 - val_loss: 0.2688\n",
      "Epoch 406/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3345 - val_loss: 0.2678\n",
      "Epoch 407/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3343 - val_loss: 0.2680\n",
      "Epoch 408/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3338 - val_loss: 0.2676\n",
      "Epoch 409/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3337 - val_loss: 0.2701\n",
      "Epoch 410/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3330 - val_loss: 0.2671\n",
      "Epoch 411/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3322 - val_loss: 0.2672\n",
      "Epoch 412/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3314 - val_loss: 0.2679\n",
      "Epoch 413/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3333 - val_loss: 0.2672\n",
      "Epoch 414/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3306 - val_loss: 0.2686\n",
      "Epoch 415/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3317 - val_loss: 0.2689\n",
      "Epoch 416/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3309 - val_loss: 0.2659\n",
      "Epoch 417/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3298 - val_loss: 0.2668\n",
      "Epoch 418/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3291 - val_loss: 0.2654\n",
      "Epoch 419/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3290 - val_loss: 0.2688\n",
      "Epoch 420/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3289 - val_loss: 0.2650\n",
      "Epoch 421/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3285 - val_loss: 0.2656\n",
      "Epoch 422/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3281 - val_loss: 0.2659\n",
      "Epoch 423/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3292 - val_loss: 0.2643\n",
      "Epoch 424/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3272 - val_loss: 0.2640\n",
      "Epoch 425/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3272 - val_loss: 0.2640\n",
      "Epoch 426/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3273 - val_loss: 0.2641\n",
      "Epoch 427/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3263 - val_loss: 0.2650\n",
      "Epoch 428/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3255 - val_loss: 0.2639\n",
      "Epoch 429/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3248 - val_loss: 0.2631\n",
      "Epoch 430/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3254 - val_loss: 0.2629\n",
      "Epoch 431/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3243 - val_loss: 0.2644\n",
      "Epoch 432/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3238 - val_loss: 0.2624\n",
      "Epoch 433/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3235 - val_loss: 0.2644\n",
      "Epoch 434/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3233 - val_loss: 0.2619\n",
      "Epoch 435/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3228 - val_loss: 0.2652\n",
      "Epoch 436/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3232 - val_loss: 0.2614\n",
      "Epoch 437/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3215 - val_loss: 0.2654\n",
      "Epoch 438/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3219 - val_loss: 0.2615\n",
      "Epoch 439/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3217 - val_loss: 0.2609\n",
      "Epoch 440/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3203 - val_loss: 0.2607\n",
      "Epoch 441/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3212 - val_loss: 0.2621\n",
      "Epoch 442/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3207 - val_loss: 0.2609\n",
      "Epoch 443/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.3195 - val_loss: 0.2602\n",
      "Epoch 444/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.3201 - val_loss: 0.2608\n",
      "Epoch 445/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3185 - val_loss: 0.2615\n",
      "Epoch 446/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3186 - val_loss: 0.2606\n",
      "Epoch 447/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3186 - val_loss: 0.2595\n",
      "Epoch 448/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3186 - val_loss: 0.2596\n",
      "Epoch 449/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3177 - val_loss: 0.2589\n",
      "Epoch 450/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3175 - val_loss: 0.2586\n",
      "Epoch 451/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3162 - val_loss: 0.2590\n",
      "Epoch 452/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3166 - val_loss: 0.2582\n",
      "Epoch 453/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3158 - val_loss: 0.2617\n",
      "Epoch 454/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3160 - val_loss: 0.2579\n",
      "Epoch 455/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3156 - val_loss: 0.2590\n",
      "Epoch 456/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3150 - val_loss: 0.2575\n",
      "Epoch 457/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3140 - val_loss: 0.2569\n",
      "Epoch 458/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3136 - val_loss: 0.2583\n",
      "Epoch 459/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3145 - val_loss: 0.2571\n",
      "Epoch 460/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3137 - val_loss: 0.2571\n",
      "Epoch 461/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3144 - val_loss: 0.2582\n",
      "Epoch 462/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3126 - val_loss: 0.2572\n",
      "Epoch 463/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3133 - val_loss: 0.2571\n",
      "Epoch 464/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3114 - val_loss: 0.2562\n",
      "Epoch 465/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3117 - val_loss: 0.2572\n",
      "Epoch 466/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3112 - val_loss: 0.2561\n",
      "Epoch 467/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 0.3108 - val_loss: 0.2564\n",
      "Epoch 468/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 0.3105 - val_loss: 0.2553\n",
      "Epoch 469/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 0.3102 - val_loss: 0.2573\n",
      "Epoch 470/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.3102 - val_loss: 0.2547\n",
      "Epoch 471/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 0.3094 - val_loss: 0.2547\n",
      "Epoch 472/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.3095 - val_loss: 0.2557\n",
      "Epoch 473/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 0.3088 - val_loss: 0.2541\n",
      "Epoch 474/800\n",
      "13465/13465 [==============================] - 1s 102us/step - loss: 0.3079 - val_loss: 0.2544\n",
      "Epoch 475/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 0.3080 - val_loss: 0.2554\n",
      "Epoch 476/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 0.3081 - val_loss: 0.2547\n",
      "Epoch 477/800\n",
      "13465/13465 [==============================] - 1s 99us/step - loss: 0.3073 - val_loss: 0.2545\n",
      "Epoch 478/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.3074 - val_loss: 0.2577\n",
      "Epoch 479/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3070 - val_loss: 0.2529\n",
      "Epoch 480/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3061 - val_loss: 0.2531\n",
      "Epoch 481/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3059 - val_loss: 0.2534\n",
      "Epoch 482/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3057 - val_loss: 0.2534\n",
      "Epoch 483/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3049 - val_loss: 0.2522\n",
      "Epoch 484/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3052 - val_loss: 0.2532\n",
      "Epoch 485/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3048 - val_loss: 0.2528\n",
      "Epoch 486/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3040 - val_loss: 0.2520\n",
      "Epoch 487/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3037 - val_loss: 0.2526\n",
      "Epoch 488/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3040 - val_loss: 0.2516\n",
      "Epoch 489/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3031 - val_loss: 0.2520\n",
      "Epoch 490/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.3033 - val_loss: 0.2509\n",
      "Epoch 491/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3025 - val_loss: 0.2517\n",
      "Epoch 492/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3038 - val_loss: 0.2517\n",
      "Epoch 493/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3014 - val_loss: 0.2504\n",
      "Epoch 494/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3016 - val_loss: 0.2499\n",
      "Epoch 495/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.3007 - val_loss: 0.2502\n",
      "Epoch 496/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.3011 - val_loss: 0.2504\n",
      "Epoch 497/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3003 - val_loss: 0.2502\n",
      "Epoch 498/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.3001 - val_loss: 0.2510\n",
      "Epoch 499/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2992 - val_loss: 0.2503\n",
      "Epoch 500/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2991 - val_loss: 0.2493\n",
      "Epoch 501/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2996 - val_loss: 0.2919\n",
      "Epoch 502/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2994 - val_loss: 0.2491\n",
      "Epoch 503/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2981 - val_loss: 0.2507\n",
      "Epoch 504/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.2986 - val_loss: 0.2495\n",
      "Epoch 505/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2978 - val_loss: 0.2494\n",
      "Epoch 506/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2980 - val_loss: 0.2491\n",
      "Epoch 507/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2971 - val_loss: 0.2486\n",
      "Epoch 508/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2969 - val_loss: 0.2478\n",
      "Epoch 509/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2969 - val_loss: 0.2489\n",
      "Epoch 510/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2960 - val_loss: 0.2513\n",
      "Epoch 511/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2962 - val_loss: 0.2477\n",
      "Epoch 512/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2960 - val_loss: 0.2472\n",
      "Epoch 513/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2953 - val_loss: 0.2464\n",
      "Epoch 514/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2946 - val_loss: 0.2469\n",
      "Epoch 515/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2948 - val_loss: 0.2485\n",
      "Epoch 516/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2943 - val_loss: 0.2486\n",
      "Epoch 517/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2935 - val_loss: 0.2501\n",
      "Epoch 518/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2936 - val_loss: 0.2459\n",
      "Epoch 519/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2927 - val_loss: 0.2478\n",
      "Epoch 520/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2929 - val_loss: 0.2464\n",
      "Epoch 521/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2925 - val_loss: 0.2457\n",
      "Epoch 522/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2935 - val_loss: 0.2460\n",
      "Epoch 523/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.2919 - val_loss: 0.2460\n",
      "Epoch 524/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2913 - val_loss: 0.2454\n",
      "Epoch 525/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2916 - val_loss: 0.2452\n",
      "Epoch 526/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2912 - val_loss: 0.2444\n",
      "Epoch 527/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2907 - val_loss: 0.2453\n",
      "Epoch 528/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2905 - val_loss: 0.2443\n",
      "Epoch 529/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2905 - val_loss: 0.2442\n",
      "Epoch 530/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.2899 - val_loss: 0.2433\n",
      "Epoch 531/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2894 - val_loss: 0.2436\n",
      "Epoch 532/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2888 - val_loss: 0.2442\n",
      "Epoch 533/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2885 - val_loss: 0.2443\n",
      "Epoch 534/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2888 - val_loss: 0.2434\n",
      "Epoch 535/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2879 - val_loss: 0.2433\n",
      "Epoch 536/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2880 - val_loss: 0.2479\n",
      "Epoch 537/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2879 - val_loss: 0.2457\n",
      "Epoch 538/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2874 - val_loss: 0.2423\n",
      "Epoch 539/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2865 - val_loss: 0.2423\n",
      "Epoch 540/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2865 - val_loss: 0.2446\n",
      "Epoch 541/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2861 - val_loss: 0.2420\n",
      "Epoch 542/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2857 - val_loss: 0.2470\n",
      "Epoch 543/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2854 - val_loss: 0.2417\n",
      "Epoch 544/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2861 - val_loss: 0.2415\n",
      "Epoch 545/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2854 - val_loss: 0.2423\n",
      "Epoch 546/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2847 - val_loss: 0.2413\n",
      "Epoch 547/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2843 - val_loss: 0.2435\n",
      "Epoch 548/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2838 - val_loss: 0.2436\n",
      "Epoch 549/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2835 - val_loss: 0.2416\n",
      "Epoch 550/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2840 - val_loss: 0.2448\n",
      "Epoch 551/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2836 - val_loss: 0.2514\n",
      "Epoch 552/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2837 - val_loss: 0.2398\n",
      "Epoch 553/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2825 - val_loss: 0.2419\n",
      "Epoch 554/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2818 - val_loss: 0.2408\n",
      "Epoch 555/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2818 - val_loss: 0.2403\n",
      "Epoch 556/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2819 - val_loss: 0.2396\n",
      "Epoch 557/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2817 - val_loss: 0.2395\n",
      "Epoch 558/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2812 - val_loss: 0.2409\n",
      "Epoch 559/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2804 - val_loss: 0.2392\n",
      "Epoch 560/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2807 - val_loss: 0.2460\n",
      "Epoch 561/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2812 - val_loss: 0.2381\n",
      "Epoch 562/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2794 - val_loss: 0.2387\n",
      "Epoch 563/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2796 - val_loss: 0.2384\n",
      "Epoch 564/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2788 - val_loss: 0.2385\n",
      "Epoch 565/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2787 - val_loss: 0.2380\n",
      "Epoch 566/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2789 - val_loss: 0.2385\n",
      "Epoch 567/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2782 - val_loss: 0.2373\n",
      "Epoch 568/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2776 - val_loss: 0.2379\n",
      "Epoch 569/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2781 - val_loss: 0.2418\n",
      "Epoch 570/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2783 - val_loss: 0.2371\n",
      "Epoch 571/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2769 - val_loss: 0.2370\n",
      "Epoch 572/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.2766 - val_loss: 0.2367\n",
      "Epoch 573/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2771 - val_loss: 0.2379\n",
      "Epoch 574/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2759 - val_loss: 0.2361\n",
      "Epoch 575/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2760 - val_loss: 0.2375\n",
      "Epoch 576/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2754 - val_loss: 0.2368\n",
      "Epoch 577/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2755 - val_loss: 0.2374\n",
      "Epoch 578/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2751 - val_loss: 0.2368\n",
      "Epoch 579/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2748 - val_loss: 0.2361\n",
      "Epoch 580/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2746 - val_loss: 0.2450\n",
      "Epoch 581/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2748 - val_loss: 0.2362\n",
      "Epoch 582/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2741 - val_loss: 0.2385\n",
      "Epoch 583/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2739 - val_loss: 0.2373\n",
      "Epoch 584/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2732 - val_loss: 0.2357\n",
      "Epoch 585/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2729 - val_loss: 0.2354\n",
      "Epoch 586/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2729 - val_loss: 0.2347\n",
      "Epoch 587/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2725 - val_loss: 0.2343\n",
      "Epoch 588/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2731 - val_loss: 0.2346\n",
      "Epoch 589/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2724 - val_loss: 0.2341\n",
      "Epoch 590/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2713 - val_loss: 0.2338\n",
      "Epoch 591/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2712 - val_loss: 0.2348\n",
      "Epoch 592/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2709 - val_loss: 0.2337\n",
      "Epoch 593/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2709 - val_loss: 0.2333\n",
      "Epoch 594/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2704 - val_loss: 0.2345\n",
      "Epoch 595/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2701 - val_loss: 0.2334\n",
      "Epoch 596/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2728 - val_loss: 0.2332\n",
      "Epoch 597/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2695 - val_loss: 0.2344\n",
      "Epoch 598/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2692 - val_loss: 0.2339\n",
      "Epoch 599/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2689 - val_loss: 0.2329\n",
      "Epoch 600/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2686 - val_loss: 0.2329\n",
      "Epoch 601/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2680 - val_loss: 0.2327\n",
      "Epoch 602/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2679 - val_loss: 0.2323\n",
      "Epoch 603/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2679 - val_loss: 0.2324\n",
      "Epoch 604/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2676 - val_loss: 0.2327\n",
      "Epoch 605/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2671 - val_loss: 0.2326\n",
      "Epoch 606/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2673 - val_loss: 0.2365\n",
      "Epoch 607/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2673 - val_loss: 0.2326\n",
      "Epoch 608/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2664 - val_loss: 0.2314\n",
      "Epoch 609/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2664 - val_loss: 0.2315\n",
      "Epoch 610/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2659 - val_loss: 0.2323\n",
      "Epoch 611/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2661 - val_loss: 0.2312\n",
      "Epoch 612/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2652 - val_loss: 0.2316\n",
      "Epoch 613/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2652 - val_loss: 0.2309\n",
      "Epoch 614/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2655 - val_loss: 0.2334\n",
      "Epoch 615/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2648 - val_loss: 0.2314\n",
      "Epoch 616/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2641 - val_loss: 0.2307\n",
      "Epoch 617/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2642 - val_loss: 0.2311\n",
      "Epoch 618/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2635 - val_loss: 0.2351\n",
      "Epoch 619/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2635 - val_loss: 0.2301\n",
      "Epoch 620/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2636 - val_loss: 0.2300\n",
      "Epoch 621/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.2630 - val_loss: 0.2315\n",
      "Epoch 622/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2630 - val_loss: 0.2301\n",
      "Epoch 623/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2623 - val_loss: 0.2296\n",
      "Epoch 624/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2618 - val_loss: 0.2308\n",
      "Epoch 625/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2625 - val_loss: 0.2291\n",
      "Epoch 626/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2612 - val_loss: 0.2310\n",
      "Epoch 627/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2614 - val_loss: 0.2287\n",
      "Epoch 628/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.2617 - val_loss: 0.2433\n",
      "Epoch 629/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2611 - val_loss: 0.2306\n",
      "Epoch 630/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2605 - val_loss: 0.2299\n",
      "Epoch 631/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2606 - val_loss: 0.2284\n",
      "Epoch 632/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2601 - val_loss: 0.2279\n",
      "Epoch 633/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2594 - val_loss: 0.2316\n",
      "Epoch 634/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2596 - val_loss: 0.2279\n",
      "Epoch 635/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2589 - val_loss: 0.2286\n",
      "Epoch 636/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2592 - val_loss: 0.2290\n",
      "Epoch 637/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2588 - val_loss: 0.2273\n",
      "Epoch 638/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2599 - val_loss: 0.2283\n",
      "Epoch 639/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2580 - val_loss: 0.2269\n",
      "Epoch 640/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2578 - val_loss: 0.2275\n",
      "Epoch 641/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2574 - val_loss: 0.2266\n",
      "Epoch 642/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2581 - val_loss: 0.2278\n",
      "Epoch 643/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2584 - val_loss: 0.2289\n",
      "Epoch 644/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2580 - val_loss: 0.2259\n",
      "Epoch 645/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2569 - val_loss: 0.2266\n",
      "Epoch 646/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2567 - val_loss: 0.2259\n",
      "Epoch 647/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2561 - val_loss: 0.2263\n",
      "Epoch 648/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2557 - val_loss: 0.2258\n",
      "Epoch 649/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2556 - val_loss: 0.2280\n",
      "Epoch 650/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2551 - val_loss: 0.2252\n",
      "Epoch 651/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2554 - val_loss: 0.2275\n",
      "Epoch 652/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.2552 - val_loss: 0.2253\n",
      "Epoch 653/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2545 - val_loss: 0.2252\n",
      "Epoch 654/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2551 - val_loss: 0.2246\n",
      "Epoch 655/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2539 - val_loss: 0.2255\n",
      "Epoch 656/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2537 - val_loss: 0.2258\n",
      "Epoch 657/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2536 - val_loss: 0.2242\n",
      "Epoch 658/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.2536 - val_loss: 0.2249\n",
      "Epoch 659/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2532 - val_loss: 0.2264\n",
      "Epoch 660/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2529 - val_loss: 0.2240\n",
      "Epoch 661/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2529 - val_loss: 0.2241\n",
      "Epoch 662/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2524 - val_loss: 0.2241\n",
      "Epoch 663/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2527 - val_loss: 0.2236\n",
      "Epoch 664/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2524 - val_loss: 0.2248\n",
      "Epoch 665/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2515 - val_loss: 0.2229\n",
      "Epoch 666/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2515 - val_loss: 0.2230\n",
      "Epoch 667/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2509 - val_loss: 0.2241\n",
      "Epoch 668/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2510 - val_loss: 0.2226\n",
      "Epoch 669/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2505 - val_loss: 0.2226\n",
      "Epoch 670/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2504 - val_loss: 0.2225\n",
      "Epoch 671/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.2503 - val_loss: 0.2234\n",
      "Epoch 672/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2499 - val_loss: 0.2244\n",
      "Epoch 673/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2497 - val_loss: 0.2221\n",
      "Epoch 674/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2495 - val_loss: 0.2237\n",
      "Epoch 675/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2488 - val_loss: 0.2245\n",
      "Epoch 676/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2493 - val_loss: 0.2215\n",
      "Epoch 677/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2490 - val_loss: 0.2228\n",
      "Epoch 678/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2484 - val_loss: 0.2211\n",
      "Epoch 679/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2482 - val_loss: 0.2252\n",
      "Epoch 680/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2480 - val_loss: 0.2246\n",
      "Epoch 681/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2476 - val_loss: 0.2210\n",
      "Epoch 682/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2475 - val_loss: 0.2204\n",
      "Epoch 683/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2469 - val_loss: 0.2210\n",
      "Epoch 684/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2472 - val_loss: 0.2206\n",
      "Epoch 685/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2467 - val_loss: 0.2220\n",
      "Epoch 686/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2463 - val_loss: 0.2235\n",
      "Epoch 687/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2461 - val_loss: 0.2198\n",
      "Epoch 688/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2463 - val_loss: 0.2197\n",
      "Epoch 689/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2457 - val_loss: 0.2195\n",
      "Epoch 690/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2454 - val_loss: 0.2195\n",
      "Epoch 691/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2452 - val_loss: 0.2198\n",
      "Epoch 692/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2460 - val_loss: 0.2189\n",
      "Epoch 693/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2448 - val_loss: 0.2191\n",
      "Epoch 694/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2448 - val_loss: 0.2197\n",
      "Epoch 695/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2452 - val_loss: 0.2189\n",
      "Epoch 696/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2440 - val_loss: 0.2214\n",
      "Epoch 697/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2435 - val_loss: 0.2183\n",
      "Epoch 698/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2435 - val_loss: 0.2181\n",
      "Epoch 699/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2439 - val_loss: 0.2218\n",
      "Epoch 700/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2429 - val_loss: 0.2191\n",
      "Epoch 701/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2431 - val_loss: 0.2179\n",
      "Epoch 702/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2423 - val_loss: 0.2180\n",
      "Epoch 703/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2426 - val_loss: 0.2193\n",
      "Epoch 704/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2423 - val_loss: 0.2172\n",
      "Epoch 705/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2425 - val_loss: 0.2170\n",
      "Epoch 706/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2423 - val_loss: 0.2185\n",
      "Epoch 707/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2415 - val_loss: 0.2169\n",
      "Epoch 708/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2412 - val_loss: 0.2166\n",
      "Epoch 709/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2406 - val_loss: 0.2164\n",
      "Epoch 710/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2406 - val_loss: 0.2175\n",
      "Epoch 711/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2405 - val_loss: 0.2164\n",
      "Epoch 712/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2403 - val_loss: 0.2176\n",
      "Epoch 713/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2407 - val_loss: 0.2157\n",
      "Epoch 714/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2405 - val_loss: 0.2165\n",
      "Epoch 715/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2405 - val_loss: 0.2167\n",
      "Epoch 716/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2396 - val_loss: 0.2158\n",
      "Epoch 717/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2389 - val_loss: 0.2157\n",
      "Epoch 718/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2392 - val_loss: 0.2155\n",
      "Epoch 719/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2390 - val_loss: 0.2151\n",
      "Epoch 720/800\n",
      "13465/13465 [==============================] - 1s 97us/step - loss: 0.2384 - val_loss: 0.2161\n",
      "Epoch 721/800\n",
      "13465/13465 [==============================] - 1s 109us/step - loss: 0.2380 - val_loss: 0.2150\n",
      "Epoch 722/800\n",
      "13465/13465 [==============================] - 1s 107us/step - loss: 0.2380 - val_loss: 0.2147\n",
      "Epoch 723/800\n",
      "13465/13465 [==============================] - 1s 107us/step - loss: 0.2381 - val_loss: 0.2151\n",
      "Epoch 724/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.2378 - val_loss: 0.2177\n",
      "Epoch 725/800\n",
      "13465/13465 [==============================] - 1s 107us/step - loss: 0.2371 - val_loss: 0.2154\n",
      "Epoch 726/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 0.2380 - val_loss: 0.2139\n",
      "Epoch 727/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 0.2379 - val_loss: 0.2147\n",
      "Epoch 728/800\n",
      "13465/13465 [==============================] - 1s 98us/step - loss: 0.2364 - val_loss: 0.2137\n",
      "Epoch 729/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 0.2365 - val_loss: 0.2141\n",
      "Epoch 730/800\n",
      "13465/13465 [==============================] - 1s 97us/step - loss: 0.2358 - val_loss: 0.2139\n",
      "Epoch 731/800\n",
      "13465/13465 [==============================] - 1s 98us/step - loss: 0.2367 - val_loss: 0.2135\n",
      "Epoch 732/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2355 - val_loss: 0.2135\n",
      "Epoch 733/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2353 - val_loss: 0.2140\n",
      "Epoch 734/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2357 - val_loss: 0.2138\n",
      "Epoch 735/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2347 - val_loss: 0.2135\n",
      "Epoch 736/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2347 - val_loss: 0.2128\n",
      "Epoch 737/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2352 - val_loss: 0.2132\n",
      "Epoch 738/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2344 - val_loss: 0.2131\n",
      "Epoch 739/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2339 - val_loss: 0.2122\n",
      "Epoch 740/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2339 - val_loss: 0.2127\n",
      "Epoch 741/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2336 - val_loss: 0.2123\n",
      "Epoch 742/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2334 - val_loss: 0.2148\n",
      "Epoch 743/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2334 - val_loss: 0.2125\n",
      "Epoch 744/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2327 - val_loss: 0.2135\n",
      "Epoch 745/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2335 - val_loss: 0.2112\n",
      "Epoch 746/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2323 - val_loss: 0.2122\n",
      "Epoch 747/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2326 - val_loss: 0.2132\n",
      "Epoch 748/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2324 - val_loss: 0.2118\n",
      "Epoch 749/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2315 - val_loss: 0.2121\n",
      "Epoch 750/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2317 - val_loss: 0.2112\n",
      "Epoch 751/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2316 - val_loss: 0.2137\n",
      "Epoch 752/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2319 - val_loss: 0.2107\n",
      "Epoch 753/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2310 - val_loss: 0.2111\n",
      "Epoch 754/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2307 - val_loss: 0.2111\n",
      "Epoch 755/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2307 - val_loss: 0.2100\n",
      "Epoch 756/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2311 - val_loss: 0.2103\n",
      "Epoch 757/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2299 - val_loss: 0.2109\n",
      "Epoch 758/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2301 - val_loss: 0.2102\n",
      "Epoch 759/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2304 - val_loss: 0.2131\n",
      "Epoch 760/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.2299 - val_loss: 0.2098\n",
      "Epoch 761/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2291 - val_loss: 0.2096\n",
      "Epoch 762/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2287 - val_loss: 0.2161\n",
      "Epoch 763/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2293 - val_loss: 0.2100\n",
      "Epoch 764/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2285 - val_loss: 0.2089\n",
      "Epoch 765/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2289 - val_loss: 0.2111\n",
      "Epoch 766/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2281 - val_loss: 0.2097\n",
      "Epoch 767/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2279 - val_loss: 0.2094\n",
      "Epoch 768/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.2276 - val_loss: 0.2096\n",
      "Epoch 769/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2277 - val_loss: 0.2086\n",
      "Epoch 770/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2278 - val_loss: 0.2081\n",
      "Epoch 771/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2273 - val_loss: 0.2084\n",
      "Epoch 772/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2268 - val_loss: 0.2086\n",
      "Epoch 773/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2270 - val_loss: 0.2085\n",
      "Epoch 774/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2262 - val_loss: 0.2091\n",
      "Epoch 775/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2268 - val_loss: 0.2080\n",
      "Epoch 776/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2260 - val_loss: 0.2082\n",
      "Epoch 777/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2264 - val_loss: 0.2117\n",
      "Epoch 778/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2260 - val_loss: 0.2076\n",
      "Epoch 779/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2254 - val_loss: 0.2075\n",
      "Epoch 780/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2251 - val_loss: 0.2071\n",
      "Epoch 781/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2251 - val_loss: 0.2071\n",
      "Epoch 782/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2249 - val_loss: 0.2078\n",
      "Epoch 783/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2245 - val_loss: 0.2068\n",
      "Epoch 784/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2244 - val_loss: 0.2067\n",
      "Epoch 785/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2242 - val_loss: 0.2065\n",
      "Epoch 786/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2240 - val_loss: 0.2073\n",
      "Epoch 787/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2234 - val_loss: 0.2081\n",
      "Epoch 788/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2239 - val_loss: 0.2122\n",
      "Epoch 789/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2238 - val_loss: 0.2099\n",
      "Epoch 790/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2233 - val_loss: 0.2091\n",
      "Epoch 791/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2227 - val_loss: 0.2058\n",
      "Epoch 792/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2225 - val_loss: 0.2056\n",
      "Epoch 793/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2226 - val_loss: 0.2067\n",
      "Epoch 794/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2221 - val_loss: 0.2073\n",
      "Epoch 795/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2224 - val_loss: 0.2057\n",
      "Epoch 796/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2226 - val_loss: 0.2057\n",
      "Epoch 797/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2215 - val_loss: 0.2075\n",
      "Epoch 798/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 0.2215 - val_loss: 0.2045\n",
      "Epoch 799/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.2215 - val_loss: 0.2056\n",
      "Epoch 800/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.2210 - val_loss: 0.2049\n"
     ]
    }
   ],
   "source": [
    "# Train the autoencoder to reconstruct its own input (x_train -> x_train),\n",
    "# using mean-squared reconstruction error and plain SGD.\n",
    "autoencoder.compile(loss='mean_squared_error',\n",
    "                    optimizer='sgd')\n",
    "\n",
    "# Checkpoint callback: keep only the weights with the lowest validation loss.\n",
    "# NOTE(review): save_best_only writes the best model to autoencoder_traffic.h5,\n",
    "# but `autoencoder` in memory ends with LAST-epoch weights -- reload the .h5\n",
    "# afterwards if the best-val-loss model is the one that should be evaluated.\n",
    "cp = ModelCheckpoint(filepath=\"autoencoder_traffic.h5\",\n",
    "                               save_best_only=True,\n",
    "                               verbose=0)\n",
    "\n",
    "# TensorBoard logging: graph and weight images enabled, histograms disabled\n",
    "# (histogram_freq=0).\n",
    "tb = TensorBoard(log_dir='./logs',\n",
    "                histogram_freq=0,\n",
    "                write_graph=True,\n",
    "                write_images=True)\n",
    "\n",
    "# Fit for nb_epoch epochs (defined in an earlier cell), validating on x_opt;\n",
    "# keep the per-epoch loss/val_loss dict for later plotting.\n",
    "# NOTE(review): verbose=1 produced the very large per-epoch log in this cell's\n",
    "# output; verbose=2 (one line per epoch) or 0 keeps the notebook readable.\n",
    "history = autoencoder.fit(x_train, x_train,\n",
    "                    epochs=nb_epoch,\n",
    "                    validation_data=(x_opt, x_opt),\n",
    "                    verbose=1,\n",
    "                    callbacks=[cp, tb]).history"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 376
    },
    "colab_type": "code",
    "id": "F6QtojGe9VGd",
    "outputId": "7c4f6e09-e84e-4ca9-d88a-347afab1d80b"
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAe8AAAFnCAYAAACPasF4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzs3Xl83FW9//HXrNkm+96kTdJ03yht\n6UKrlNLSBRAQ0IrIpqIIKqBevXARlOXicpGfiF5B1Cu7CyqIbdnK0tJ9b7qkSdvsy0wyWSbJJJnM\n/P5IMm1pm6Ylk8lk3s/Hw4fMd2a+8zmdNu+c8z3fcww+n8+HiIiIhAxjsAsQERGRs6PwFhERCTEK\nbxERkRCj8BYREQkxCm8REZEQo/AWEREJMQpvkRA0fvx4vvWtb510/L777mP8+PFnfb777ruPJ598\nss/XvPrqq9x88839Pi4igaPwFglRBw8exOVy+R93dHSwZ8+eIFYkIoNF4S0SoubMmcNbb73lf7xu\n3TqmTp16wmtWrVrF5ZdfzrJly7jxxhspLS0FwOl0cuutt7Jo0SJuu+02mpub/e8pKirihhtuYOnS\npVxxxRVn9QtBQ0MD3/72t1m6dCkrVqzg6aef9j/3i1/8gqVLl7J06VJuvPFGampq+jwuIqen8BYJ\nUcuXL+df//qX//Ebb7zBsmXL/I8rKyu5//77eeqpp1i9ejULFy7khz/8IQDPPPMMiYmJvPvuu/zw\nhz9k3bp1AHi9Xu644w6uvPJK1qxZw4MPPsg3vvENPB5Pv2p6/PHHiY+PZ82aNbz44ou89NJLbN26\nlUOHDrF69Wr+9a9/sWbNGpYsWcKGDRtOe1xE+qbwFglRs2fP5tChQ9TV1dHW1saOHTuYN2+e//n1\n69czZ84ccnJyALjuuuvYtGkTHo+HrVu3snz5cgCys7OZPXs2AIcPH6auro5rr70WgJkzZ5KUlMSO\nHTv6VdP777/P9ddfD0BCQgJLlixh/fr1xMXFUV9fz+uvv05jYyNf+tKXuOqqq057XET6pvAWCVEm\nk4lLL72UVatWsXbtWhYsWIDZbPY/73Q6iYuL8z+OjY3F5/PhdDppbGwkNjbW/1zv65qamnC73Sxf\nvpxly5axbNky6urqaGho6FdN9fX1J3xmXFwcdXV1pKen8+STT/pHAG677TaqqqpOe1xE+qbwFglh\nK1asYM2aNaxevZoVK1ac8FxycvIJodvY2IjRaCQxMZG4uLgTrnPX19cDkJaWRkxMDKtXr/b/b926\ndSxZsqRf9aSkpJzwmQ0NDaSkpAAwd+5cnn76adavX09mZiY///nP+zwuIqen8BYJYeeffz61tbUc\nOnTIP/Tda/78+WzdupWysjIAXn75ZebPn4/ZbGb69Om8/fbbAJSWlrJt2zYAsrKyyMjIYPXq1UB3\nqN9zzz20trb2q56FCxfyyiuv+N/71ltvsXDhQtatW8ePfvQjvF4v0dHRTJgwAYPBcNrjItI385lf\nIiJDlcFgYMmSJbS1tWE0nvi7eEZGBg8//DDf+MY36OzsJDs7m4ceegiAr33ta9x9990sWrSI/Px8\nLr30Uv/5Hn/8cR588EGeeOIJjEYjt9xyC9HR0f2q56677uLBBx9k2bJlGI1GbrvtNqZNm0Z7eztv\nvPEGS5cuxWq1kpSUxKOPPkpaWtopj4tI3wzaz1tERCS0aNhcREQkxCi8RUREQozCW0REJMQovEVE\nREKMwltERCTEhMytYnZ785lfdBYSE6NxOvt37+pQp7YMTWrL0DNc2gFqy1A10G1JTY095fGw7Xmb\nzaZglzBg1JahSW0ZeoZLO0BtGaoGqy1hG94iIiKhSuEtIiISYhTeIiIiIUbhLSIiEmIU3iIiIiFG\n4S0iIhJiFN4iIiIhJmQWaRERERlsTz
75Cw4e3E99fR1ut5sRI7KIi4vn0Ud/1uf7/v3v14mJsXHR\nRRcHpC6Ft4iIyGl885t3A91hfPhwMXfeeVe/3rdixRWBLEvhLSIicja2b9/Kyy8/T2trK3feeTc7\ndmzjvffewev1snjxIj7/+Zt49tnfkpCQQF5ePq+++mcMBiMlJUdYuPASbr31tk9cQ1iGd6OrnXd2\nVnLB2BTiYqzBLkdERPrhib/sYndx3YCec1p+Mnddd95Zv6+4uIiXXnoVq9XKjh3b+PWvf4fRaGTl\nyqu4/PJrTnjtvn0FvPji3/B6vVx33RUK73O1bk8Vf3v/MB3ufJbPzQl2OSIiEmLGjBmL1drd+YuM\njOTOO2/DZDLhdDppamo64bXjx08gMjJyQD8/LMO70+MFoL2zK8iViIhIf51LDzlQLBYLANXVVbzy\nygv8/vcvEB0dzS23fOGk15pMA79ZSVjeKmY0GgDw+nxBrkREREJZQ0MDiYmJREdHc/DgASoqKujs\n7Az454ZneBt6wtsb5EJERCSkjR07jqioaG6//VbeeedNVq5cyf/8z08C/rlhOWyunreIiJyN42/9\nmjFjFjNmzAK6h8Qff/xX/udSU2Ox25tPeG/vawHeeOOdAaknzHveCm8REQk9YRre3f+vnreIiISi\nsAxvQ096+3TNW0REQlBYhnfvsHmXet4iIhKCwjO8jbrmLSIioSssw9uk2eYiIhLCwvJWsZ5Rc3zq\neYuISB/OdUvQXlVVlTQ2NjBhwqQBrSssw9t/q5h63iIi0odz3RK019atm+nq8ii8B8KxRVqCXIiI\niISkX//6lxQU7MHr7eLaa7/AJZcsYcOG9fzpT7/DaDSTkpLCHXfcxR//+DssFitpaRlceOGCAfv8\n8AxvLdIiIhJyfr3r9xTUHRjQc05OnsA3zrv1rN6zfftWnM56nnrqGdrb3Xz5yzfyqU9dxN/+9gr3\n3Xcf2dljWLv2bSwWC0uXriAtLW1AgxvCNbw1YU1ERM7Rnj272LNnF3fe2b0vt9fbRX19HRdfvJj7\n77+fRYuWsmTJUhITkwJWQ3iGd+8Ka+p5i4iEjLPtIQeKxWLhM5+5muuvv/GE45dd9hkuv3wpf//7\nv/je977No4/+PGA1hOWtYr3D5up4i4jI2Zo0aQrr13+I1+vF7XbzxBPdIf2HPzxDREQEV111DQsX\nXkJJyRGMRiNdXV0DXkNAw7uwsJDFixfz/PPPn/TcRx99xLXXXsvnP/95nnrqqUCWcRKDhs1FROQc\nTZ8+gylTpvG1r93CN795GxMmTAQgNTWNm266iW9/+xuUlBzlggvmMGXKNP70pz/w9ttrBrSGgA2b\nt7a28tBDDzFv3rxTPv/www/z7LPPkp6ezg033MDSpUsZM2ZMoMo5gSasiYjI2Th+S1CA22//5kmv\nufzyK7nllhtO2BJ07twL+ec/Vw94PQHreVutVp555hnS0tJOeq6srIz4+HgyMzMxGo1cdNFFbNiw\nIVClnMTY02r1vEVEJBQFrOdtNpsxm099ervdTlLSsVl4SUlJlJWV9Xm+xMRozGbTgNSW5HT31Ggi\nNTV2QM4ZbMOlHaC2DFXDpS3DpR2gtgxVg9GWkJlt7nS2Dti5mpraAHC3e04Y3ghVqamxw6IdoLYM\nVcOlLcOlHaC2DFUD3ZbT/SIQlNnmaWlpOBwO/+OamppTDq8Hiu7zFhGRUBaU8M7OzsblclFeXo7H\n42Ht2rXMnz9/0D7ff6uYJqyJiEgICtiw+d69e/nJT35CRUUFZrOZNWvWsGjRIrKzs1myZAkPPvgg\n3/nOdwBYsWIFeXl5gSrlJAZNWBMRkRAWsPCeMmUKzz333Gmfv+CCC3jllVcC9fF9OnarWFA+XkRE\n5BMJ6xXW1PMWEZFQFJ7hrQlrIiISwsIzvLUxiYiIhLDwDG9/zzvIhYiIiJyD8Axv3SomIiIhLCzD\nW7
uKiYhIKAvL8PZf81Z4i4hICArT8NaWoCIiErrCM7w1YU1EREJYeIa3et4iIhLCwjO8e3rePl3z\nFhGREBSe4a0JayIiEsLCMrwN2phERERCWFiGt9Y2FxGRUBae4a0JayIiEsLCM7x7Wq2et4iIhKLw\nDO/etc19CnAREQk9YRneBoMBs6m76V1dmrUmIiKhJSzDG8Bq6W56p0fhLSIioSVsw9ti7gnvLg2b\ni4hIaAnf8O4ZNveo5y0iIiEmfMPbYgKgU9e8RUQkxIRveJvV8xYRkdAU9uGtnreIiISasA1vq7ln\n2Fw9bxERCTFhG97+YXP1vEVEJMSEbXibFd4iIhKiwja8rWYt0iIiIqEpbMPbYtatYiIiEprCOLx7\nbxXTCmsiIhJawj681fMWEZFQE/bhrUVaREQk1IRxeHdf89ZscxERCTVhG96abS4iIqEqfMO7Z2OS\n9s6uIFciIiJydsI2vKMjzQC4OxTeIiISWhTeHZ4gVyIiInJ2wja8oyIsALS1q+ctIiKhJWzDWz1v\nEREJVWEf3up5i4hIqAnb8I6K6Alv9bxFRCTEhG14R0d2X/PWbHMREQk14RvePT1vd7t63iIiElrC\nNrwjrCaMBgMdHq9WWRMRkZAStuFtMBiIje4eOne1dQa5GhERkf4L2/AGiI22AtDU0hHkSkRERPov\nrMM7Lqa7593cqvAWEZHQEd7h3dvzVniLiEgICevw7h02b27VNW8REQkdYR3evcPmDa72IFciIiLS\nf2Ed3inxUQA4GtxBrkRERKT/wjq8UxO6w9ve0BbkSkRERPovzMM7EgB7Yxs+ny/I1YiIiPRPWIe3\nLcpCpNVEW3sXLW4tkyoiIqHBHMiTP/roo+zatQuDwcC9997LtGnT/M+98MILvPbaaxiNRqZMmcJ9\n990XyFJOyWAwkJoQRVmtC3tDG7Yoy6DXICIicrYC1vPevHkzJSUlvPLKKzzyyCM88sgj/udcLhfP\nPvssL7zwAi+99BLFxcXs3LkzUKX0Sde9RUQk1AQsvDds2MDixYsByM/Pp7GxEZfLBYDFYsFisdDa\n2orH46GtrY34+PhAldIn/3VvhbeIiISIgA2bOxwOJk+e7H+clJSE3W7HZrMRERHBHXfcweLFi4mI\niOCyyy4jLy+vz/MlJkZjNpsGtMbU1FjyRybC5jKcLZ2kpsYO6PkHUyjX/nFqy9A0XNoyXNoBastQ\nNRhtCeg17+MdP5vb5XLx29/+ltWrV2Oz2bjppps4cOAAEyZMOO37nc7WAa0nNTUWu72ZuMjuP4Li\n8gbs9uYB/YzB0tuW4UBtGZqGS1uGSztAbRmqBrotp/tFIGDD5mlpaTgcDv/j2tpaUlNTASguLmbk\nyJEkJSVhtVqZNWsWe/fuDVQpfcpKjQGgytGC16vbxUREZOgLWHjPnz+fNWvWAFBQUEBaWho2mw2A\nrKwsiouLcbu7Vzbbu3cvubm5gSqlTzGRFhJjI+jweLE36rq3iIgMfQEbNp8xYwaTJ09m5cqVGAwG\nHnjgAV599VViY2NZsmQJX/7yl7nxxhsxmUycf/75zJo1K1ClnFFWSgzO5nYq7C2kJ0YHrQ4REZH+\nCOg17+9+97snPD7+mvbKlStZuXJlID++37JSY9h7pJ4Ku4sZ41KDXY6IiEifwnqFtV5ZKd3D+RWO\nliBXIiIicmYKb45NWquwK7xFRGToU3gDI5JjMADV9a14urzBLkdERKRPCm8gwmoiLSmaLq9PvW8R\nERnyFN49cjO6b4Q/Ut0U5EpERET6pvDu0RveJdXDY5UfEREZvhTePXrD+2iVwltERIY2hXePUemx\nGIByu4tOT1ewyxERETkthXePqAgzGcndk9bKNWlNRESGMIX3cXIz4gA4WqVJayIiMnQpvI+Tm9l9\n3ftwpcJbRESGLoX3ccZmxwNQWN4Q5EpEREROT+F9nJFpNiKtJuwN
bpzN7cEuR0RE5JQU3scxGY2M\nyerpfZep9y0iIkNTWIZ3WXMlD733/6hwVZ303LiRCYCGzkVEZOgKy/De7ShgT80Bdtr3nvRcb3gf\nUs9bRESGqLAMb2NPs7u8Jy/GkpcZi9lkoNzegqutc7BLExEROaOwDG+TsSe8fSeHt8Vs8l/33ne0\nflDrEhER6Y/wDG+DCTh1eANMzU8GYM/hukGrSUREpL/CO7y93lM+PzWvO7z3Hq7H5/MNWl0iIiL9\nEZ7h3cewOUBWagwJNiuNLR2U1boGszQREZEzCs/wPsOwucFgYMpoDZ2LiMjQFN7hfZphc4BpPeG9\n85BjUGoSERHprzAN795hc89pXzN1dDJWs5Hiyibqm9yDVZqIiMgZhWd4G80AdPlO3/OOsJr8s863\nHbQPSl0iIiL9EZ7hbTj9Ii3HmzU+DYCtB2sDXpOIiEh/hWd4G/uesNZrWn4yZpORovJG7TImIiJD\nRniGt3+2+emHzQGiIsxMHZ2ED9heqKFzEREZGsI0vPs3bA7Hhs437asJaE0iIiL9FZ7h3TNs7j3D\nsDnA+eNSiLCYKKpopLq+NdCliYiInFF4hvcZFmk5XqTVzAUTunvf6/ecvP+3iIjIYAvL8Db285p3\nr/lTMwD4aG81Xq/WOhcRkeAKy/A+m2veAONGJpCaEImzuZ19JdomVEREgis8w7vnmrenH8Pm0L3W\n+fypmQCs31MdsLpERET6IzzD27+2ef/CG2D+lEwMdN8y1uruDFBlIiIiZxaW4W0+i9nmvZLjI5mY\nm0inx8s69b5FRCSIwjK8+7tIy8ctmpENwLvby/H6NHFNRESCo1/hvXfvXtauXQvAL37xC2666Sa2\nbt0a0MIC6diuYv3veQNMH5NCclwEtc429h7WxDUREQmOfoX3ww8/TF5eHlu3bmXPnj3cf//9/PKX\nvwx0bQFjPIdr3gBGo4GLj+t9i4iIBEO/wjsiIoLc3FzeeecdPve5zzFmzBiMxtAdcT+2McnZDZsD\nfGpaJmaTkT3FddQ4teKaiIgMvn4lcFtbG6tWreLtt99mwYIFNDQ00NTUFOjaAub4YXPfWV67jo22\nMndSOj7g3W0VAahORESkb/0K73vuuYfXX3+du+++G5vNxnPPPcfNN98c4NICx2gwYjGaAej0nv1t\nX5fM7B46/2B3JS26bUxERAaZuT8vmjt3LlOmTMFms+FwOJg3bx4zZswIdG0BFWmOoLPDQ3tXB1aT\n9azem5MRy8ScRPaXOPnb+4e5cen4AFUpIiJysn71vB966CFWrVpFQ0MDK1eu5Pnnn+fBBx8McGmB\nFWGOAKCjq+Oc3n/94rGYjAbe21HB0erQvYQgIiKhp1/hvW/fPq677jpWrVrF1VdfzRNPPEFJSUmg\nawuoCHN3b7v9HMM7K9XmHz5ftbF0wOoSERE5k36Fd++krvfee49FixYB0NFxbqE3VESaunve5xre\nAItnZWMxG9lyoJbCsoaBKk1ERKRP/QrvvLw8VqxYQUtLCxMnTuQf//gH8fHxga4toCItn2zYHCAl\nPoqls0cB8MaG0B6JEBGR0NGvCWsPP/wwhYWF5OfnAzBmzBh++tOfBrSwQIvomaTW4f1kIwiLZ2bz\n9tYy9hyuY2eRg+ljUgaiPBERkdPqV8/b7Xbz7rvv8q1vfYvbb7+d9evXY7We3QztoaZ3wtonGTYH\niIuxctWCPABefKuQjs6zW7VNRETkbPUrvO+//35cLhcrV67kc5/7HA6Hg//6r/8KdG0BdWzCWvsn\nPteimdlkpcbgaHTz/s7KT3w+ERGRvvRr2NzhcPD444/7H1988cV86UtfClhRgyHSf6vYJ19kxWwy\n8tlPj+bJv+3hXxuOMndyOrHRoT0yISIiQ1e/l0dta2vzP25tbaW9/ZP3WIMpcoCGzXtNH5PChFEJ\nNLd28tI7hwbknCIiIqfSr573
5z//eZYvX86UKVMAKCgo4Nvf/nZACwu0KHMkAG6Pe0DOZzAYuHn5\nBH747GY2FtQwZ2I652nymoiIBEC/et7XXnstL730EldddRVXX301L7/8MkVFRYGuLaBiI2wAtHS2\nDNg50xKjufrTowH405qDtLo9A3ZuERGRXv3qeQNkZmaSmZnpf7x79+4zvufRRx9l165dGAwG7r33\nXqZNm+Z/rqqqinvuuYfOzk4mTZrEj3/847Ms/ZOJ6wlvV+fAbuu5ZNZINu+v5UhVE395r4iblk0Y\n0POLiIic86bcZ9pKc/PmzZSUlPDKK6/wyCOP8Mgjj5zw/GOPPcatt97KX//6V0wmE5WVgztL22aN\nAcA1gD1vAKPRwK0rJmAyGnh/ZyX7S5wDen4REZFzDm+DwdDn8xs2bGDx4sUA5Ofn09jYiMvlAsDr\n9bJt2zb/UqsPPPAAI0aMONdSzklcAIbNe2Wl2rjiwlwAnn6tAHtDW99vEBEROQt9DptfdNFFpwxp\nn8+H09l3j9LhcDB58mT/46SkJOx2Ozabjfr6emJiYvjv//5vCgoKmDVrFt/5znf6PF9iYjRms6nP\n15yNRnf3yEGrp5XU1NgBO2+vG6+YwqHKJgoO1/HGxlK+96VZA/4ZxwtEG4JFbRmahktbhks7QG0Z\nqgajLX2G94svvjhgH3T8MLvP56OmpoYbb7yRrKwsbrvtNt577z0WLlx42vc7nQN7bTopuWfYvKOV\n6poGTMaB+8Wg101Lx3Hv0xv5YGcFeRk2LpqeNeCfAd1/Uez25oCce7CpLUPTcGnLcGkHqC1D1UC3\n5XS/CPQZ3llZ5x42aWlpOBwO/+Pa2lpSU1MBSExMZMSIEYwa1b2px7x58zh06FCf4T3QTEYTcdZY\nmjqaaepoJjEyYcA/IyU+ihuXTuD3/97Pi28fIjvNRv6I0N7QRUREgu+cr3mfyfz581mzZg3QfV94\nWloaNlv3dWaz2czIkSM5evSo//m8vLxAlXJavYFd7w7cdp4LpmUyf2oGnR4vv3hlF/VNA3NfuYiI\nhK9+3yp2tmbMmMHkyZNZuXIlBoOBBx54gFdffZXY2FiWLFnCvffeyw9+8AN8Ph/jxo3zT14bTEkR\nCZRQRr3bST65AfucG5eOp76pnf0lTn764g4evPUCIq0B+6MXEZFhLqAJ8t3vfveExxMmHLvnOScn\nh5deeimQH39GSZGJANS7A3s7l8Vs4mtXTuZnL+6gwtHCn1Yf5NbLJmI2BWzgQ0REhrGwTo+UqGQA\nalsdZ3jlJxcXbeXLl0/EaDCwcV8Nf1p9MOCfKSIiw1NYh/cIWwYAlS3Vg/J5uRlxfPOaqVjMRtbt\nqeI3/9h7xsVuREREPi68wzsmHYCqlhq8Pu+gfOZ5Y1K4ZUX35YMtB2p5bf3RQflcEREZPsI6vKMt\n0SRExNPp7cTRVj9onzt3Uga3rpgIwD/XHeGHz27C61UPXERE+ieswxsg09/7Hpyh814LpmWyYm4O\nAOX2Fu5/dhMdnV2DWoOIiISmsA/vETHd170rXFWD/tmfvWg0M8d3L1xTVdfK//6zQNfARUTkjMI+\nvEfGdq8iV9pcPuifbTQYuP2qKXxxyTisZiM7ixw8/spOymtdg16LiIiEjrAP79y47iVajzSWBqXX\nazQYuGRmNjf3TGIrOOrk6dcLaHF3DnotIiISGsI+vFOikrBZYnB1tlAX4MVa+jJ3Ugb/deMszCYj\n5fYWvvnEh7y9tSxo9YiIyNAV9uFtMBjIjRsJwNHGkqDWMnpEHI9+dQ45Gd27yLz49iH+tPoA7R2a\nyCYiIseEfXjDsaHzo03B7+mmJETxwM0XsGx2d03v7azk9sffx97QFuTKRERkqFB4A7nxveFdGuRK\njvncojH+e8EBfvzHLWw9UBvEikREZKjQ1lZATmz3sHlZcwWdXg8W49D4Y1kwLZMur5f/W32QFr
eH\nX/9jL2aTgduvnML541KDXZ6IiASJet5AtCWKETEZeHxdHAnyde+Pu2h6Fk9/byFLZnX/guHp8vHk\nq3vYol64iEjYUnj3GJ80BoCDzqIgV3Iys8nIFxaP5YoLc/3HfvOPvfzh3/tpau0IXmEiIhIUCu8e\n4xN7wrt+6IV3r6s/PZoHb7mAC6d0rwr34e4q7vrlOv78diGdHs1IFxEJFwrvHmMSRmM0GClpLqPN\n4w52Oac1Kj2Wr1w+iUdvm0t6YhQAz63az2MvbKet3RPk6kREZDAovHtEmSPJiR2J1+elqOFwsMs5\no4ykaH7wxRlMHZ0MwJGqZu5+ch3PvL5Pq7OJiAxzCu/j9F733l9/KMiV9E+8LYK7P3ceT//nYnIz\nYunweNlQUM03n/iQd7eXa5tREZFhSuF9nCnJ3euL77aH1u5emSkx3H/TLH5062z/UPrzbxbylZ+u\n5YNdlUGuTkREBprC+zg5cSOJt8bibG+grLki2OWcFYPBwMg0G9/7wvnMn5KB1dz91f5x1QFufexd\njlQ1BblCEREZKArv4xgNRs5LnQLALvveIFdzbpLiIvny5ZN48NbZRFhN/uMP/d9WXnizUJPaRESG\nAYX3x/SG984QDe9eGUnR/Oaei7jrumn+Y+9sL+eOX3zAc2sO0qpJbSIiIUvh/TFjE0YTY46murWW\nCldVsMv5xKblp/Ds9y/meyunE9nTE1+7o4I7n/iQR57bSqWjJcgViojI2VJ4f4zJaGJm+nQA1ldu\nDnI1A8NgMDAxN4mf3n4hn/30aP/x4oomHvj9Zg6WOvGG0AQ9EZFwp/A+hfkjZgOwuXo7HV3DZ3jZ\nFmXh8gtz+cEXZ/iPdXl9/OTFHXzlJ2tZu708pGbZi4iEK4X3KWTHjmBUbDZtnjZ22vcEu5wBN25k\nAr/97kIevOUCpuUn+4fTn3uzkF/8ZRf7j9bT5fUGuUoRETkdhfdp9Pa+3ytfPyx7oxazkVHpsdx1\n3Xl8//oZmE0GAPYerudnL+/ksRe242hoG5ZtFxEJdQrv05idMYMYSzQlTWUUNRwJdjkBlZMRy9Pf\nu5jHvj6PBVMzsUVZKK5o4j/+dwNf/sladhfX6Zq4iMgQovA+DavJykVZFwLwdul7wS1mkKQlRHHr\nZRP5zxtmkNazUhvAE3/ZxT1PrqO81hXE6kREpJfCuw+fzr4Qi9HC3roDlDaXB7ucQZOZHMNjX5vH\n3Z87z3+sqbWTH/5+M7c+9i47ixxBrE5ERBTefYi12vhU1lwA/lm0KsjVDL6po5P51V2f4t4bZpIY\nG+E//su/7uapv+/RkqsiIkGi8D6DZbmXEGWO4oDzEPvrCoNdzqCLjrQwJjueH906my8uGYeh5/i2\ng3Ye+r+t3PGL91m7owJPl2bZvfN1AAAgAElEQVSni4gMFoX3GcRYolmaczEAfy9+A68vPEPKFmXh\nkpnZPPuDRTz05dnMm5wBQFt7F8+tOchtP3uPP68tCnKVIiLhQeHdDxdlzycxIoEKVxXvl38U7HKC\nLivVxlevmMS3rpl2wvHVm0q575mNrN1erp64iEgAKbz7wWqycN24KwF4rXgVjrb6IFc0NEwfm8Lv\n/uNivnr5JP+xqrpWnnuzkP/87Qae/Ntu6hrdQaxQRGR4Mge7gFBxXupkZqadx7baXbx44K98c/pX\nMRgMZ37jMGc0Gpg3JYN5UzJoa/fw5pYy/r2xhLqmduqa2tl31MniWdksnzOK6EhLsMsVERkW1PM+\nC9eNu5IYSzQHnUW8V74+2OUMOVERZq5ckMejX53LgqmZALR3dvHGhhLufOJD7v7VOg6WOoNcpYhI\n6FN4n4VYq43rx18DwF8PvcaTO57hQP2hIFc19CTHR3LrZRP59T2f5pqLju1i1ujq4Ccv7uD3/97P\n0eomXRcXETlHCu+zND1tKhdld6+8dsB5iL8cei3IFQ1dkV
Yzl83L5envLeTKBXn+4+t2V/HjP27l\ntp+9x78+OqoQFxE5S7rmfQ6uHnM5ZqOZd0o/oLqlhuqWWjJi0oJd1pBlNhm5ckEeS2ePZHuhnT2H\n69m0rwaAVz84zKsfHGblojFcPCMryJWKiIQG9bzPgcVo5rNjLmdK8gQAfrP7DzR3uHB73HR5u4Jc\n3dAVaTVz4ZRMbrtiEj/44gymj0nxL/ry8rtFfO3n7/OXdwppaunA69VGKCIip6Oe9yfwhQnX8Otd\nv6fCVcUP1v0YgPNTpzIzfTrJkYmMissOcoVDk8FgYNzIBMaNTKC6vpU/v1vkXy/9T//ez5+AtMQo\nvn7lZEalx2LUrH4RkRMYfCGyYbPd3jyg50tNjR2Qcza2N/Gzrb/C2d5w0nNPLfrpJz5/fwxUW4LJ\n0djGhoIaNhRUU13X6j+eEh/J5RfmMm9yOhazKYgVnr3h8L30Gi5tGS7tALVlqBrotqSmxp7yuIbN\nP6H4iDi+f8G3iLOe/Afs8XqCUFFoSomP4ooLc/n1f1zCI1+dw/lju4fUHY1u/rjqAF/7+ft856n1\nlFQPj3/gIiKfhMJ7AMRabfxo3g8YHZ97wvE6t+5pPlsWs5HM5Bi+ec00fvOdi5g+JsX/nLO5nR/9\ncQv3PbOR19YdocurWeoiEp4U3gPEarJw5/SvMC4h33/sxxt/xtul7wexqtBmtZj41rXTeOSrc7j6\n06Mxm7qvfVfVtfKPdUd44i+7Wb+nivZOTRIUkfCiCWsDKMJk5c7pX+Hlg3/no6rNAPy96A0uzLyA\naEt0kKsLXZnJMVxxYQxXXJhLaU0zL75VSGF5IwVH6ik4Us+rHxxmcl4ScyalMyknUcvWisiwp/Ae\nYCajiesnXENadAr/KP43AC8c+Bs3TLyWKHNUkKsLfaPSY/nBDTOpb3Kz9aCdtTsqqKlvZd3uKtbt\nriI7NYZlc0Zx/thUIqwmzVQXkWFJ4R0ABoOBJTkLyU/I48kdT7PTvoeSpjK+OPFa4q1xNHU0Mzo+\nB6vJGuxSQ1ZSXCSXXjCSi88fQWFZI9sK7Xy0p4pyewu/+9d+YD8ZSdFcc9FoJuclEWnVX3URGT50\nq1iAVbfU8qf9r1DSVHbC8dy4UXx35h0DMsSr2yy6dXq8fLS3in99VEJd07GtSGMizSydPYrZE9NI\nSxy8yxf6Xoae4dIOUFuGqsG6Vcz04IMPPjhgnxJAra0dA3q+mJiIAT/nqdisMczNmIXJYKawodh/\nvKG9EbPRzJiEvD7e3T+D1ZbB8EnaYjIayM2IY+H0EURHWrBajLS2e3C1dbK/xMnb28rZdrCWxpYO\nGls6yE61DXD1J9L3MvQMl3aA2jJUDXRbYmIiTnlcY4mDwGQ0sTzvEmamn8eze5+n3FUJwGuHV1Ph\nqmJcYj7np00jRpPaBoTVYmLZnFEsmzMKR0MbH+6u4lB5AwdKGyi3t1BubwGgwuFi/KhELCYj40Ym\nBLlqEZH+07B5ELg6Wnh8+2+oaa31H0uLTuG+2fdgNp7971Macjozr8/H5n01vP7RUaqOW8ENwGCA\nLy4Zx4jkGCbkJA7YZ+p7GXqGSztAbRmqBmvYPKA970cffZRdu3ZhMBi49957mTZt2kmv+Z//+R92\n7tzJc889F8hShhSbNYbvX/AtfrPr9xxqOAxAbauDfxavYnnuYqItmpU+0IwGA3MnZzB3cga1DW28\nv7OCbQfs1Da04fPB828WApCVGsOKuTnM6JmtLiIyFAUsvDdv3kxJSQmvvPIKxcXF3Hvvvbzyyisn\nvKaoqIgtW7ZgsVgCVcaQFWGycteMr1PSVMbasnVsqdnBu2UfsqFqK4tHXcQlIz+FxWThuX1/pqjh\nMD+YfRdR5shglz0spCVEcd3CMVxzUT5HqppYu72Cj/ZWA1Bhb+GZ1/cBMDLNxlcvn0R2WmCvjYuI\nnK2AhfeGDRtYvHgxAP
n5+TQ2NuJyubDZjv0gfOyxx7j77rv51a9+FagyhrycuJHcOOnzjEnIY1P1\ndg43HuX1w6v5oHw9yVFJHG4sAWCPYx+zM2YEudrhxWgwkD8invwR8Xz5sonsPORgQ0E1Ww/aASir\ndfHD328mJz2W88YkM35UIpnJ0STYTj2BRERksAQsvB0OB5MnT/Y/TkpKwm63+8P71VdfZfbs2WRl\nZQWqhJBhNBhZkDWX+SPmcNBZxF8K/0l1ay2NHceum/zfvpcZmzAamyWGVo+b+Iju6yCdXZ3UtTrR\n3MNPxmAwcP64VM4fl4qrrZO3tpSxalMpni4vJTXNlNQ0w/qjANy8fAITRiWQGBsRcjudicjwMGg/\n8Y+fF9fQ0MCrr77KH/7wB2pqavr1/sTEaMwD/IPydBMBgiktbQbzx01na8Vufr7+tyc89/92/i9m\no5kqVy3/seDrzMo6j1989Ds2lG3j50v/i1EJw+MXoWB/L6nAbaOSuO2a83C3e3jtw8O8vaWUKkf3\nLPU/rjoAQG5mHN/94kxGZcSe9n79YLdlIA2XtgyXdoDaMlQNRlsCNtv8ySefJDU1lZUrVwJwySWX\n8M9//hObzcbq1av55S9/ic1mo6Ojg9LSUq699lruvffe055vOM02PxtHGkv426F/caSp5KTnlucu\nZtXRtwFYkbuYy0ZfOtjlDbih/L00utp5/M+7KKt1nXA8wmoiLtrCZfNymTU+jejI7t+Jh3JbztZw\nactwaQeoLUNVyM82nz9/Pk8++SQrV66koKCAtLQ0/5D5smXLWLZsGQDl5eX853/+Z5/BHc7y4nP4\n7qw7cLobeOPIW2yu3k6Xr3sXrd7gBk4YYt9fV0hSZALpMWmDXu9wFm+L4Ee3zqbF3cmBEifv76rk\ncEUTre0e7B1d/HHVAf646gDT8pO5fF7usOpJiMjQErDwnjFjBpMnT2blypUYDAYeeOABXn31VWJj\nY1myZEmgPnbYSoxM4IaJ13HDxOs42lTKtppdrC1bh4/ugZP1lZtYX7nJ//qEiHhunvQFxiTkaZet\nARYTaWHm+DRmjk/D1dbJkaomVm8qZX9J9/7tu4vr2F1ch+GFbUwdncyCqZnMmqBfpERk4GiRlhBm\nb63jaFMpLx38G+1dp16O76ZJK0Nqlnoofy8VjhYOVzSyq7iO7YX2k56/9IKRLJ6VTXJc9y1/ofRL\nVSh/L8cbLu0AtWWoCvlhcwm81OhkUqOTWT7lU2wuLiDaHEWUOYoXD/yVvXX7ge5Z6m0eNwtGzMFk\n1MzoQMpKiSErJYZPnTeC0ppmNuyvxeFsZVvPrWdvbinjzS3dG9SMHhHHVZ/KY2JOIiajMZhli0gI\n0sYkw0BMTAQRXVHYrDFEmiM4L3UybZ42SprLASioO8Cqo++wrWYXyZGJxEfEs6VmB/Y2B5kx6UGu\n/kTD5XuJt0Vw8QU5TB6VwKzxqRgNBmKizNQ3ufH6wNnczoaCGgqO1NPp8eL1+kiMjRiyvfHh8r0M\nl3aA2jJUaWMSOWdWk5XPj7+az469gj2Ofbxx+E2qW2upaa3lN7v/cMJrxy74ITGWaIobjpAXn3NO\na6sPVR1dnVS4qsiJy8ZoCF7vNivVxhcvHQdArbOVf28s5VB5A1V1rRyubOJwZRMA8TYry2aPYlp+\nMpnJMUGrV0SGPl3zHgbO1Bafz0dNay1ry9ezq3YvzZ3HbnWKtdpo7uh+vDRnEZ/JX3bGz2tob8Te\n6mBsYv4nL/5jBvJ7+c2uP7C3bj+fH3c1n86eNyDnPBtnakuru5MNBTW8vbWMGmfbCc9lpcaQmx7L\nBRPTGZsdT4TVhDGIvfLh8u9luLQD1JahSte8ZcAYDAYyYtL5wvjPsnLc1TR2NPH8/r+wv77QH9wA\na0re5UhTKV8YfzUAdW4n4xPHnNRr/emWX9LY0cxd53+dsYmjB7UtZ6P3uv/m6u1BCe8z
iY60cMnM\nbC6ZmU19k5sDpU4KjtSzq6iOCnsLFfYW1vesuQ5w2xWTmJaf4r+PXETCl34KhBmDwUBCRDx3Tv8K\nrZ2t7HHs5+3S96ls6Q6JQmcRP9r4M//rUyKT+O6sO4m1HluTvvee8oPOIsYmjqbN00Ztq4OcuJGD\n25hhJCkukgunZHLhlEw8XV72HXWyaV81GwqOrUD49Ov7MBkNzJ+awYScRM7LTwl6j1xEgkPhHcai\nLdHMyZzJnMyZuDpaOOg8xMbqbRTWF+HpWQjG4a7n/o8eZYQtk2hz1AkB7WxvoMvbxTN7nuOgs4hv\nTv8qE5LGBqs5pxVq2WY2GZmWn8y0/GQ+Mz+PzftrKK5soqWtk+LKJj7YVcUHu6oAiLCY+MLisYwf\nmUB6UnSQKxeRwaLwFqB7j/GZ6dOZmT6d1s5Wyl1V7K3bzzulH9Dp9VDS1H2L0/76Qv97NlZtxYCB\ng84iALbU7BiS4R3K0pOiuWJ+nv9xUUUj7++soLCsAXuDm/bOLv9a61ERZi6cnMGCaZmMTLNhNIbY\nby0i0m8KbzlJtCWacYn5jEvM56r8Fey07+XPB/9xwkS3Xhuqtvj/u66tni5vl+4nD6AxWfGMyYoH\noKquhbXbKygsb6C0xkVbu4d3tpfzzvZyIq0mslNtTMxJ5FPnZZISHxXkykVkICm8pU9Gg5EZadOY\nkTaNLm8XG6u2MjZxNH8oeInSnvvIex1qOMyvd/2eO6d/ZcjerzycZCbHcP2S7lvQSqqbcbk72Xqg\nln1H67E3uCmqaKSoopHXPzrKuOx4fEBaYhS3rJio6+QiIU7hLf1mMpqYnzUHgK9Pu4XN1dvYX1+I\nq7OFClf3NdgDzkPcufb7xFpsLM65iLmZs7BZdM9yoOVkdN9OMjk3CYDahja2Hayl4Eg9+446KSxv\nBOBQeSNbD9jJSbexYNoIJuUmktSzXKuIhA6Ft5yT+IhYluQsZEnOQqB7J7Nf7fqd//nmThd/L3qD\nvxe9gdVoIT4ijtToFK4dcwVp0al4fV7+uO8lTAYTN01a2e+euquzhWhz1FkuuhJ+vcy0hCiWz8lh\n+Zwc6pvcrNtTxfZCO6U1Lto7uygsb/QHel5mHDPGpTAiJYax2QnYoixBrl5EzkThLQNiYvI4nlr0\nU1ydLWyq2sbasnV0ejtxdbbQ4e3E3laHva2O/22to9Prwdne4H/vlfnLSYxMOONnlDVX8tiWJ7gw\n8wK+OPG6QDZnWEmKi+Qz8/P4zPw8Ojq7qGtys+OQg+KKRgqO1nOkqokjVd2rvJlNBpLjIlk0I5uZ\n41NJioskRNZxEgkrCm8ZUDZLDJeM+jSXjPo0Xp+X8uZKMMD6ik2sq9xEbZvjpPf810ePMi1lMl+e\n8sU+z/1B+XoAPqraovA+R1aLiczkGP/yq+2dXew9XM+OQ3bK7S5Ka1zUONt46Z1DvPTOIf/7xo1K\n4OZlE8jQ7WgiQ4LCWwLGaDAyKi4bgFETslk4cgEbq7ayvnITbR73Ca/d7SjgyZ3PMCVzHO42D+nR\nqYxLzCchIt7/Gq96gAMuwmJi5vhUZo5PBaDG2cr7Oys5WtXE4aomOjq9ABSWNnDv0xsZkxXPyDQb\n549LwWQ0MjLNpmF2kSBQeMugyYxJ5+oxl3FV/goa2ruvt752eDUVrioqXFUUNRyhqOGI//WRpki+\nOvVLpEenYjKa6OpZOAa612s3GAxUt9RyqKGYBSPmaob7AEhPjOZzF48BoKOzi3J7C29uKaXe1cHR\nyib/DPa1Oyr875k7OZ0rF+SRnhhNp8eLyWTQbHaRAFN4y6AzGAz+a9w3TVoJQGN7EwV1BylrK+WD\nkk0AuLvcPLnzmVOeo6WzFZs1hoc2/RwAq9HKnMyZ/ud1nfaTs1pMjB4Rx9evnEJqaiwlZfUUVzZR\nXNHIO9vKaXF7ANhYUMPGghpiIs20uD2kJUaxcHoW
C6ZlqlcuEiAKbxkS4iPiuHDEBaSmLuLSrEtw\ntNXzXvk6ypsrcbjrT3r9Cwf+Sl7cKP/jP+1/hY3V27h18vXEWm0n9NKP/285d9GRFqaOTmbq6GRW\nzM2hwtFCYVkDpTXNbC90+MO81tnGn9cW8df3ikmOj2DB1Ewm5iQRE2XWVqciA0ThLUNOYmQCiZEJ\n/h3L6t1ONlRuwevzUtx4lEMNh9ntKGC3o+CE9xU6i/hH0b+5fsI1eLwe//Hj/1sGhtViIi8zjrzM\nOKB7pMPZ3E6ts41D5Q3sL3FyoLR7Cde/f3iEv3/YfTkkJtJMWmIU08ekMH5UImOy4zXELnIOFN4y\n5CVFJnLZ6EuB7pAodBZz0FnEHsc+/25ovTZWb+Vw41HmjbjAf8ztaed/d/+RpMhEPjfuykGtPVwY\nDAaS4iJJiotkQk4iV8zPo9LRwoFSJyXVzewvceJodNPi9nCkqpkjVc3AERJjI0hPjGJafgozxqeS\nGh+puQsi/WDwhcjFwYHeqF2bvw9NZ9MWn89Hm6eN4sajALxZ8h6lzeV99rRz4kaSbcvkC+Ov8YdE\ne1cHPp+XSPPArjQWrt/L6VQ6WiitaWbP4XqOVjdRVdd60musZiMTchKZlJtEq7uTnIxYpuQlYTEP\nzHr5+k6GJrWl7/OdinreErIMBgPRlmimpkwCYGrKJNyedrbU7GC3o4AjjSUn3ZJW0lRGSVMZ5a4q\n0qJSsFlj2GUvwOvzcv+c7wx4gMsxI1JiGJESw9zJGQA0t3ZQ6WjB0ejmw12VFJY30uHxsru4jt3F\ndSe89+Lzszh/bAqj0mOJjbaody5hTz3vYUBtOT17ax2FziLauty8V7b+hJXdPi49OhUfPpbnLiY9\nOpXn9/+FZbmLmJk+/Zw+W9/L2fF0ealvcrN5fy27D9dRYXfR1n7yZMOoCDPTx6SQlxlLWmIUORlx\nxMdY+/UZ+k6GJrWl7/OdisJ7GFBb+s/j9XCksYRDDYc50lTKvrqDZ3zPpOTxXJa3BFdHC1NSJvqP\nv136Pu+Xf0SUOZJ5mRdw8cgFJ7zv423p8nZhMBjOcl32oSFYf8dcbZ1s2ldDXaObQ+UNVNa10tZ+\n4mURo8HAmKw4RqTayM2IZVJOIgmxEf7njt/XXP9Whia1pe/znYqGzSWsmI1mxibmMzYxH+je6MTr\n8+L1edlctZ01JWtxd5041L6v7qA/5BMjEjg/bSrnpU7h70Vv+F/z10OvsTB7/mmHcz1eD49sfpw4\nayx3z7g9QK0bfmxRFi6Zme1/7PP5qHG28eGuSpyudqrqWimtaT5ho5XjxdusXL94HONHJhDXz965\nSChQeEtYO3670ktzL+bS3ItxtNVT6CymuPEIVS01lDSV+V/jbG/g3bIPebfsw5PO9bOtvyIzJp0L\nMs5nQtLYE56raqmlttVBbasDt8eta+vnyGAwkJEUzXU9q8AB1De5OVzZRElNM0XljRyuaqLT072s\na6Org9/8Yy8GIG9EHDFRVmIiTZyXn8LY7HhthyohS+Et8jEpUUmkRCVxYc/tZm5POyVNZVS31lLe\nXElbl5sdtbtPel9JcxklzWVsqt7G6PhcxqXlMiIiixExGdS01PhfZ2+rZ2TsCLbX7qa21cGns+YR\nbYkatPYNN723qM2akAZ0987tDW3sKq7jQImTsloXjsbugO+1saD7+4iLtpCdZiMvM47JuUnkZcYR\nYR2Yme0igaRr3sOA2hIcdW1OWjwteLxd7LLv5e3S9/v1vixbJteOvYL/t+NpAOZlXsBV+SuwWfte\nfazN4+b5/X9hbuZM/wz7wRJK38uptLg72X24lgZfHYX7vTga26lytJ602Y3FbGT8yAQ6PN7uIfsZ\nWWSmxJBgiwhS5acX6t/J8dSWvs93KgrvYUBtGRp6N0spbjjK4cajlLsr2Fqxq9/vjzRFkBqVzNVj\nLmdUXBYRpgh8
Ph8mY3dP8J/Fq3izZC0ATy36aUDacDqh/L30+r99L7O5ejuX513K8rzFeLq8VNhb\nqHG2squojtKaZiocLad8b4LNSm5GHLkZseRkxDIyzUZibERQb1kbDt9JL7Wl7/OdiobNRQZI7w/y\n/IRc8hNy/f+IO7o6qWm1kxyZwKbq7ex17Keo4TAjY7OobrXT5mkDwN3VTpmrkl/ufNp/zhhzNCsn\nfJa0qBTq2k5e4136b3P1dgDWVW5ied5izCYjOT1hPHtiOgDO5nYOljqpru8O9C6vl7omNw2uDnYW\nOdhZdGw/+qS4CCblJpGVEoPZZGRKXhLp2u9cBonCWyTArCYLI2NHAHDxyAVcPHKBv5fe1NHMWz0r\nw42KzT5pIlyLp5Vn9z5/0jkf2PATJiaNY2H2fP5Q8CLTUidzWd4SmjtcVLiqGJeYjwGDFjM5S4mx\nEf5FZK76VPfa+l6fj1pnG0ermzha1UxJdTPldhf1Te2s2111wvtHpMRgMRtJiYvkkpnZJMdHkhgb\ngdkUercHytCm8BYJgt5QjbPGcs3YK/zHP5O/HHurg9SoZHbY97CxaisdXZ3Y2xy4Oo8N6Tra6viw\nYgMfVmwAoNxVyVsl79Hp7fS/ZmLSOGamT+dIYwmX5iwkJSqZrTU7iTZHMSl5/CC1NPQZe2a4ZyRF\nM3dSd7B7fT4KjtRT5WjhSHUzrrZOiioaqewZdi+pbmZboR2AqAgTkVYzMZEWrpifS1JcBLkZsfh8\nKNTlnCm8RYYQi9HMCFt3QMzOmMHsjBlA9wIvZa4KXB0tNLY3YTaa+WfxKho7js2gPj64AfbXF7K/\nvhCA9ZWbSIiIp6G9+17oL038HOMTx5AYmYDH62FLzU7cHjdzMmZq5ns/GA0G//aovdo7uzha1UR7\np5ddxQ4OlDhpa/fQ4Oqgrb0LZ3M7v/nHXgBMRgNer4/zxqQwKt3GxJxE0pOiiY+xarRE+kXhLRIC\nTEYTucftXw5wQcb59M43dbY3Uugspq6tji6fl7dK3zvpHL3BDfDc/j8D3b8g7Lbv8y9MU9RwmK9M\n+RIGgwGvz8vWmp2MiMkgOWXsSeeTE0VYTIwflQjAtPzuUPf5fFTYW7qvoRc7qHG24WxyU9fUDuC/\njv7a+qNA99KvyXERjB+ZyNiR8fh8kJsZS3pi97X0o9VNpCdGExWhH93hTn8DREKU0WCEnk5a773p\nva4as4JOrwezwURDeyNvHHkLj9eDDx9ba3b6X9c7iavXTvteHt78OPnxuRyoL6TO7QRgRsVULh+5\nlNToFLbX7CIvPofkqCTauzqwGrVRyOkYDAay02xkp9n896F7fT7qm9w0ujrYXmin1tlGdX0r9c3t\ntLV7KLd7KLe38M728p5zQG5GHLXOVlrcHqaMTuLrn5kczGbJEKBbxYYBtWVoGqpt6V0O9nBjCTWt\ntdS1OSlsKKbB3YjH56Gl8+StOk8l0hR5wlKyczNn8dkxlxNjGZozru949z8ASIiI55H59wW5mpP5\nfD4aXN07rR0qb6C0xoWzuZ2SmlP/HUpLiiYrOZrUhCgiLCbGZMeTnhRNWkLoXfYYqv9WzoVuFROR\ngDAajBgNRsYl5jOuZ433Xl3eLg44D1Hb6qDe7cRmicFmieGd8vepaXGc8NqPrwG/sWorG6u2Mit9\nOk0dLuKsNiLNkRxyFlPT2j15a3rqVL4y5Qb11E/BYDCQGBtBYmwEk/OOjaLUNbqpa3JTVdfC7uI6\n6pvbqXS0UFvfSm39yb9oJdispMRHkZMRS2ZPuOdmxBIVYdYEuWFE4S0ifiajicnJE5icfOLxq6Yv\npqSyhgpXFQ3tTTR1NOPDR0tnK2+VvIePYwN4xw/Lf9xO+x6+v+5HZESnkRKVTIwlmtkZM8m2ZfLv\no2/j6nBx3bgrQ3LntUBJjo8kOT6ScSMTuGh6FgBdXi8dPgO7DtRQ1+Smuq6Vcn
sL5XYXDa4OGlwd\nFFWcvFFLRlI0WakxjM2KZ+zIBDKSoun0ePFBv7dVlaFB4S0i/RJtifbvxna8K/OX4/V5afW0Ud1S\nS2lzOW6Pm9pWB1tqdpz0+pbOVoobj1LceBTgpHvbP6jYwJTkCcRYYjAajGTZMkmMTCApMoHG9ib/\n0rBen5f1lZvIi8shu+c++nBhMhoZlRpLlOnEEYxOj5cGVzulNS4OVzXS5Oqgsq6FI1Xdw7jV9a1U\n17ey7aD9hPdZzUZmjE+lobmd0SPimTclgxHJ0RohGcIU3iLyiRkNRmyWGMYk5DEmIc9//ObJXwC6\nN3extzmwt9VR01KLq7MFH2Bvc3C44SjurvYTzre37kCfnzciJoPKlmoAYi027p7xdaIsUUSZo7AY\nw/fHmsVsJDUhitSEKGaOT/Ufb2v3UNfopqGlnUpHKyXVTRwsa6C+Z9Z7h8fr36zlQGkD/95YQnyM\nlREpMcTHWLGYjcyZlE5GUjQJsREYFepBF75/y0Vk0ESaIxgZm8XI2KyTnuvydtHQ3kSLpwVHWz2H\nnMUcqD9ERkw6nd5OqlpqTrjNDfAHN0Bzp4sfb/o5ACaDiYSIeGyWGGIs0UxNmUhu3Cgc7mNLyza0\nN9Lp9WAxmtnj2IfP5+NmItEAABXJSURBVGNaavfsbVdnCzHmT9bj3FC1lfToVEbH55zzOQZaVIS5\ne9Y7NqbkHbsm0uX14vX62FVUR1V9KynxkWw/aKewvIHGlg4aWzr8r/2wZzU5s8mAxWwkKS6SiTmJ\nZKXEkJbYPVEuKS64672HE802HwbUlqFJbRlY9W4npc0VVLiq8Pl8WIxmdtkLqHPX097VcdIiNX2J\nNEWQGJlAVc9WrV+fdjPrKjaxt24/ObEjuf28W4i12k753tpWOxWuaqanTsHr89LW5fbvC1/oLPLv\nFhfozWMC+Z14fT4cjW6q61oorXFRWtOM09WOvcFN03GB/nEmo4EEm5X8rHhyM+JIT4wiMS6C/9/e\nnUdHXd57HH/Pmkkmk2VCJhDWJCxBZL2FUyQCtgpux1PtEZdDsb0FixHE60VEyubtKQaKXKy11Qq2\nNkcRjV6KlaKt59CDNkSBNpqAQgIEspBlMpkts8/v/hEZE0nAhSy/5Pv6h/ktM/N8mDnnm9/veeZ5\nhqSZiTN0vdRqX/h+XSky2lwIIdqxmlKxmlKZkn51bN/8Ud8D2vq/3UEvgUiAWk8dp1xVKIrCCUcl\nESUCQENrU+yxPxKIFW6A5z7+Y+xxlfscTxzaQkpcMpnmwei1ekanZDMqaTgtAScvHXsVT8jL94Zf\ny+H6f9MaauWx6StITxhEuf2z2OtEohF0Wh2fNVfwl9Pvct9VdzEo/ksjAdsJRUK8X1vCdzKmdPmH\nQ0/RajTYUuKxpcQzKWdQh2OBYITWQJjKGidV9W7qHT6cngB19lY8vhB2VwC7q4EPjzd0eL0Ek55h\n6Wb8wQgjB1vITDMzecwgEk2Gno7XL8iVdz8gWfomydK3RKIRkq0mHM0+PEEPpY3lhKIhTrvOcrz5\nBNlJIznRUklUiV6R9/tBzs3MHTaLh//R9pvyCWm55E/+TwDK7Z+i0+jItX4xc91rJ/bwj+p/kp08\niv/+j/zLvv7X/UyiSpT/Pfo7kowWlkxc9DXTfDUXJpI5VevivL2Vmqa20e/1jlYuVWmGpJlJSjBg\nNOiYlJPGyAwLyYlGAqEIVkscCSoq8HLlLYQQV5BOqyPeYMKjDZFqSmHu8FkXnROKholEw9j9DoKR\nIJ85KqhsOYM31Ird30yCIR5XwH3RALvO7Kncx57KfbHtcvun7D/zHt5Qa2yE/erpK9BpdMTrTZTU\nHQHglPMMzX4HVlPqFUrexuFv4ZSzKpazOwb22T6fxjVrSFKH/YFQBJc3yNl6N1X1Hlo8Ac7UuTjf\n7CMciVJn91Jnbzv3k1P2Ds/V67RkD7GQmZ
7ImGHJmE0GgqEIqUlx5GQmX/EMaiHFWwghPmfQ6jFo\n9QxNHAJAVheDzryhVqpc59BoNJxxnqXRZyfVlILFkEiF8zT/avi40+e9deqdDtsFHz3d6XkFHz7N\nDSPnMiwxMzZjnVFnICUumUAkSJKx7WqsNeTjj8d2cXVaLrOHXdPhNU46KkkyWsgwt03L2n4Rm2a/\ng4yEdHpKnEHXbhS8rcMxjy+EJxSlodFDs9vPyXMt1NpbaXEHMBl11Dt8nKh2cqLayYF/1XR4rkbT\n9gfD1VlWbKnxJMTpmZBlJcls7Pcj4qV4CyHE12Q2JMSWVR1vHdvh2Nzhs2L93S0BJ76wn3L7p7gC\nbkLRMGfd1VS5znWY2ObLvOHWDlftXzYsMZOstGGcdzZxsuUU5fZPcQZcaDRaDtUdJsloocp9DqPW\nwH9Ne4BhlkwaW7+4om3y2XEHPWQnj+z1CXES4w1kjbDQmBQHwNwpHX+RUNvkpabJS53dS3WjF4fb\nj8MdwOEKoChQ39xK/ZdmmjPqtZji9Bj1WrIzk8gZmozJqCMjtW1pV3O8Hp1W3RMBSZ93PyBZ+ibJ\n0vf0pRyKohBVojT5m0mPT8Md9OCPBIjTGalwnOLfjWVtv4tvbSAUDXdLG2zxg7h+5BzGW8di0pn4\npOkYk9InEK830RJwkmS09Ehx/yafi9cforrBw+k6N01OHw53ALvLT2OLH1/g8v9fyYlGxgxNJj01\nnqQEI6OHJZOcYCQxwYDJ+M2va3uqz1uKdz8gWfomydL3qDVHMBIENHhCHuJ0cZxxnSOk9+H2+NBr\nDZx0VFLlrkav0VHtqUWD5pJX9peTGpeCI9ACtM1HPyYlG38kgDvoZqptEllJI/CF/SQazbHnKIoS\ne8/G1ib0Wj1p7Va6u5Qr/bm0+kPUO3xUnXfjD0Y4VetEAarOu2ly+i/5XJ22bY55s8lAWrKJnMwk\nRg22kJpkIj3FdNkrdineXyLFu2uSpW+SLH1Pf8kBl8+iKAoNrY2Y9PHUec9zxnWOYCRIrnU0f6v6\nB8eaPyPZmNShL/zrMmj15FrH4g/7OdlyqsMxnUbH8ilLCEQCHK4v5QejbyIl7osBZsFIiMDnA/+y\nhw7psc8lqig0OHxEIlGOnmyi0eGjxRPA4QnQ6g/T4gl0OTJeA5jjDcQZdAy3JTJysIUEU9vt+dFD\nk7GlJpA5JFmKd3tSvLsmWfomydL39JcccOWyXPiNvCvoJhgJUtFyCq1Gy4mWSuy+ZpKMFpxBF86A\ni0Ck6wlaLmdU0giykkdg1psprvsI++ez3iUazNw//V4Uvx5bwiAshsRenaXN4wtxrt5No9NPKByl\nqt7dtoJbiw+nJ3jJ+xnJZiPbH5mLErpy3RzyUzEhhBAX0Wq0JMdZSI5rKxI5KaMAuGHk3IvOVRQF\nX9iPJ+ThjOsczoALf9iPRqPFE/LiDwf4qP5op+9zxnWWM66zF+33hLxs++cLse0h5gzcQQ+ekJc0\nUyrTM6YyImk4npCHlLhk/q/ibWYMnsa8kdd96+zn3DUoKIywDIvtS4w3MH6UlfGdnB+ORKlt8qLV\najhV66Km0YvHF6TO3orTGyQciRKJKvTEUDi58u4HJEvfJFn6nv6SA/p+llA0TLPfQbIxiVpvHWfd\nNbiDHpr9DiLRCEcaStFqtN9qUpyRScOxGBIJRkPY4tNINJiJKFH+dvYA0DZRjk6rwxqXwrHmE9yS\ndQPJcUn4wn4O1R2m6OReAJ6a/T+Y9Kav9J7BSJDWsK9DF0B70uf9JVK8uyZZ+ibJ0vf0lxzQ/7L8\n+/QJwtEw1Z5aGlqbqPPWk2ZKJRQN09DaxFn3ORRFIfz5FLffVFbSCE53cgfgtuwbGZOawwjLUNxB\nD7Xe87xY9jK3Zs/nuuF5sfOeLd3JZ80V3D9xEVcPuvj6vF/MsLZp0yZKS0vRaDSsWbOGSZMmxY4d\nOnSIbd
u2odVqycrK4pe//CValf/uTgghxDdzYWKckUnDL3tuvbcBR8CJVqPBHfSi1+qo9tThCXo5\n39pAnec87pCn0+d2VrgB9p7a3+n+opN7+cxRgV6rx2pK4djn89f/7uM/cM2Q6URRCIQDTEjLZcbg\naV8l6hXRbcX7ww8/pKqqit27d1NZWcmaNWvYvXt37Pj69ev505/+xODBg3nooYc4ePAgc+bM6a7m\nCCGE6CcyzLbYzHEXTG63YM0FoUgIg85AJBrBEWihJeCi3tuAQWegxe+k0WenJeiksuU0Y1NHU9Fy\n+vM+fE2H2/mfNB3rtB3/rPso9vhfjZ/wl9PvsmX+49ADvd7dVryLi4u5/vrrAcjJycHpdOLxeEhM\nbFst580334w9tlqtOByO7mqKEEKIAciga1vQRKfVMSg+jUHxaYxOyery/Eg0QlSJYtAZ8If9sRnx\nWvxOmv0OApEgY1JzyEkexR/KX8HubyYYCcV+btcScOIPB9AR3+3Zuq14NzU1MWHChNi21WqlsbEx\nVrAv/NvQ0MAHH3zAihUruqspQgghxGXptDp0tK07btKbMNG2Glxnlk9dEnsciUZwhzxYDIkMtqTQ\n6O/+sQg99lOxzsbF2e12li5dyoYNG0hNvfQKOqmpCej1XS/m/k10NRBAjSRL3yRZ+p7+kgMkS18y\nmJTY457I0m3F22az0dTUFNtuaGggPf2LVWw8Hg9Llizh4YcfJi8vr7OX6MDhaL3sOV9HfxupKVn6\nHsnS9/SXHCBZ+qqeGm3ebb3qs2bN4p132pa/Ky8vx2azxW6VAxQUFHDfffcxe/bs7mqCEEII0S91\n25X3tGnTmDBhAnfffTcajYYNGzbw5ptvYrFYyMvLY8+ePVRVVVFUVATArbfeyl133dVdzRFCCCH6\njW7t8165cmWH7dzcLzr+y8rKuvOthRBCiH5LZkURQgghVEaKtxBCCKEyUryFEEIIlZHiLYQQQqiM\nFG8hhBBCZaR4CyGEECojxVsIIYRQGY3S2aTjQgghhOiz5MpbCCGEUBkp3kIIIYTKSPEWQgghVEaK\ntxBCCKEyUryFEEIIlZHiLYQQQqhMty4J2ldt2rSJ0tJSNBoNa9asYdKkSb3dpK/kxIkT5Ofn8+Mf\n/5iFCxdSV1fHqlWriEQipKen86tf/Qqj0cjevXt56aWX0Gq1LFiwgDvvvLO3m97Bli1bOHLkCOFw\nmJ/97GdMnDhRlTl8Ph+rV6/GbrcTCATIz88nNzdXlVkA/H4/t956K/n5+cycOVOVOUpKSlixYgVj\nxowBYOzYsSxevFiVWQD27t3Ljh070Ov1PPTQQ4wbN06VWV5//XX27t0b2y4rK2PXrl1s3LgRgHHj\nxvHEE08AsGPHDvbv349Go2HZsmXMmTOnN5rcJa/Xy2OPPYbT6SQUCvHggw+Snp7e81mUAaakpES5\n//77FUVRlIqKCmXBggW93KKvxuv1KgsXLlTWrl2rFBYWKoqiKKtXr1b27dunKIqiPPXUU8rLL7+s\neL1eZd68eYrL5VJ8Pp9yyy23KA6Hozeb3kFxcbGyePFiRVEUpbm5WZkzZ44qcyiKorz99tvK73//\ne0VRFKW6ulqZN2+earMoiqJs27ZNueOOO5Q33nhDtTkOHTqkLF++vMM+tWZpbm5W5s2bp7jdbqW+\nvl5Zu3atarO0V1JSomzcuFFZuHChUlpaqiiKojzyyCPKgQMHlLNnzyq33367EggEFLvdrsyfP18J\nh8O93OKOCgsLla1btyqKoijnz59X5s+f3ytZBtxt8+LiYq6//noAcnJycDqdeDyeXm7V5RmNRl54\n4QVsNltsX0lJCd///vcBuO666yguLqa0tJSJEydisVgwmUxMmzaNo0eP9lazLzJ9+nSefvppAJKS\nkvD5fKrMAXDzzTezZMkSAOrq6sjIyFBtlsrKSioqKpg7dy6gzu9WV9Sa
pbi4mJkzZ5KYmIjNZuMX\nv/iFarO09+yzz7JkyRJqampidz0vZCkpKeHaa6/FaDRitVoZOnQoFRUVvdzijlJTU2lpaQHA5XKR\nkpLSK1kGXPFuamoiNTU1tm21WmlsbOzFFn01er0ek8nUYZ/P58NoNAKQlpZGY2MjTU1NWK3W2Dl9\nLZ9OpyMhIQGAoqIiZs+ercoc7d19992sXLmSNWvWqDbL5s2bWb16dWxbrTkAKioqWLp0Kffccw8f\nfPCBarNUV1fj9/tZunQp9957L8XFxarNcsHHH3/MkCFD0Ol0JCUlxfarKcstt9xCbW0tN9xwAwsX\nLmTVqlW9kmVA9nm3p/ST2WG7ytFX8/3973+nqKiIF198kXnz5sX2qy0HwKuvvsrx48d59NFHO7RT\nLVn27NnDlClTGD58eKfH1ZIDYNSoUSxbtoybbrqJc+fOsWjRIiKRSOy4mrIAtLS08Jvf/Iba2loW\nLVqkyu9Xe0VFRdx+++0X7VdTlj//+c9kZmayc+dOPv30Ux588EEsFkvseE9lGXBX3jabjaampth2\nQ0MD6enpvdiiby4hIQG/3w9AfX09Nput03ztb7X3BQcPHuS5557jhRdewGKxqDZHWVkZdXV1AIwf\nP55IJILZbFZdlgMHDvDee++xYMECXn/9dX7729+q9jPJyMjg5ptvRqPRMGLECAYNGoTT6VRllrS0\nNKZOnYper2fEiBGYzWZVfr/aKykpYerUqVit1titZ+g6y4X9fcnRo0fJy8sDIDc3l0AggMPhiB3v\nqSwDrnjPmjWLd955B4Dy8nJsNhuJiYm93Kpv5pprrolleffdd7n22muZPHkyn3zyCS6XC6/Xy9Gj\nR/nOd77Tyy39gtvtZsuWLTz//POkpKQA6swBcPjwYV588UWgrTumtbVVlVm2b9/OG2+8wWuvvcad\nd95Jfn6+KnNA2+jsnTt3AtDY2IjdbueOO+5QZZa8vDwOHTpENBrF4XCo9vt1QX19PWazGaPRiMFg\nIDs7m8OHDwNfZPnud7/LgQMHCAaD1NfX09DQwOjRo3u55R2NHDmS0tJSAGpqajCbzeTk5PR4lgG5\nqtjWrVs5fPgwGo2GDRs2kJub29tNuqyysjI2b95MTU0Ner2ejIwMtm7dyurVqwkEAmRmZvLkk09i\nMBjYv38/O3fuRKPRsHDhQm677bbebn7M7t27eeaZZ8jKyortKygoYO3atarKAW0/rfr5z39OXV0d\nfr+fZcuWcfXVV/PYY4+pLssFzzzzDEOHDiUvL0+VOTweDytXrsTlchEKhVi2bBnjx49XZRZo65Ip\nKioC4IEHHmDixImqzVJWVsb27dvZsWMH0DY2Yf369USjUSZPnszjjz8OQGFhIW+99RYajYaHH36Y\nmTNn9mazL+L1elmzZg12u51wOMyKFStIT0/v8SwDsngLIYQQajbgbpsLIYQQaifFWwghhFAZKd5C\nCCGEykjxFkIIIVRGircQQgihMgN+hjUhBorq6mpuvPFGpk6d2mH/nDlzWLx48bd+/ZKSErZv386u\nXbu+9WsJIS5NircQA4jVaqWwsLC3myGE+JakeAshuOqqq8jPz6ekpASv10tBQQFjx46ltLSUgoIC\n9Ho9Go2G9evXM3r0aM6cOcO6deuIRqPExcXx5JNPAhCNRtmwYQPHjx/HaDTy/PPPYzabezmdEP2P\n9HkLIYhEIowZM4bCwkLuuecefv3rXwOwatUqHn/8cQoLC/nJT37CE088AcCGDRv46U9/yssvv8wP\nf/hD/vrXvwJtS4suX76c1157Db1ez/vvv99rmYToz+TKW4gBpLm5mR/96Ecd9j366KMAscUWpk2b\nxs6dO3G5XNjt9tg6xTNmzOCRRx4B2pZ2nDFjBtC2RCK09XlnZ2czaNAgAAYPHozL5er+UEIMQFK8\nhRhALtXn3X6mZI1Gg0aj6fI4tN0i
/zKdTncFWimEuBy5bS6EAODQoUMAHDlyhHHjxmGxWEhPT4+t\noFRcXMyUKVOAtqvzgwcPArBv3z62bdvWO40WYoCSK28hBpDObpsPGzYMgGPHjrFr1y6cTiebN28G\nYPPmzRQUFKDT6dBqtWzcuBGAdevWsW7dOl555RX0ej2bNm3i7NmzPZpFiIFMVhUTQjBu3DjKy8vR\n6+XveSHUQG6bCyGEECojV95CCCGEysiVtxBCCKEyUryFEEIIlZHiLYQQQqiMFG8hhBBCZaR4CyGE\nECojxVsIIYRQmf8HWc3UufSzifsAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f2113d28ef0>"
      ]
     },
     "metadata": {
      "tags": []
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot training vs. validation loss per epoch for the trained autoencoder.\n",
    "# NOTE(review): assumes `history` is the dict returned by Keras fit(...).history\n",
    "# (indexing with ['loss'] would fail on a raw History object) — confirm upstream cell.\n",
    "plt.plot(history['loss'], linewidth=2, label='Train')\n",
    "plt.plot(history['val_loss'], linewidth=2, label='Test')\n",
    "plt.legend(loc='upper right')\n",
    "plt.title('Model loss')\n",
    "plt.ylabel('Loss')\n",
    "plt.xlabel('Epoch')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "46rGA9-SOfFf"
   },
   "outputs": [],
   "source": [
    "x_opt_predictions = autoencoder.predict(x_opt)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "TJOTw8HCOuPx"
   },
   "outputs": [],
   "source": [
    "mse = np.mean(np.power(x_opt - x_opt_predictions, 2), axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "qWT6ZK8nYUdN",
    "outputId": "7ab31272-ceb3-4724-b483-48b54f545fb8"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(13465,)"
      ]
     },
     "execution_count": 59,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mse.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 51
    },
    "colab_type": "code",
    "id": "kDobg2zfabLr",
    "outputId": "c70b3fef-2354-49f0-c888-9fe5a3182c66"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0.02252524, 0.00385963, 0.00289919, ..., 0.02084558, 0.07711355,\n",
       "       0.02602267])"
      ]
     },
     "execution_count": 60,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 68
    },
    "colab_type": "code",
    "id": "EzH0oKfdaiK1",
    "outputId": "520033d2-2255-47ea-9ff2-415ec3f52667"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.2048736127277406\n",
      "545.0855175368674\n",
      "0.0007104950648083556\n"
     ]
    }
   ],
   "source": [
    "# Summary statistics of the reconstruction-error distribution,\n",
    "# printed one per line (mean, max, min) exactly as before.\n",
    "for statistic in (mse.mean(), mse.max(), mse.min()):\n",
    "    print(statistic)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "8hqFl0W1YMsC",
    "outputId": "a0571fa6-0eb6-4435-f70c-e59c4c6e9be1"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "7.455034536099385"
      ]
     },
     "execution_count": 55,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Anomaly threshold: one standard deviation above the mean reconstruction error\n",
    "# of this (benign) traffic. NOTE(review): presumably samples with MSE > tr are\n",
    "# flagged as anomalous downstream — confirm where `tr` is consumed.\n",
    "tr = mse.mean() + mse.std()\n",
    "tr"
   ]
  }
 ],
 "metadata": {
  "colab": {
   "name": "Anomaly_detection.ipynb",
   "provenance": [],
   "toc_visible": true,
   "version": "0.3.2"
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
