{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "0uS5M7jfP-Al",
    "outputId": "38920e86-6d2a-4135-dd12-491a38db4fab"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "bengin_traffic.csv  sample_data\n"
     ]
    }
   ],
   "source": [
    "!ls"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "x2qbnjrWP-As"
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from scipy import stats\n",
    "import tensorflow as tf\n",
    "import pickle\n",
    "from sklearn.metrics import confusion_matrix, precision_recall_curve\n",
    "from sklearn.metrics import recall_score, classification_report, auc, roc_curve\n",
    "from sklearn.metrics import precision_recall_fscore_support, f1_score\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from sklearn.preprocessing import scale\n",
    "from keras.models import Model, load_model, Sequential\n",
    "from keras.layers import Input, Dense\n",
    "from keras.callbacks import ModelCheckpoint, TensorBoard"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "27VMlgfZP-Av"
   },
   "outputs": [],
   "source": [
    "df_bening = pd.read_csv('bengin_traffic.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "kjSLFbAhP-Ay",
    "outputId": "0c7287da-45be-42ab-acdf-62aadf6e6674"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(40395, 115)"
      ]
     },
     "execution_count": 4,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_bening.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "k8PbQTHJjEct"
   },
   "source": [
     "**Note:** the dataset should contain 49,548 instances, but only 40,395 rows were loaded — verify that the CSV on disk is complete."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "VGZGzJEi9VGR"
   },
   "outputs": [],
   "source": [
    "#df_bening = (df_bening - df_bening.mean()) / df_bening.std()\n",
    "#df_bening = scale(df_bening)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "gh3tg77_AiY_"
   },
   "outputs": [],
   "source": [
    "scaler = MinMaxScaler()\n",
    "df_bening = scaler.fit_transform(df_bening)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "ORF702HoP-A2"
   },
   "outputs": [],
   "source": [
    "x_train, x_opt, x_test = np.split(df_bening, [int((1/3)*len(df_bening)), int((2/3)*len(df_bening))])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 87,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 85
    },
    "colab_type": "code",
    "id": "i1G9vonQP-A4",
    "outputId": "6f2c5ddb-79af-46da-cc5d-701702008086"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(40395, 115)\n",
      "(13465, 115)\n",
      "(13465, 115)\n",
      "(13465, 115)\n"
     ]
    }
   ],
   "source": [
    "print(x_train.shape)\n",
    "print(x_opt.shape)\n",
    "print(x_test.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "s38Jxq73P-A7"
   },
   "outputs": [],
   "source": [
    "nb_epoch = 800\n",
    "input_dim = x_train.shape[1]\n",
    "learning_rate = 0.012\n",
    "\n",
    "autoencoder = Sequential()\n",
    "autoencoder.add(Dense(int(0.75 * input_dim), activation=\"tanh\", input_dim=input_dim))\n",
    "autoencoder.add(Dense(int(0.5 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.33 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.25 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.33 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.5 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(int(0.75 * input_dim), activation=\"tanh\"))\n",
    "autoencoder.add(Dense(input_dim))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 27234
    },
    "colab_type": "code",
    "id": "Rd-G5V6gP-A9",
    "outputId": "90478a36-7138-49e3-aa0d-e7dc1055ffdb"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 13465 samples, validate on 13465 samples\n",
      "Epoch 1/800\n",
      "13465/13465 [==============================] - 2s 113us/step - loss: 0.0344 - val_loss: 0.0252\n",
      "Epoch 2/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0203 - val_loss: 0.0153\n",
      "Epoch 3/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0124 - val_loss: 0.0094\n",
      "Epoch 4/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0081 - val_loss: 0.0064\n",
      "Epoch 5/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0059 - val_loss: 0.0050\n",
      "Epoch 6/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.0050 - val_loss: 0.0044\n",
      "Epoch 7/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0045 - val_loss: 0.0041\n",
      "Epoch 8/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0043 - val_loss: 0.0040\n",
      "Epoch 9/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0042 - val_loss: 0.0039\n",
      "Epoch 10/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0041 - val_loss: 0.0038\n",
      "Epoch 11/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0040 - val_loss: 0.0038\n",
      "Epoch 12/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0040 - val_loss: 0.0037\n",
      "Epoch 13/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0040 - val_loss: 0.0037\n",
      "Epoch 14/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0039 - val_loss: 0.0037\n",
      "Epoch 15/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0039 - val_loss: 0.0036\n",
      "Epoch 16/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0039 - val_loss: 0.0036\n",
      "Epoch 17/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0038 - val_loss: 0.0036\n",
      "Epoch 18/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0038 - val_loss: 0.0035\n",
      "Epoch 19/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0038 - val_loss: 0.0035\n",
      "Epoch 20/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0037 - val_loss: 0.0035\n",
      "Epoch 21/800\n",
      "13465/13465 [==============================] - 1s 87us/step - loss: 0.0037 - val_loss: 0.0034\n",
      "Epoch 22/800\n",
      "13465/13465 [==============================] - 1s 86us/step - loss: 0.0037 - val_loss: 0.0034\n",
      "Epoch 23/800\n",
      "13465/13465 [==============================] - 1s 86us/step - loss: 0.0037 - val_loss: 0.0034\n",
      "Epoch 24/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0036 - val_loss: 0.0034\n",
      "Epoch 25/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0036 - val_loss: 0.0033\n",
      "Epoch 26/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0036 - val_loss: 0.0033\n",
      "Epoch 27/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0035 - val_loss: 0.0033\n",
      "Epoch 28/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0035 - val_loss: 0.0032\n",
      "Epoch 29/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0035 - val_loss: 0.0032\n",
      "Epoch 30/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0034 - val_loss: 0.0032\n",
      "Epoch 31/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0034 - val_loss: 0.0031\n",
      "Epoch 32/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0034 - val_loss: 0.0031\n",
      "Epoch 33/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0033 - val_loss: 0.0031\n",
      "Epoch 34/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0033 - val_loss: 0.0031\n",
      "Epoch 35/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0033 - val_loss: 0.0030\n",
      "Epoch 36/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0032 - val_loss: 0.0030\n",
      "Epoch 37/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0032 - val_loss: 0.0030\n",
      "Epoch 38/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0032 - val_loss: 0.0029\n",
      "Epoch 39/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0031 - val_loss: 0.0029\n",
      "Epoch 40/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0031 - val_loss: 0.0029\n",
      "Epoch 41/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 0.0031 - val_loss: 0.0028\n",
      "Epoch 42/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.0030 - val_loss: 0.0028\n",
      "Epoch 43/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.0030 - val_loss: 0.0028\n",
      "Epoch 44/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 0.0030 - val_loss: 0.0027\n",
      "Epoch 45/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 0.0029 - val_loss: 0.0027\n",
      "Epoch 46/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.0029 - val_loss: 0.0027\n",
      "Epoch 47/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 0.0029 - val_loss: 0.0026\n",
      "Epoch 48/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 0.0028 - val_loss: 0.0026\n",
      "Epoch 49/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 0.0028 - val_loss: 0.0026\n",
      "Epoch 50/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 0.0028 - val_loss: 0.0025\n",
      "Epoch 51/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 0.0027 - val_loss: 0.0025\n",
      "Epoch 52/800\n",
      "13465/13465 [==============================] - 1s 96us/step - loss: 0.0027 - val_loss: 0.0025\n",
      "Epoch 53/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0027 - val_loss: 0.0024\n",
      "Epoch 54/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0027 - val_loss: 0.0024\n",
      "Epoch 55/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0026 - val_loss: 0.0024\n",
      "Epoch 56/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0026 - val_loss: 0.0023\n",
      "Epoch 57/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0026 - val_loss: 0.0023\n",
      "Epoch 58/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0025 - val_loss: 0.0023\n",
      "Epoch 59/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0025 - val_loss: 0.0022\n",
      "Epoch 60/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0025 - val_loss: 0.0022\n",
      "Epoch 61/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0024 - val_loss: 0.0022\n",
      "Epoch 62/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0024 - val_loss: 0.0022\n",
      "Epoch 63/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0024 - val_loss: 0.0021\n",
      "Epoch 64/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0023 - val_loss: 0.0021\n",
      "Epoch 65/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0023 - val_loss: 0.0021\n",
      "Epoch 66/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0023 - val_loss: 0.0021\n",
      "Epoch 67/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0023 - val_loss: 0.0020\n",
      "Epoch 68/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0022 - val_loss: 0.0020\n",
      "Epoch 69/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0022 - val_loss: 0.0020\n",
      "Epoch 70/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0022 - val_loss: 0.0020\n",
      "Epoch 71/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0022 - val_loss: 0.0019\n",
      "Epoch 72/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0021 - val_loss: 0.0019\n",
      "Epoch 73/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0021 - val_loss: 0.0019\n",
      "Epoch 74/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0021 - val_loss: 0.0019\n",
      "Epoch 75/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0021 - val_loss: 0.0018\n",
      "Epoch 76/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0021 - val_loss: 0.0018\n",
      "Epoch 77/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0020 - val_loss: 0.0018\n",
      "Epoch 78/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0020 - val_loss: 0.0018\n",
      "Epoch 79/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0020 - val_loss: 0.0018\n",
      "Epoch 80/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0020 - val_loss: 0.0017\n",
      "Epoch 81/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0020 - val_loss: 0.0017\n",
      "Epoch 82/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0019 - val_loss: 0.0017\n",
      "Epoch 83/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0019 - val_loss: 0.0017\n",
      "Epoch 84/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0019 - val_loss: 0.0017\n",
      "Epoch 85/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0019 - val_loss: 0.0017\n",
      "Epoch 86/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0019 - val_loss: 0.0016\n",
      "Epoch 87/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0019 - val_loss: 0.0016\n",
      "Epoch 88/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0018 - val_loss: 0.0016\n",
      "Epoch 89/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0018 - val_loss: 0.0016\n",
      "Epoch 90/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0018 - val_loss: 0.0016\n",
      "Epoch 91/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0018 - val_loss: 0.0016\n",
      "Epoch 92/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0018 - val_loss: 0.0016\n",
      "Epoch 93/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0018 - val_loss: 0.0015\n",
      "Epoch 94/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0018 - val_loss: 0.0015\n",
      "Epoch 95/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 96/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 97/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 98/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 99/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 100/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 101/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.0017 - val_loss: 0.0015\n",
      "Epoch 102/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0017 - val_loss: 0.0014\n",
      "Epoch 103/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0017 - val_loss: 0.0014\n",
      "Epoch 104/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 105/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 106/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 107/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 108/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 109/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 110/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 111/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 112/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 113/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 114/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0014\n",
      "Epoch 115/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0016 - val_loss: 0.0013\n",
      "Epoch 116/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 117/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 118/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 119/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 120/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 121/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 122/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 123/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 124/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 125/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 126/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 127/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 128/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 129/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 130/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 131/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0015 - val_loss: 0.0013\n",
      "Epoch 132/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 133/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 134/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 135/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 136/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 137/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 138/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 139/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 140/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 141/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 142/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 143/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 144/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 145/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 146/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 147/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 148/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 149/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 150/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 151/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 152/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 153/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0014 - val_loss: 0.0012\n",
      "Epoch 154/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0012\n",
      "Epoch 155/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 156/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 157/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 158/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 159/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 160/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 161/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 162/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 163/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 164/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 165/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 166/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 167/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 168/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 169/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 170/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 171/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 172/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 173/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 174/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 175/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 176/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 177/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 178/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 0.0013 - val_loss: 0.0011\n",
      "Epoch 179/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 0.0011\n",
      "Epoch 180/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0012 - val_loss: 0.0011\n",
      "Epoch 181/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0011\n",
      "Epoch 182/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 183/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 184/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 185/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 186/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 187/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 188/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 189/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 190/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 191/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 192/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 193/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 194/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 195/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 196/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 0.0010\n",
      "Epoch 197/800\n",
      "13465/13465 [==============================] - 1s 95us/step - loss: 0.0012 - val_loss: 9.9886e-04\n",
      "Epoch 198/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 9.9562e-04\n",
      "Epoch 199/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 9.9237e-04\n",
      "Epoch 200/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 9.8923e-04\n",
      "Epoch 201/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 9.8619e-04\n",
      "Epoch 202/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 9.8300e-04\n",
      "Epoch 203/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0012 - val_loss: 9.7987e-04\n",
      "Epoch 204/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0012 - val_loss: 9.7676e-04\n",
      "Epoch 205/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0012 - val_loss: 9.7359e-04\n",
      "Epoch 206/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0012 - val_loss: 9.7055e-04\n",
      "Epoch 207/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 9.6757e-04\n",
      "Epoch 208/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.6450e-04\n",
      "Epoch 209/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.6145e-04\n",
      "Epoch 210/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.5847e-04\n",
      "Epoch 211/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.5554e-04\n",
      "Epoch 212/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.5254e-04\n",
      "Epoch 213/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 9.4962e-04\n",
      "Epoch 214/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.4662e-04\n",
      "Epoch 215/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.4383e-04\n",
      "Epoch 216/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.4092e-04\n",
      "Epoch 217/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.3811e-04\n",
      "Epoch 218/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.3526e-04\n",
      "Epoch 219/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.3247e-04\n",
      "Epoch 220/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.2960e-04\n",
      "Epoch 221/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.2692e-04\n",
      "Epoch 222/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.2414e-04\n",
      "Epoch 223/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.2134e-04\n",
      "Epoch 224/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 9.1866e-04\n",
      "Epoch 225/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.1597e-04\n",
      "Epoch 226/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.1332e-04\n",
      "Epoch 227/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 9.1062e-04\n",
      "Epoch 228/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.0798e-04\n",
      "Epoch 229/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 9.0538e-04\n",
      "Epoch 230/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 9.0288e-04\n",
      "Epoch 231/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 9.0021e-04\n",
      "Epoch 232/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 8.9764e-04\n",
      "Epoch 233/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 8.9511e-04\n",
      "Epoch 234/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 8.9268e-04\n",
      "Epoch 235/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 8.9021e-04\n",
      "Epoch 236/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 8.8772e-04\n",
      "Epoch 237/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0011 - val_loss: 8.8519e-04\n",
      "Epoch 238/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0011 - val_loss: 8.8285e-04\n",
      "Epoch 239/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 8.8039e-04\n",
      "Epoch 240/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0011 - val_loss: 8.7803e-04\n",
      "Epoch 241/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.7564e-04\n",
      "Epoch 242/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.7332e-04\n",
      "Epoch 243/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.7098e-04\n",
      "Epoch 244/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0010 - val_loss: 8.6874e-04\n",
      "Epoch 245/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0010 - val_loss: 8.6641e-04\n",
      "Epoch 246/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0010 - val_loss: 8.6417e-04\n",
      "Epoch 247/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.6192e-04\n",
      "Epoch 248/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.5965e-04\n",
      "Epoch 249/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.5745e-04\n",
      "Epoch 250/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0010 - val_loss: 8.5524e-04\n",
      "Epoch 251/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.5312e-04\n",
      "Epoch 252/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.5091e-04\n",
      "Epoch 253/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0010 - val_loss: 8.4886e-04\n",
      "Epoch 254/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0010 - val_loss: 8.4669e-04\n",
      "Epoch 255/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0010 - val_loss: 8.4465e-04\n",
      "Epoch 256/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 0.0010 - val_loss: 8.4257e-04\n",
      "Epoch 257/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.4059e-04\n",
      "Epoch 258/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 0.0010 - val_loss: 8.3848e-04\n",
      "Epoch 259/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.3649e-04\n",
      "Epoch 260/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 0.0010 - val_loss: 8.3450e-04\n",
      "Epoch 261/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 0.0010 - val_loss: 8.3260e-04\n",
      "Epoch 262/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 0.0010 - val_loss: 8.3053e-04\n",
      "Epoch 263/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 9.9879e-04 - val_loss: 8.2860e-04\n",
      "Epoch 264/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.9671e-04 - val_loss: 8.2669e-04\n",
      "Epoch 265/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 9.9465e-04 - val_loss: 8.2472e-04\n",
      "Epoch 266/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.9261e-04 - val_loss: 8.2285e-04\n",
      "Epoch 267/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.9058e-04 - val_loss: 8.2097e-04\n",
      "Epoch 268/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.8858e-04 - val_loss: 8.1913e-04\n",
      "Epoch 269/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.8660e-04 - val_loss: 8.1731e-04\n",
      "Epoch 270/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.8463e-04 - val_loss: 8.1546e-04\n",
      "Epoch 271/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.8268e-04 - val_loss: 8.1365e-04\n",
      "Epoch 272/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.8074e-04 - val_loss: 8.1186e-04\n",
      "Epoch 273/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.7883e-04 - val_loss: 8.1012e-04\n",
      "Epoch 274/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.7692e-04 - val_loss: 8.0839e-04\n",
      "Epoch 275/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.7505e-04 - val_loss: 8.0661e-04\n",
      "Epoch 276/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.7318e-04 - val_loss: 8.0491e-04\n",
      "Epoch 277/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.7133e-04 - val_loss: 8.0319e-04\n",
      "Epoch 278/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.6950e-04 - val_loss: 8.0155e-04\n",
      "Epoch 279/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.6769e-04 - val_loss: 7.9986e-04\n",
      "Epoch 280/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.6588e-04 - val_loss: 7.9821e-04\n",
      "Epoch 281/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.6410e-04 - val_loss: 7.9654e-04\n",
      "Epoch 282/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.6234e-04 - val_loss: 7.9487e-04\n",
      "Epoch 283/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.6058e-04 - val_loss: 7.9322e-04\n",
      "Epoch 284/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.5885e-04 - val_loss: 7.9157e-04\n",
      "Epoch 285/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.5713e-04 - val_loss: 7.9005e-04\n",
      "Epoch 286/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.5542e-04 - val_loss: 7.8861e-04\n",
      "Epoch 287/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.5373e-04 - val_loss: 7.8692e-04\n",
      "Epoch 288/800\n",
      "13465/13465 [==============================] - 1s 103us/step - loss: 9.5205e-04 - val_loss: 7.8544e-04\n",
      "Epoch 289/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 9.5039e-04 - val_loss: 7.8391e-04\n",
      "Epoch 290/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 9.4874e-04 - val_loss: 7.8240e-04\n",
      "Epoch 291/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 9.4711e-04 - val_loss: 7.8080e-04\n",
      "Epoch 292/800\n",
      "13465/13465 [==============================] - 2s 112us/step - loss: 9.4549e-04 - val_loss: 7.7931e-04\n",
      "Epoch 293/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 9.4388e-04 - val_loss: 7.7781e-04\n",
      "Epoch 294/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 9.4229e-04 - val_loss: 7.7641e-04\n",
      "Epoch 295/800\n",
      "13465/13465 [==============================] - 1s 99us/step - loss: 9.4071e-04 - val_loss: 7.7495e-04\n",
      "Epoch 296/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 9.3914e-04 - val_loss: 7.7355e-04\n",
      "Epoch 297/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 9.3760e-04 - val_loss: 7.7212e-04\n",
      "Epoch 298/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 9.3605e-04 - val_loss: 7.7074e-04\n",
      "Epoch 299/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 9.3453e-04 - val_loss: 7.6921e-04\n",
      "Epoch 300/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.3302e-04 - val_loss: 7.6783e-04\n",
      "Epoch 301/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.3152e-04 - val_loss: 7.6646e-04\n",
      "Epoch 302/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.3003e-04 - val_loss: 7.6510e-04\n",
      "Epoch 303/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.2855e-04 - val_loss: 7.6375e-04\n",
      "Epoch 304/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.2708e-04 - val_loss: 7.6239e-04\n",
      "Epoch 305/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.2563e-04 - val_loss: 7.6103e-04\n",
      "Epoch 306/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.2419e-04 - val_loss: 7.5971e-04\n",
      "Epoch 307/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.2276e-04 - val_loss: 7.5835e-04\n",
      "Epoch 308/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 9.2135e-04 - val_loss: 7.5712e-04\n",
      "Epoch 309/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.1994e-04 - val_loss: 7.5580e-04\n",
      "Epoch 310/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.1854e-04 - val_loss: 7.5455e-04\n",
      "Epoch 311/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.1716e-04 - val_loss: 7.5326e-04\n",
      "Epoch 312/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 9.1578e-04 - val_loss: 7.5192e-04\n",
      "Epoch 313/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.1442e-04 - val_loss: 7.5076e-04\n",
      "Epoch 314/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 9.1306e-04 - val_loss: 7.4947e-04\n",
      "Epoch 315/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.1173e-04 - val_loss: 7.4825e-04\n",
      "Epoch 316/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.1039e-04 - val_loss: 7.4704e-04\n",
      "Epoch 317/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.0907e-04 - val_loss: 7.4587e-04\n",
      "Epoch 318/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.0775e-04 - val_loss: 7.4466e-04\n",
      "Epoch 319/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 9.0646e-04 - val_loss: 7.4351e-04\n",
      "Epoch 320/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 9.0516e-04 - val_loss: 7.4228e-04\n",
      "Epoch 321/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.0388e-04 - val_loss: 7.4109e-04\n",
      "Epoch 322/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 9.0261e-04 - val_loss: 7.3990e-04\n",
      "Epoch 323/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.0134e-04 - val_loss: 7.3872e-04\n",
      "Epoch 324/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 9.0008e-04 - val_loss: 7.3763e-04\n",
      "Epoch 325/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 8.9884e-04 - val_loss: 7.3651e-04\n",
      "Epoch 326/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.9760e-04 - val_loss: 7.3537e-04\n",
      "Epoch 327/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.9637e-04 - val_loss: 7.3420e-04\n",
      "Epoch 328/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.9515e-04 - val_loss: 7.3315e-04\n",
      "Epoch 329/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.9394e-04 - val_loss: 7.3201e-04\n",
      "Epoch 330/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.9274e-04 - val_loss: 7.3089e-04\n",
      "Epoch 331/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.9154e-04 - val_loss: 7.2980e-04\n",
      "Epoch 332/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.9035e-04 - val_loss: 7.2866e-04\n",
      "Epoch 333/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.8918e-04 - val_loss: 7.2763e-04\n",
      "Epoch 334/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.8800e-04 - val_loss: 7.2655e-04\n",
      "Epoch 335/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.8684e-04 - val_loss: 7.2554e-04\n",
      "Epoch 336/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.8569e-04 - val_loss: 7.2451e-04\n",
      "Epoch 337/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.8453e-04 - val_loss: 7.2340e-04\n",
      "Epoch 338/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.8339e-04 - val_loss: 7.2238e-04\n",
      "Epoch 339/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 8.8226e-04 - val_loss: 7.2131e-04\n",
      "Epoch 340/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.8113e-04 - val_loss: 7.2034e-04\n",
      "Epoch 341/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.8001e-04 - val_loss: 7.1925e-04\n",
      "Epoch 342/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.7891e-04 - val_loss: 7.1821e-04\n",
      "Epoch 343/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 8.7780e-04 - val_loss: 7.1726e-04\n",
      "Epoch 344/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 8.7670e-04 - val_loss: 7.1625e-04\n",
      "Epoch 345/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.7560e-04 - val_loss: 7.1531e-04\n",
      "Epoch 346/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.7453e-04 - val_loss: 7.1431e-04\n",
      "Epoch 347/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.7345e-04 - val_loss: 7.1330e-04\n",
      "Epoch 348/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.7237e-04 - val_loss: 7.1233e-04\n",
      "Epoch 349/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.7130e-04 - val_loss: 7.1133e-04\n",
      "Epoch 350/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.7025e-04 - val_loss: 7.1037e-04\n",
      "Epoch 351/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.6919e-04 - val_loss: 7.0947e-04\n",
      "Epoch 352/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.6814e-04 - val_loss: 7.0848e-04\n",
      "Epoch 353/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.6710e-04 - val_loss: 7.0749e-04\n",
      "Epoch 354/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.6606e-04 - val_loss: 7.0662e-04\n",
      "Epoch 355/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.6504e-04 - val_loss: 7.0563e-04\n",
      "Epoch 356/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.6401e-04 - val_loss: 7.0474e-04\n",
      "Epoch 357/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 8.6299e-04 - val_loss: 7.0374e-04\n",
      "Epoch 358/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.6198e-04 - val_loss: 7.0293e-04\n",
      "Epoch 359/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.6098e-04 - val_loss: 7.0197e-04\n",
      "Epoch 360/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.5997e-04 - val_loss: 7.0106e-04\n",
      "Epoch 361/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.5898e-04 - val_loss: 7.0012e-04\n",
      "Epoch 362/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5798e-04 - val_loss: 6.9925e-04\n",
      "Epoch 363/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5700e-04 - val_loss: 6.9834e-04\n",
      "Epoch 364/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5602e-04 - val_loss: 6.9752e-04\n",
      "Epoch 365/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5504e-04 - val_loss: 6.9661e-04\n",
      "Epoch 366/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5407e-04 - val_loss: 6.9571e-04\n",
      "Epoch 367/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5310e-04 - val_loss: 6.9482e-04\n",
      "Epoch 368/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5214e-04 - val_loss: 6.9394e-04\n",
      "Epoch 369/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.5118e-04 - val_loss: 6.9305e-04\n",
      "Epoch 370/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.5023e-04 - val_loss: 6.9218e-04\n",
      "Epoch 371/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4928e-04 - val_loss: 6.9126e-04\n",
      "Epoch 372/800\n",
      "13465/13465 [==============================] - 1s 88us/step - loss: 8.4834e-04 - val_loss: 6.9040e-04\n",
      "Epoch 373/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4740e-04 - val_loss: 6.8960e-04\n",
      "Epoch 374/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4646e-04 - val_loss: 6.8878e-04\n",
      "Epoch 375/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4554e-04 - val_loss: 6.8791e-04\n",
      "Epoch 376/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4461e-04 - val_loss: 6.8709e-04\n",
      "Epoch 377/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4369e-04 - val_loss: 6.8622e-04\n",
      "Epoch 378/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.4277e-04 - val_loss: 6.8543e-04\n",
      "Epoch 379/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.4185e-04 - val_loss: 6.8464e-04\n",
      "Epoch 380/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.4095e-04 - val_loss: 6.8381e-04\n",
      "Epoch 381/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.4004e-04 - val_loss: 6.8295e-04\n",
      "Epoch 382/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.3914e-04 - val_loss: 6.8205e-04\n",
      "Epoch 383/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.3824e-04 - val_loss: 6.8127e-04\n",
      "Epoch 384/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.3734e-04 - val_loss: 6.8055e-04\n",
      "Epoch 385/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.3646e-04 - val_loss: 6.7972e-04\n",
      "Epoch 386/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.3557e-04 - val_loss: 6.7900e-04\n",
      "Epoch 387/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.3469e-04 - val_loss: 6.7816e-04\n",
      "Epoch 388/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 8.3381e-04 - val_loss: 6.7732e-04\n",
      "Epoch 389/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.3293e-04 - val_loss: 6.7650e-04\n",
      "Epoch 390/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.3206e-04 - val_loss: 6.7572e-04\n",
      "Epoch 391/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.3119e-04 - val_loss: 6.7497e-04\n",
      "Epoch 392/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.3032e-04 - val_loss: 6.7416e-04\n",
      "Epoch 393/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.2946e-04 - val_loss: 6.7341e-04\n",
      "Epoch 394/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2860e-04 - val_loss: 6.7260e-04\n",
      "Epoch 395/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.2774e-04 - val_loss: 6.7181e-04\n",
      "Epoch 396/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.2690e-04 - val_loss: 6.7109e-04\n",
      "Epoch 397/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2604e-04 - val_loss: 6.7033e-04\n",
      "Epoch 398/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.2520e-04 - val_loss: 6.6959e-04\n",
      "Epoch 399/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2435e-04 - val_loss: 6.6875e-04\n",
      "Epoch 400/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2352e-04 - val_loss: 6.6798e-04\n",
      "Epoch 401/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.2267e-04 - val_loss: 6.6724e-04\n",
      "Epoch 402/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2184e-04 - val_loss: 6.6654e-04\n",
      "Epoch 403/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2101e-04 - val_loss: 6.6576e-04\n",
      "Epoch 404/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.2018e-04 - val_loss: 6.6496e-04\n",
      "Epoch 405/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1936e-04 - val_loss: 6.6432e-04\n",
      "Epoch 406/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1853e-04 - val_loss: 6.6354e-04\n",
      "Epoch 407/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1771e-04 - val_loss: 6.6273e-04\n",
      "Epoch 408/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1690e-04 - val_loss: 6.6205e-04\n",
      "Epoch 409/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.1608e-04 - val_loss: 6.6132e-04\n",
      "Epoch 410/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1527e-04 - val_loss: 6.6057e-04\n",
      "Epoch 411/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1446e-04 - val_loss: 6.5979e-04\n",
      "Epoch 412/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1365e-04 - val_loss: 6.5910e-04\n",
      "Epoch 413/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.1285e-04 - val_loss: 6.5846e-04\n",
      "Epoch 414/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.1205e-04 - val_loss: 6.5772e-04\n",
      "Epoch 415/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.1125e-04 - val_loss: 6.5694e-04\n",
      "Epoch 416/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.1045e-04 - val_loss: 6.5617e-04\n",
      "Epoch 417/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.0966e-04 - val_loss: 6.5553e-04\n",
      "Epoch 418/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.0886e-04 - val_loss: 6.5479e-04\n",
      "Epoch 419/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.0808e-04 - val_loss: 6.5406e-04\n",
      "Epoch 420/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.0729e-04 - val_loss: 6.5336e-04\n",
      "Epoch 421/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 8.0650e-04 - val_loss: 6.5268e-04\n",
      "Epoch 422/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.0572e-04 - val_loss: 6.5197e-04\n",
      "Epoch 423/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.0494e-04 - val_loss: 6.5130e-04\n",
      "Epoch 424/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.0416e-04 - val_loss: 6.5053e-04\n",
      "Epoch 425/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.0338e-04 - val_loss: 6.4985e-04\n",
      "Epoch 426/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 8.0261e-04 - val_loss: 6.4912e-04\n",
      "Epoch 427/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 8.0184e-04 - val_loss: 6.4847e-04\n",
      "Epoch 428/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.0107e-04 - val_loss: 6.4772e-04\n",
      "Epoch 429/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 8.0030e-04 - val_loss: 6.4708e-04\n",
      "Epoch 430/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.9953e-04 - val_loss: 6.4641e-04\n",
      "Epoch 431/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.9877e-04 - val_loss: 6.4570e-04\n",
      "Epoch 432/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.9801e-04 - val_loss: 6.4503e-04\n",
      "Epoch 433/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.9725e-04 - val_loss: 6.4441e-04\n",
      "Epoch 434/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.9649e-04 - val_loss: 6.4361e-04\n",
      "Epoch 435/800\n",
      "13465/13465 [==============================] - 1s 95us/step - loss: 7.9573e-04 - val_loss: 6.4303e-04\n",
      "Epoch 436/800\n",
      "13465/13465 [==============================] - 1s 95us/step - loss: 7.9498e-04 - val_loss: 6.4231e-04\n",
      "Epoch 437/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.9423e-04 - val_loss: 6.4159e-04\n",
      "Epoch 438/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 7.9347e-04 - val_loss: 6.4089e-04\n",
      "Epoch 439/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.9273e-04 - val_loss: 6.4026e-04\n",
      "Epoch 440/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.9198e-04 - val_loss: 6.3963e-04\n",
      "Epoch 441/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.9123e-04 - val_loss: 6.3899e-04\n",
      "Epoch 442/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.9049e-04 - val_loss: 6.3831e-04\n",
      "Epoch 443/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8975e-04 - val_loss: 6.3761e-04\n",
      "Epoch 444/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.8901e-04 - val_loss: 6.3700e-04\n",
      "Epoch 445/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.8828e-04 - val_loss: 6.3629e-04\n",
      "Epoch 446/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8754e-04 - val_loss: 6.3567e-04\n",
      "Epoch 447/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8680e-04 - val_loss: 6.3496e-04\n",
      "Epoch 448/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.8607e-04 - val_loss: 6.3431e-04\n",
      "Epoch 449/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8534e-04 - val_loss: 6.3368e-04\n",
      "Epoch 450/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.8461e-04 - val_loss: 6.3305e-04\n",
      "Epoch 451/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8388e-04 - val_loss: 6.3234e-04\n",
      "Epoch 452/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.8315e-04 - val_loss: 6.3163e-04\n",
      "Epoch 453/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8244e-04 - val_loss: 6.3100e-04\n",
      "Epoch 454/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8171e-04 - val_loss: 6.3036e-04\n",
      "Epoch 455/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8099e-04 - val_loss: 6.2973e-04\n",
      "Epoch 456/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.8027e-04 - val_loss: 6.2907e-04\n",
      "Epoch 457/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7955e-04 - val_loss: 6.2841e-04\n",
      "Epoch 458/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.7883e-04 - val_loss: 6.2780e-04\n",
      "Epoch 459/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.7811e-04 - val_loss: 6.2709e-04\n",
      "Epoch 460/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.7740e-04 - val_loss: 6.2647e-04\n",
      "Epoch 461/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7669e-04 - val_loss: 6.2587e-04\n",
      "Epoch 462/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7598e-04 - val_loss: 6.2522e-04\n",
      "Epoch 463/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7526e-04 - val_loss: 6.2462e-04\n",
      "Epoch 464/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7456e-04 - val_loss: 6.2396e-04\n",
      "Epoch 465/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.7385e-04 - val_loss: 6.2333e-04\n",
      "Epoch 466/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.7314e-04 - val_loss: 6.2271e-04\n",
      "Epoch 467/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7244e-04 - val_loss: 6.2206e-04\n",
      "Epoch 468/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.7174e-04 - val_loss: 6.2144e-04\n",
      "Epoch 469/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.7103e-04 - val_loss: 6.2077e-04\n",
      "Epoch 470/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.7034e-04 - val_loss: 6.2018e-04\n",
      "Epoch 471/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.6963e-04 - val_loss: 6.1954e-04\n",
      "Epoch 472/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.6894e-04 - val_loss: 6.1895e-04\n",
      "Epoch 473/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.6824e-04 - val_loss: 6.1828e-04\n",
      "Epoch 474/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.6755e-04 - val_loss: 6.1769e-04\n",
      "Epoch 475/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.6685e-04 - val_loss: 6.1705e-04\n",
      "Epoch 476/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.6616e-04 - val_loss: 6.1645e-04\n",
      "Epoch 477/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.6547e-04 - val_loss: 6.1581e-04\n",
      "Epoch 478/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.6478e-04 - val_loss: 6.1520e-04\n",
      "Epoch 479/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.6409e-04 - val_loss: 6.1459e-04\n",
      "Epoch 480/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.6340e-04 - val_loss: 6.1403e-04\n",
      "Epoch 481/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.6271e-04 - val_loss: 6.1342e-04\n",
      "Epoch 482/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.6203e-04 - val_loss: 6.1277e-04\n",
      "Epoch 483/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.6134e-04 - val_loss: 6.1216e-04\n",
      "Epoch 484/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.6066e-04 - val_loss: 6.1156e-04\n",
      "Epoch 485/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5998e-04 - val_loss: 6.1099e-04\n",
      "Epoch 486/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.5930e-04 - val_loss: 6.1035e-04\n",
      "Epoch 487/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5862e-04 - val_loss: 6.0969e-04\n",
      "Epoch 488/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.5794e-04 - val_loss: 6.0908e-04\n",
      "Epoch 489/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.5726e-04 - val_loss: 6.0851e-04\n",
      "Epoch 490/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5659e-04 - val_loss: 6.0790e-04\n",
      "Epoch 491/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.5591e-04 - val_loss: 6.0728e-04\n",
      "Epoch 492/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.5524e-04 - val_loss: 6.0671e-04\n",
      "Epoch 493/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5457e-04 - val_loss: 6.0611e-04\n",
      "Epoch 494/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5390e-04 - val_loss: 6.0544e-04\n",
      "Epoch 495/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5322e-04 - val_loss: 6.0488e-04\n",
      "Epoch 496/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5255e-04 - val_loss: 6.0430e-04\n",
      "Epoch 497/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5189e-04 - val_loss: 6.0371e-04\n",
      "Epoch 498/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.5122e-04 - val_loss: 6.0312e-04\n",
      "Epoch 499/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.5055e-04 - val_loss: 6.0247e-04\n",
      "Epoch 500/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4989e-04 - val_loss: 6.0185e-04\n",
      "Epoch 501/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4922e-04 - val_loss: 6.0130e-04\n",
      "Epoch 502/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4856e-04 - val_loss: 6.0069e-04\n",
      "Epoch 503/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.4789e-04 - val_loss: 6.0013e-04\n",
      "Epoch 504/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4724e-04 - val_loss: 5.9947e-04\n",
      "Epoch 505/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.4658e-04 - val_loss: 5.9894e-04\n",
      "Epoch 506/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4592e-04 - val_loss: 5.9839e-04\n",
      "Epoch 507/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4526e-04 - val_loss: 5.9779e-04\n",
      "Epoch 508/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4460e-04 - val_loss: 5.9715e-04\n",
      "Epoch 509/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4394e-04 - val_loss: 5.9662e-04\n",
      "Epoch 510/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4329e-04 - val_loss: 5.9601e-04\n",
      "Epoch 511/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4264e-04 - val_loss: 5.9544e-04\n",
      "Epoch 512/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.4198e-04 - val_loss: 5.9484e-04\n",
      "Epoch 513/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.4133e-04 - val_loss: 5.9426e-04\n",
      "Epoch 514/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.4068e-04 - val_loss: 5.9370e-04\n",
      "Epoch 515/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.4003e-04 - val_loss: 5.9315e-04\n",
      "Epoch 516/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.3938e-04 - val_loss: 5.9249e-04\n",
      "Epoch 517/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.3873e-04 - val_loss: 5.9190e-04\n",
      "Epoch 518/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.3808e-04 - val_loss: 5.9135e-04\n",
      "Epoch 519/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.3743e-04 - val_loss: 5.9078e-04\n",
      "Epoch 520/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 7.3679e-04 - val_loss: 5.9019e-04\n",
      "Epoch 521/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.3614e-04 - val_loss: 5.8961e-04\n",
      "Epoch 522/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.3550e-04 - val_loss: 5.8902e-04\n",
      "Epoch 523/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.3485e-04 - val_loss: 5.8845e-04\n",
      "Epoch 524/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.3422e-04 - val_loss: 5.8787e-04\n",
      "Epoch 525/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.3357e-04 - val_loss: 5.8728e-04\n",
      "Epoch 526/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.3293e-04 - val_loss: 5.8672e-04\n",
      "Epoch 527/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.3229e-04 - val_loss: 5.8614e-04\n",
      "Epoch 528/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.3166e-04 - val_loss: 5.8565e-04\n",
      "Epoch 529/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.3102e-04 - val_loss: 5.8509e-04\n",
      "Epoch 530/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.3038e-04 - val_loss: 5.8449e-04\n",
      "Epoch 531/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 7.2975e-04 - val_loss: 5.8385e-04\n",
      "Epoch 532/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 7.2911e-04 - val_loss: 5.8327e-04\n",
      "Epoch 533/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.2848e-04 - val_loss: 5.8271e-04\n",
      "Epoch 534/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.2785e-04 - val_loss: 5.8215e-04\n",
      "Epoch 535/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.2721e-04 - val_loss: 5.8165e-04\n",
      "Epoch 536/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 7.2658e-04 - val_loss: 5.8108e-04\n",
      "Epoch 537/800\n",
      "13465/13465 [==============================] - 1s 107us/step - loss: 7.2595e-04 - val_loss: 5.8050e-04\n",
      "Epoch 538/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 7.2532e-04 - val_loss: 5.7999e-04\n",
      "Epoch 539/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 7.2469e-04 - val_loss: 5.7938e-04\n",
      "Epoch 540/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 7.2407e-04 - val_loss: 5.7887e-04\n",
      "Epoch 541/800\n",
      "13465/13465 [==============================] - 1s 108us/step - loss: 7.2344e-04 - val_loss: 5.7824e-04\n",
      "Epoch 542/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 7.2282e-04 - val_loss: 5.7770e-04\n",
      "Epoch 543/800\n",
      "13465/13465 [==============================] - 1s 99us/step - loss: 7.2219e-04 - val_loss: 5.7718e-04\n",
      "Epoch 544/800\n",
      "13465/13465 [==============================] - 1s 98us/step - loss: 7.2157e-04 - val_loss: 5.7662e-04\n",
      "Epoch 545/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 7.2095e-04 - val_loss: 5.7609e-04\n",
      "Epoch 546/800\n",
      "13465/13465 [==============================] - 1s 100us/step - loss: 7.2032e-04 - val_loss: 5.7549e-04\n",
      "Epoch 547/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1970e-04 - val_loss: 5.7492e-04\n",
      "Epoch 548/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1908e-04 - val_loss: 5.7439e-04\n",
      "Epoch 549/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.1846e-04 - val_loss: 5.7388e-04\n",
      "Epoch 550/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.1784e-04 - val_loss: 5.7333e-04\n",
      "Epoch 551/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.1722e-04 - val_loss: 5.7270e-04\n",
      "Epoch 552/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.1661e-04 - val_loss: 5.7216e-04\n",
      "Epoch 553/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1599e-04 - val_loss: 5.7163e-04\n",
      "Epoch 554/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1538e-04 - val_loss: 5.7105e-04\n",
      "Epoch 555/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1476e-04 - val_loss: 5.7052e-04\n",
      "Epoch 556/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1415e-04 - val_loss: 5.6997e-04\n",
      "Epoch 557/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.1354e-04 - val_loss: 5.6945e-04\n",
      "Epoch 558/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1292e-04 - val_loss: 5.6887e-04\n",
      "Epoch 559/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.1231e-04 - val_loss: 5.6834e-04\n",
      "Epoch 560/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1170e-04 - val_loss: 5.6779e-04\n",
      "Epoch 561/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1109e-04 - val_loss: 5.6726e-04\n",
      "Epoch 562/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.1049e-04 - val_loss: 5.6677e-04\n",
      "Epoch 563/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 7.0988e-04 - val_loss: 5.6619e-04\n",
      "Epoch 564/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0927e-04 - val_loss: 5.6563e-04\n",
      "Epoch 565/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.0867e-04 - val_loss: 5.6511e-04\n",
      "Epoch 566/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0806e-04 - val_loss: 5.6457e-04\n",
      "Epoch 567/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0746e-04 - val_loss: 5.6409e-04\n",
      "Epoch 568/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0685e-04 - val_loss: 5.6347e-04\n",
      "Epoch 569/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0625e-04 - val_loss: 5.6292e-04\n",
      "Epoch 570/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.0565e-04 - val_loss: 5.6242e-04\n",
      "Epoch 571/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0505e-04 - val_loss: 5.6185e-04\n",
      "Epoch 572/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.0445e-04 - val_loss: 5.6130e-04\n",
      "Epoch 573/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.0385e-04 - val_loss: 5.6079e-04\n",
      "Epoch 574/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 7.0325e-04 - val_loss: 5.6028e-04\n",
      "Epoch 575/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0265e-04 - val_loss: 5.5971e-04\n",
      "Epoch 576/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0206e-04 - val_loss: 5.5919e-04\n",
      "Epoch 577/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 7.0146e-04 - val_loss: 5.5869e-04\n",
      "Epoch 578/800\n",
      "13465/13465 [==============================] - 1s 95us/step - loss: 7.0086e-04 - val_loss: 5.5815e-04\n",
      "Epoch 579/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 7.0027e-04 - val_loss: 5.5765e-04\n",
      "Epoch 580/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.9968e-04 - val_loss: 5.5717e-04\n",
      "Epoch 581/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.9909e-04 - val_loss: 5.5658e-04\n",
      "Epoch 582/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.9849e-04 - val_loss: 5.5606e-04\n",
      "Epoch 583/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.9790e-04 - val_loss: 5.5554e-04\n",
      "Epoch 584/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.9731e-04 - val_loss: 5.5502e-04\n",
      "Epoch 585/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9672e-04 - val_loss: 5.5448e-04\n",
      "Epoch 586/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9614e-04 - val_loss: 5.5392e-04\n",
      "Epoch 587/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.9555e-04 - val_loss: 5.5345e-04\n",
      "Epoch 588/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.9496e-04 - val_loss: 5.5299e-04\n",
      "Epoch 589/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.9437e-04 - val_loss: 5.5243e-04\n",
      "Epoch 590/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9380e-04 - val_loss: 5.5187e-04\n",
      "Epoch 591/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9321e-04 - val_loss: 5.5132e-04\n",
      "Epoch 592/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9262e-04 - val_loss: 5.5083e-04\n",
      "Epoch 593/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9204e-04 - val_loss: 5.5035e-04\n",
      "Epoch 594/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.9146e-04 - val_loss: 5.4979e-04\n",
      "Epoch 595/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9088e-04 - val_loss: 5.4926e-04\n",
      "Epoch 596/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.9030e-04 - val_loss: 5.4872e-04\n",
      "Epoch 597/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8972e-04 - val_loss: 5.4826e-04\n",
      "Epoch 598/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8914e-04 - val_loss: 5.4773e-04\n",
      "Epoch 599/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8857e-04 - val_loss: 5.4726e-04\n",
      "Epoch 600/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8799e-04 - val_loss: 5.4674e-04\n",
      "Epoch 601/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8742e-04 - val_loss: 5.4622e-04\n",
      "Epoch 602/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8684e-04 - val_loss: 5.4571e-04\n",
      "Epoch 603/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8626e-04 - val_loss: 5.4521e-04\n",
      "Epoch 604/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8570e-04 - val_loss: 5.4467e-04\n",
      "Epoch 605/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8512e-04 - val_loss: 5.4415e-04\n",
      "Epoch 606/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8455e-04 - val_loss: 5.4371e-04\n",
      "Epoch 607/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8398e-04 - val_loss: 5.4318e-04\n",
      "Epoch 608/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8341e-04 - val_loss: 5.4266e-04\n",
      "Epoch 609/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8284e-04 - val_loss: 5.4212e-04\n",
      "Epoch 610/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.8227e-04 - val_loss: 5.4166e-04\n",
      "Epoch 611/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8171e-04 - val_loss: 5.4113e-04\n",
      "Epoch 612/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.8114e-04 - val_loss: 5.4065e-04\n",
      "Epoch 613/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.8057e-04 - val_loss: 5.4020e-04\n",
      "Epoch 614/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.8001e-04 - val_loss: 5.3969e-04\n",
      "Epoch 615/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.7945e-04 - val_loss: 5.3914e-04\n",
      "Epoch 616/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.7889e-04 - val_loss: 5.3867e-04\n",
      "Epoch 617/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.7832e-04 - val_loss: 5.3817e-04\n",
      "Epoch 618/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.7776e-04 - val_loss: 5.3766e-04\n",
      "Epoch 619/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.7720e-04 - val_loss: 5.3714e-04\n",
      "Epoch 620/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.7664e-04 - val_loss: 5.3665e-04\n",
      "Epoch 621/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.7608e-04 - val_loss: 5.3616e-04\n",
      "Epoch 622/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.7552e-04 - val_loss: 5.3568e-04\n",
      "Epoch 623/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.7496e-04 - val_loss: 5.3521e-04\n",
      "Epoch 624/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.7441e-04 - val_loss: 5.3470e-04\n",
      "Epoch 625/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.7386e-04 - val_loss: 5.3423e-04\n",
      "Epoch 626/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.7330e-04 - val_loss: 5.3373e-04\n",
      "Epoch 627/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.7275e-04 - val_loss: 5.3321e-04\n",
      "Epoch 628/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.7219e-04 - val_loss: 5.3277e-04\n",
      "Epoch 629/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.7164e-04 - val_loss: 5.3228e-04\n",
      "Epoch 630/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.7109e-04 - val_loss: 5.3179e-04\n",
      "Epoch 631/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.7054e-04 - val_loss: 5.3126e-04\n",
      "Epoch 632/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.6999e-04 - val_loss: 5.3079e-04\n",
      "Epoch 633/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6944e-04 - val_loss: 5.3030e-04\n",
      "Epoch 634/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6890e-04 - val_loss: 5.2981e-04\n",
      "Epoch 635/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6835e-04 - val_loss: 5.2930e-04\n",
      "Epoch 636/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6780e-04 - val_loss: 5.2885e-04\n",
      "Epoch 637/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6726e-04 - val_loss: 5.2838e-04\n",
      "Epoch 638/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6671e-04 - val_loss: 5.2787e-04\n",
      "Epoch 639/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6617e-04 - val_loss: 5.2740e-04\n",
      "Epoch 640/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6563e-04 - val_loss: 5.2689e-04\n",
      "Epoch 641/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6509e-04 - val_loss: 5.2645e-04\n",
      "Epoch 642/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6455e-04 - val_loss: 5.2599e-04\n",
      "Epoch 643/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6401e-04 - val_loss: 5.2550e-04\n",
      "Epoch 644/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6347e-04 - val_loss: 5.2499e-04\n",
      "Epoch 645/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6293e-04 - val_loss: 5.2449e-04\n",
      "Epoch 646/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6239e-04 - val_loss: 5.2404e-04\n",
      "Epoch 647/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6185e-04 - val_loss: 5.2359e-04\n",
      "Epoch 648/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.6132e-04 - val_loss: 5.2311e-04\n",
      "Epoch 649/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.6079e-04 - val_loss: 5.2260e-04\n",
      "Epoch 650/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.6025e-04 - val_loss: 5.2212e-04\n",
      "Epoch 651/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5972e-04 - val_loss: 5.2170e-04\n",
      "Epoch 652/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5919e-04 - val_loss: 5.2124e-04\n",
      "Epoch 653/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5866e-04 - val_loss: 5.2075e-04\n",
      "Epoch 654/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5813e-04 - val_loss: 5.2028e-04\n",
      "Epoch 655/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5760e-04 - val_loss: 5.1983e-04\n",
      "Epoch 656/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5707e-04 - val_loss: 5.1936e-04\n",
      "Epoch 657/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5654e-04 - val_loss: 5.1889e-04\n",
      "Epoch 658/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5601e-04 - val_loss: 5.1840e-04\n",
      "Epoch 659/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5549e-04 - val_loss: 5.1796e-04\n",
      "Epoch 660/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5496e-04 - val_loss: 5.1749e-04\n",
      "Epoch 661/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5444e-04 - val_loss: 5.1702e-04\n",
      "Epoch 662/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5392e-04 - val_loss: 5.1659e-04\n",
      "Epoch 663/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5339e-04 - val_loss: 5.1610e-04\n",
      "Epoch 664/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.5288e-04 - val_loss: 5.1566e-04\n",
      "Epoch 665/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5235e-04 - val_loss: 5.1524e-04\n",
      "Epoch 666/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.5183e-04 - val_loss: 5.1477e-04\n",
      "Epoch 667/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5132e-04 - val_loss: 5.1430e-04\n",
      "Epoch 668/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5080e-04 - val_loss: 5.1385e-04\n",
      "Epoch 669/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.5028e-04 - val_loss: 5.1337e-04\n",
      "Epoch 670/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.4976e-04 - val_loss: 5.1295e-04\n",
      "Epoch 671/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.4925e-04 - val_loss: 5.1248e-04\n",
      "Epoch 672/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.4874e-04 - val_loss: 5.1204e-04\n",
      "Epoch 673/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4822e-04 - val_loss: 5.1158e-04\n",
      "Epoch 674/800\n",
      "13465/13465 [==============================] - 1s 95us/step - loss: 6.4771e-04 - val_loss: 5.1114e-04\n",
      "Epoch 675/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4720e-04 - val_loss: 5.1066e-04\n",
      "Epoch 676/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.4669e-04 - val_loss: 5.1018e-04\n",
      "Epoch 677/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.4618e-04 - val_loss: 5.0974e-04\n",
      "Epoch 678/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.4567e-04 - val_loss: 5.0930e-04\n",
      "Epoch 679/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4516e-04 - val_loss: 5.0887e-04\n",
      "Epoch 680/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.4466e-04 - val_loss: 5.0842e-04\n",
      "Epoch 681/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.4414e-04 - val_loss: 5.0805e-04\n",
      "Epoch 682/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.4364e-04 - val_loss: 5.0758e-04\n",
      "Epoch 683/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4313e-04 - val_loss: 5.0710e-04\n",
      "Epoch 684/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.4263e-04 - val_loss: 5.0666e-04\n",
      "Epoch 685/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4213e-04 - val_loss: 5.0623e-04\n",
      "Epoch 686/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.4163e-04 - val_loss: 5.0575e-04\n",
      "Epoch 687/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4113e-04 - val_loss: 5.0533e-04\n",
      "Epoch 688/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.4063e-04 - val_loss: 5.0490e-04\n",
      "Epoch 689/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.4013e-04 - val_loss: 5.0446e-04\n",
      "Epoch 690/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3963e-04 - val_loss: 5.0406e-04\n",
      "Epoch 691/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.3913e-04 - val_loss: 5.0364e-04\n",
      "Epoch 692/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3863e-04 - val_loss: 5.0314e-04\n",
      "Epoch 693/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3814e-04 - val_loss: 5.0270e-04\n",
      "Epoch 694/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.3765e-04 - val_loss: 5.0226e-04\n",
      "Epoch 695/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.3715e-04 - val_loss: 5.0180e-04\n",
      "Epoch 696/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3666e-04 - val_loss: 5.0137e-04\n",
      "Epoch 697/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3617e-04 - val_loss: 5.0100e-04\n",
      "Epoch 698/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.3567e-04 - val_loss: 5.0052e-04\n",
      "Epoch 699/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.3518e-04 - val_loss: 5.0007e-04\n",
      "Epoch 700/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3469e-04 - val_loss: 4.9972e-04\n",
      "Epoch 701/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.3420e-04 - val_loss: 4.9923e-04\n",
      "Epoch 702/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3372e-04 - val_loss: 4.9883e-04\n",
      "Epoch 703/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.3323e-04 - val_loss: 4.9838e-04\n",
      "Epoch 704/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3275e-04 - val_loss: 4.9797e-04\n",
      "Epoch 705/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3226e-04 - val_loss: 4.9756e-04\n",
      "Epoch 706/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.3177e-04 - val_loss: 4.9715e-04\n",
      "Epoch 707/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3129e-04 - val_loss: 4.9671e-04\n",
      "Epoch 708/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.3081e-04 - val_loss: 4.9628e-04\n",
      "Epoch 709/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.3033e-04 - val_loss: 4.9586e-04\n",
      "Epoch 710/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.2985e-04 - val_loss: 4.9543e-04\n",
      "Epoch 711/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2937e-04 - val_loss: 4.9499e-04\n",
      "Epoch 712/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2889e-04 - val_loss: 4.9463e-04\n",
      "Epoch 713/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.2841e-04 - val_loss: 4.9419e-04\n",
      "Epoch 714/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2794e-04 - val_loss: 4.9373e-04\n",
      "Epoch 715/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.2746e-04 - val_loss: 4.9336e-04\n",
      "Epoch 716/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.2698e-04 - val_loss: 4.9292e-04\n",
      "Epoch 717/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2651e-04 - val_loss: 4.9256e-04\n",
      "Epoch 718/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2604e-04 - val_loss: 4.9212e-04\n",
      "Epoch 719/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2556e-04 - val_loss: 4.9172e-04\n",
      "Epoch 720/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2509e-04 - val_loss: 4.9128e-04\n",
      "Epoch 721/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2462e-04 - val_loss: 4.9087e-04\n",
      "Epoch 722/800\n",
      "13465/13465 [==============================] - 1s 94us/step - loss: 6.2415e-04 - val_loss: 4.9045e-04\n",
      "Epoch 723/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.2368e-04 - val_loss: 4.9004e-04\n",
      "Epoch 724/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2321e-04 - val_loss: 4.8966e-04\n",
      "Epoch 725/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2275e-04 - val_loss: 4.8921e-04\n",
      "Epoch 726/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2228e-04 - val_loss: 4.8882e-04\n",
      "Epoch 727/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.2181e-04 - val_loss: 4.8839e-04\n",
      "Epoch 728/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2135e-04 - val_loss: 4.8801e-04\n",
      "Epoch 729/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.2089e-04 - val_loss: 4.8757e-04\n",
      "Epoch 730/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.2042e-04 - val_loss: 4.8719e-04\n",
      "Epoch 731/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1996e-04 - val_loss: 4.8679e-04\n",
      "Epoch 732/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.1950e-04 - val_loss: 4.8639e-04\n",
      "Epoch 733/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.1904e-04 - val_loss: 4.8596e-04\n",
      "Epoch 734/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.1858e-04 - val_loss: 4.8562e-04\n",
      "Epoch 735/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1812e-04 - val_loss: 4.8522e-04\n",
      "Epoch 736/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1766e-04 - val_loss: 4.8477e-04\n",
      "Epoch 737/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1721e-04 - val_loss: 4.8440e-04\n",
      "Epoch 738/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1675e-04 - val_loss: 4.8404e-04\n",
      "Epoch 739/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1630e-04 - val_loss: 4.8363e-04\n",
      "Epoch 740/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1585e-04 - val_loss: 4.8322e-04\n",
      "Epoch 741/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.1539e-04 - val_loss: 4.8278e-04\n",
      "Epoch 742/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1494e-04 - val_loss: 4.8242e-04\n",
      "Epoch 743/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1449e-04 - val_loss: 4.8204e-04\n",
      "Epoch 744/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1404e-04 - val_loss: 4.8159e-04\n",
      "Epoch 745/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.1359e-04 - val_loss: 4.8124e-04\n",
      "Epoch 746/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1314e-04 - val_loss: 4.8085e-04\n",
      "Epoch 747/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.1269e-04 - val_loss: 4.8045e-04\n",
      "Epoch 748/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1225e-04 - val_loss: 4.8007e-04\n",
      "Epoch 749/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1180e-04 - val_loss: 4.7969e-04\n",
      "Epoch 750/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.1136e-04 - val_loss: 4.7932e-04\n",
      "Epoch 751/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1091e-04 - val_loss: 4.7889e-04\n",
      "Epoch 752/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1047e-04 - val_loss: 4.7854e-04\n",
      "Epoch 753/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.1002e-04 - val_loss: 4.7811e-04\n",
      "Epoch 754/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0958e-04 - val_loss: 4.7774e-04\n",
      "Epoch 755/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0914e-04 - val_loss: 4.7736e-04\n",
      "Epoch 756/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.0870e-04 - val_loss: 4.7699e-04\n",
      "Epoch 757/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0826e-04 - val_loss: 4.7662e-04\n",
      "Epoch 758/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0783e-04 - val_loss: 4.7620e-04\n",
      "Epoch 759/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0739e-04 - val_loss: 4.7583e-04\n",
      "Epoch 760/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0695e-04 - val_loss: 4.7546e-04\n",
      "Epoch 761/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0652e-04 - val_loss: 4.7510e-04\n",
      "Epoch 762/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0608e-04 - val_loss: 4.7473e-04\n",
      "Epoch 763/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.0565e-04 - val_loss: 4.7435e-04\n",
      "Epoch 764/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 6.0522e-04 - val_loss: 4.7398e-04\n",
      "Epoch 765/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0479e-04 - val_loss: 4.7361e-04\n",
      "Epoch 766/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0436e-04 - val_loss: 4.7321e-04\n",
      "Epoch 767/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0393e-04 - val_loss: 4.7283e-04\n",
      "Epoch 768/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0350e-04 - val_loss: 4.7249e-04\n",
      "Epoch 769/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0307e-04 - val_loss: 4.7207e-04\n",
      "Epoch 770/800\n",
      "13465/13465 [==============================] - 1s 93us/step - loss: 6.0264e-04 - val_loss: 4.7170e-04\n",
      "Epoch 771/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0221e-04 - val_loss: 4.7136e-04\n",
      "Epoch 772/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 6.0179e-04 - val_loss: 4.7100e-04\n",
      "Epoch 773/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0136e-04 - val_loss: 4.7065e-04\n",
      "Epoch 774/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 6.0094e-04 - val_loss: 4.7025e-04\n",
      "Epoch 775/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0052e-04 - val_loss: 4.6989e-04\n",
      "Epoch 776/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 6.0009e-04 - val_loss: 4.6953e-04\n",
      "Epoch 777/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 5.9967e-04 - val_loss: 4.6916e-04\n",
      "Epoch 778/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9925e-04 - val_loss: 4.6882e-04\n",
      "Epoch 779/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9883e-04 - val_loss: 4.6842e-04\n",
      "Epoch 780/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9841e-04 - val_loss: 4.6809e-04\n",
      "Epoch 781/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 5.9799e-04 - val_loss: 4.6772e-04\n",
      "Epoch 782/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 5.9758e-04 - val_loss: 4.6734e-04\n",
      "Epoch 783/800\n",
      "13465/13465 [==============================] - 1s 107us/step - loss: 5.9716e-04 - val_loss: 4.6699e-04\n",
      "Epoch 784/800\n",
      "13465/13465 [==============================] - 1s 104us/step - loss: 5.9675e-04 - val_loss: 4.6664e-04\n",
      "Epoch 785/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 5.9633e-04 - val_loss: 4.6627e-04\n",
      "Epoch 786/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 5.9592e-04 - val_loss: 4.6591e-04\n",
      "Epoch 787/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 5.9550e-04 - val_loss: 4.6556e-04\n",
      "Epoch 788/800\n",
      "13465/13465 [==============================] - 1s 105us/step - loss: 5.9509e-04 - val_loss: 4.6519e-04\n",
      "Epoch 789/800\n",
      "13465/13465 [==============================] - 1s 106us/step - loss: 5.9468e-04 - val_loss: 4.6483e-04\n",
      "Epoch 790/800\n",
      "13465/13465 [==============================] - 1s 98us/step - loss: 5.9427e-04 - val_loss: 4.6453e-04\n",
      "Epoch 791/800\n",
      "13465/13465 [==============================] - 1s 103us/step - loss: 5.9386e-04 - val_loss: 4.6419e-04\n",
      "Epoch 792/800\n",
      "13465/13465 [==============================] - 1s 102us/step - loss: 5.9345e-04 - val_loss: 4.6381e-04\n",
      "Epoch 793/800\n",
      "13465/13465 [==============================] - 1s 101us/step - loss: 5.9304e-04 - val_loss: 4.6345e-04\n",
      "Epoch 794/800\n",
      "13465/13465 [==============================] - 1s 92us/step - loss: 5.9264e-04 - val_loss: 4.6310e-04\n",
      "Epoch 795/800\n",
      "13465/13465 [==============================] - 1s 91us/step - loss: 5.9223e-04 - val_loss: 4.6276e-04\n",
      "Epoch 796/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9183e-04 - val_loss: 4.6239e-04\n",
      "Epoch 797/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9142e-04 - val_loss: 4.6206e-04\n",
      "Epoch 798/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9102e-04 - val_loss: 4.6169e-04\n",
      "Epoch 799/800\n",
      "13465/13465 [==============================] - 1s 90us/step - loss: 5.9062e-04 - val_loss: 4.6135e-04\n",
      "Epoch 800/800\n",
      "13465/13465 [==============================] - 1s 89us/step - loss: 5.9021e-04 - val_loss: 4.6100e-04\n"
     ]
    }
   ],
   "source": [
    "# Train the autoencoder on benign traffic, keeping only the best weights.\n",
    "# MSE reconstruction loss with plain SGD (no explicit learning rate:\n",
    "# Keras defaults apply).\n",
    "autoencoder.compile(loss='mean_squared_error',\n",
    "                    optimizer='sgd')\n",
    "\n",
    "# Checkpoint callback: save the model to HDF5 only when val_loss improves,\n",
    "# so the file always holds the best epoch seen so far.\n",
    "cp = ModelCheckpoint(filepath=\"autoencoder_traffic_minmax.h5\",\n",
    "                               save_best_only=True,\n",
    "                               verbose=0)\n",
    "\n",
    "# TensorBoard callback: write graph and weight images to ./logs\n",
    "# (histogram_freq=0 disables per-epoch histograms).\n",
    "tb = TensorBoard(log_dir='./logs',\n",
    "                histogram_freq=0,\n",
    "                write_graph=True,\n",
    "                write_images=True)\n",
    "\n",
    "# Autoencoder fit: input and target are both x_train (self-reconstruction);\n",
    "# x_opt is used as the validation set driving val_loss / checkpointing.\n",
    "# NOTE(review): autoencoder, x_train, x_opt and nb_epoch must be defined by\n",
    "# earlier cells -- confirm this cell survives Restart Kernel & Run All.\n",
    "history = autoencoder.fit(x_train, x_train,\n",
    "                    epochs=nb_epoch,\n",
    "                    validation_data=(x_opt, x_opt),\n",
    "                    verbose=1,\n",
    "                    callbacks=[cp, tb]).history"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 376
    },
    "colab_type": "code",
    "id": "F6QtojGe9VGd",
    "outputId": "449acb78-7bfe-421b-9305-17a7b92702ee"
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfsAAAFnCAYAAAChL+DqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzt3Xt0VPW99/H3npmEJOQeZzCIYgwI\nCKLSasXIVS6a1qO2XiIix2rVFqQiKNIgEJeIgpaqaB9Foes8oJgWeXq0KrFa7FGI4aAWJFoRVEgg\nQu4Qcp2Z/fyRZJKYhBkwQ7KHz2st1sy+zu+LuD7799s3wzRNExEREQlZtu5ugIiIiASXwl5ERCTE\nKexFRERCnMJeREQkxCnsRUREQpzCXkREJMQp7EVC3KBBg/jtb3/bbv78+fMZNGjQce9v/vz5rFix\n4pjrbNiwgdtuuy3g+SISXAp7kVPAl19+SVVVlW+6vr6ezz77rBtbJCInk8Je5BTwk5/8hL///e++\n6Q8//JDzzz+/zTpvv/02P/vZz7jyyiuZNm0a+/btA6C8vJzbb7+d8ePHc9ddd3HkyBHfNrt372bq\n1KlMnjyZq6+++rgOICoqKrj33nuZPHky6enprFy50rfsD3/4A5MnT2by5MlMmzaNgwcPHnO+iByb\nwl7kFHDVVVfxt7/9zTf95ptvcuWVV/qmDxw4wIIFC3juuefYuHEjY8eOZeHChQC8+OKLJCQk8I9/\n/IOFCxfy4YcfAuD1epkxYwbXXHMNOTk5ZGVlMX36dNxud0BtWr58OXFxceTk5PDKK6+wbt06tm3b\nxldffcXGjRv529/+Rk5ODhMnTiQ3N7fT+SLin8Je5BRwySWX8NVXX1FaWkpNTQ2ffvopI0eO9C3f\nvHkzP/nJT+jfvz8AN9xwA3l5ebjdbrZt28ZVV10FQL9+/bjkkksA+PrrryktLeX6668H4Ec/+hGJ\niYl8+umnAbXpn//8J1OmTAEgPj6eiRMnsnnzZmJjYykrK+ONN96gsrKSW2+9lWuvvbbT+SLin8Je\n5BRgt9uZNGkSb7/9Nps2beLyyy/H4XD4lpeXlxMbG+ubjomJwTRNysvLqaysJCYmxreseb3Dhw9T\nW1vLVVddxZVXXsmVV15JaWkpFRUVAbWprKyszW/GxsZSWlpKnz59WLFihW+E4a677qKoqKjT+SLi\nn8Je5BSRnp5OTk4OGzduJD09vc2ypKSkNiFdWVmJzWYjISGB2NjYNufpy8rKAHC5XPTu3ZuNGzf6\n/nz44YdMnDgxoPacdtppbX6zoqKC0047DYBLL72UlStXsnnzZpKTk3nyySePOV9Ejk1hL3KKuOii\nizh06BBfffWVbyi+WVpaGtu2baOgoACAV199lbS0NBwOBxdeeCHvvvsuAPv27ePjjz8G4IwzzuD0\n009n48aNQONBwOzZs6murg6oPWPHjiU7O9u37d///nfGjh3Lhx9+yMMPP4zX6yUqKorBgwdjGEan\n80XEP4f/VUQkFBiGwcSJE6mpqcFma3ucf/rpp7N48WKmT59OQ0MD/fr145FHHgHg7rvv5r777mP8\n+PGkpqYyadIk3/6WL19OVlYWTz31FDabjV/+8pdERUUF1J5Zs2aRlZXFlVdeic1m46677mL48OHU\n1dXx5ptvMnnyZMLDw0lMTGTJkiW4XK4O54uIf4beZy8iIhLaNIwvIiIS4hT2IiIiIU5hLyIiEuIU\n9iIiIiFOYS8iIhLiQvLWu+LiI/5XOk4JCVGUlwd2/3BPFyq1hEodoFp6qlCpJVTqANVyLE5nTKfL\n1LMPkMNh7+4mdJlQqSVU6gDV0lOFSi2hUgeolhOlsBcREQlxCnsREZEQp7AXEREJcUG9QG/JkiVs\n374dwzDIzMxk+PDhvmVbtmxh+fLl2O
12Ro8ezYwZM6ipqWHevHmUlpZSV1fH9OnTGTduHPPmzSM/\nP5/4+HgA7rjjDsaOHRvMpouIiISMoIX91q1b2bt3L9nZ2ezZs4fMzEzfG64AFi9ezKpVq+jTpw9T\np05l8uTJ7Nq1i2HDhnHnnXeyf/9+br/9dsaNGwfA7Nmzfd9FREQkcEEL+9zcXCZMmABAamoqlZWV\nVFVVER0dTUFBAXFxcSQnJwMwZswYcnNzufXWW33bFxUV0adPn2A1T0RE5JQRtLAvKSlh6NChvunE\nxESKi4uJjo6muLiYxMTENsua36MNkJGRwXfffcfzzz/vm7d27Vr+9Kc/kZSUxIIFC9ps/30JCVFB\nuaXhWPcwWk2o1BIqdYBq6alCpZZQqQNUy4k4aQ/VOZ436b766qt88cUXPPDAA7z++utcc801xMfH\nM2TIEFauXMmzzz7LwoULO90+GA9ccDpjgvKwnu4QKrWESh2gWnqqUKklVOqAnl3LihV/4Msvv6Cs\nrJTa2lr69j2D2Ng4lix5osP1m2t566036N07mjFjftip6mMdOAQt7F0uFyUlJb7pQ4cO4XQ6O1x2\n8OBBXC4XO3fuJCkpieTkZIYMGYLH46GsrIyRI0f61h0/fjxZWVnBaraIiMgJmTnzPgDeeusNvv56\nD/fcMyug7dLTrw5ms4Aghn1aWhorVqwgIyOD/Px8XC4X0dHRAPTr14+qqioKCws5/fTT2bRpE08+\n+ST//Oc/2b9/P/Pnz6ekpITq6moSEhKYOXMmc+fO5cwzzyQvL4+BAwcGq9kiIiJd5pNPtvHqq2up\nrq7mnnvu49NPP+b999/D6/UyYcJ4brrpP1m16gXi4+NJSUllw4Y/Yxg29u79hrFjr+D22+/qknYE\nLexHjBjB0KFDycjIwDAMFi1axIYNG4iJiWHixIlkZWUxZ84cANLT00lJSSE5OZn58+czZcoUamtr\nWbhwITabjVtuuYVZs2YRGRlJVFQUjz32WLCa3Y7Xa5KzdR+XXdSPuF6h85hGEZFQ9tRftrNjT2mX\n7nN4ahKzbrjguLfbs2c369ZtIDw8nE8//Zg//vElbDYbGRnX8rOf/aLNup9/ns8rr7yG1+vlhhuu\n7vlhD3D//fe3mR48eLDv+8UXX9zmVjyAiIgIfv/737fbz6WXXsprr70WnEb68fWBw/zl/T18/d0R\nZlw7rFvaICIi1jVgwEDCw8OBxpy75567sNvtlJeXc/jw4TbrDho0mIiIiC5vQ0i+9a4rNbg9ANQ3\neLu5JSIiEqgT6YEHS1hYGADffVdEdvbLrF79MlFRUfzylze3W9duD84Ish6X649hAGAS+N0EIiIi\n31dRUUFCQgJRUVF8+eW/2b9/Pw0NDSfltxX2ftgas57juHNQRESknYEDzyUyMorf/OZ23nvvHTIy\nMvj975eelN/WMH6Ajuc5ASIicupqfSvdiBE/ZsSIHwONQ/TLlz/rW9bRMwOa1wV48833uqxN6tn7\nYTQP4yvrRUTEohT2IiIiIU5h74fhO2evrr2IiFiTwt4Pg+ar8UVERKxJYe9Hc89eaS8iIlalsPen\nKey9GsYXERGL0q13fmgYX0REjsf+/YU8/fTvKSsrxev1cP75FzB9+m/p1avjx+Bu2vQu48ZNCGqb\n1LP3o2UYX3EvIiLH5vV6mT9/LjfeeDMvvfR/Wb36ZU4/vS/Llj3a6TZr1/5X0Nulnn2AFPUiIuLP\n1q0fceaZZ/HjH1/im5eRcQs33/wL7r//t/TtewZ79+6lsrKCJ55Yyt//vondu3eRmfkAS5Y8EbR2\nKez9MPS4XBERy/nj9tXkl/67S/c5NGkw0y+4/Zjr7Nv3LeeeO6jNPMMwOOecVKqrj+LxeHj66T/y\n4Yf/w3PPPUdW1uO8/PJ/BTXoQcP4fjWfs1fXXkRE/DPweDzt5pqmic1m9/X4hw0bzjfffHPSWqWe\nvR
++nr3SXkTEMvz1wIOlf/+z+etf17eZZ5om33zzNf3798frNX3zDN9FYcGnnn2ANIwvIiL+XHzx\nTzhw4AC5uR/65mVnv8wFF1xIbGwcO3Z8CkB+/g5SU1MBfAcAwaSw96PlRThKexEROTabzcby5St4\n/fX/xx133Mrtt9/C3r17mTXrAQDq6+uZO3cWL774PDNmzADg3HMHceed04LaLg3j+6EH6ImIyPFI\nSjqNxx77fYfLLr98DGlpo4CWV9w+/fT/CXqb1LP3R1fji4iIxaln70fL5RNKexEROXHz52d122+r\nZ+9Hyzn7bm6IiIjICVLY+6H32YuIiNUp7AOkrBcREatS2PvhG8bv5naIiIicKIW9H74L9JT2IiJi\nUQp7f/S4XBERsTiFvR96nb2IiFidwt4PnbMXERGrU9j70dKzV9yLiIg1Kez90eNyRUTE4oL6uNwl\nS5awfft2DMMgMzOT4cOH+5Zt2bKF5cuXY7fbGT16NDNmzKCmpoZ58+ZRWlpKXV0d06dPZ9y4cRQV\nFTF37lw8Hg9Op5MnnniC8PDwYDbdx1Dai4iIxQWtZ79161b27t1LdnY2jz76KI8++mib5YsXL2bF\nihWsW7eOzZs3s3v3bjZt2sSwYcNYu3YtTz31FI8//jgAzzzzDFOmTOGVV16hf//+rF+/PljNbsf3\nBL2T9osiIiJdK2hhn5uby4QJEwBITU2lsrKSqqoqAAoKCoiLiyM5ORmbzcaYMWPIzc0lPT2dO++8\nE4CioiL69OkDQF5eHldccQUA48aNIzc3N1jN7pQ69iIiYlVBG8YvKSlh6NChvunExESKi4uJjo6m\nuLiYxMTENssKCgp80xkZGXz33Xc8//zzANTU1PiG7ZOSkiguLj7mbyckROFw2LukDlt481+RidMZ\n0yX77AlCpZZQqQNUS08VKrWESh2gWk7ESXvF7fFczf7qq6/yxRdf8MADD/D6668f937Ky6uPu32d\nqaiqA8BrQnHxkS7bb3dyOmNCopZQqQNUS08VKrWESh2gWvztrzNBG8Z3uVyUlJT4pg8dOoTT6exw\n2cGDB3G5XOzcuZOioiIAhgwZgsfjoaysjKioKGpra9use7LocbkiImJ1QQv7tLQ0cnJyAMjPz8fl\nchEdHQ1Av379qKqqorCwELfbzaZNm0hLS2Pbtm2sXr0aaDwNUF1dTUJCApdddplvX++88w6jRo0K\nVrPb8z1UR2kvIiLWFLRh/BEjRjB06FAyMjIwDINFixaxYcMGYmJimDhxIllZWcyZMweA9PR0UlJS\nSE5OZv78+UyZMoXa2loWLlyIzWZj5syZPPjgg2RnZ9O3b1+uvfbaYDW7HT0uV0RErM4wQ/DRcF15\nDuRwdT2znvmQmKhwnv7t5V223+4UKue8QqUOUC09VajUEip1gGrxt7/O6Al6fvjO2WsYX0RELEph\n74fvRTjKehERsSiFvR++J+gp7UVExKIU9n74LtDr1laIiIicOIW9XxrGFxERa1PY+2HoqToiImJx\nCvsAqWcvIiJWpbD3w+Z7gp6IiIg1Kez9ab4a36u4FxERa1LY+6Gr8UVExOoU9n603Gffve0QERE5\nUQp7v9S3FxERa1PY+6GevYiIWJ3CPkDKehERsSqFvR82de1FRMTiFPb+NGW97rwTERGrUtj7Yfhf\nRUREpEdT2PthtDwcX6+5FRERS1LYHwdFvYiIWJHCPgB68Z2IiFiZwj4Ahu9lOEp7ERGxHoV9AHT3\nnYiIWJnC/jgo7EVExIoU9gEwdNJeREQsTGEfkKZz9sp6ERGxIIV9AHzn7Lu3GSIiIidEYR8AjeKL\niIiVKewDoFvvRETEyhT2gdCtdyIiYmEK+wA0D+Mr7EVExIoU9gHQrXciImJljmDufMmSJWzfvh3D\nMMjMzGT48OG+ZVu2bGH58uXY7XZGjx7NjBkzAFi2bBkff/wxbreb
u+++m0mTJjFv3jzy8/OJj48H\n4I477mDs2LHBbLqPaZoQewgqeivqRUTEkoIW9lu3bmXv3r1kZ2ezZ88eMjMzyc7O9i1fvHgxq1at\nok+fPkydOpXJkydTUlLCV199RXZ2NuXl5Vx33XVMmjQJgNmzZzNu3LhgNbdThVUHIOV/CS/rg2lO\nPOm/LyIi8kMFLexzc3OZMGECAKmpqVRWVlJVVUV0dDQFBQXExcWRnJwMwJgxY8jNzWXKlCm+3n9s\nbCw1NTV4PJ5gNTEgNe6axi+Ohm5th4iIyIkK2jn7kpISEhISfNOJiYkUFxcDUFxcTGJiYrtldrud\nqKgoANavX8/o0aOx2+0ArF27lmnTpnHfffdRVlYWrGZ3wHd5Hl5doSciIhYU1HP2rZnHEZTvvvsu\n69evZ/Xq1QBcc801xMfHM2TIEFauXMmzzz7LwoULO90+ISEKh8P+g9sMUGz29n1PSowmPqZXl+y3\nuzmdMd3dhC4RKnWAaumpQqWWUKkDVMuJCFrYu1wuSkpKfNOHDh3C6XR2uOzgwYO4XC4APvjgA55/\n/nleeuklYmIa/xJGjhzpW3f8+PFkZWUd87fLy6u7qgwqK5uG8Q2TktIqGmrru2zf3cXpjKG4+Eh3\nN+MHC5U6QLX0VKFSS6jUAarF3/46E7Rh/LS0NHJycgDIz8/H5XIRHR0NQL9+/aiqqqKwsBC3282m\nTZtIS0vjyJEjLFu2jBdeeMF35T3AzJkzKSgoACAvL4+BAwcGq9ntGC0Py9WN9iIiYklB69mPGDGC\noUOHkpGRgWEYLFq0iA0bNhATE8PEiRPJyspizpw5AKSnp5OSkuK7Cn/WrFm+/SxdupRbbrmFWbNm\nERkZSVRUFI899liwmt1O86NyDUMPyxUREWsyzOM5mW4RXTks8k3lPp78+Fm8VXEsGXc/CSFwzj5U\nhsFCpQ5QLT1VqNQSKnWAavG3v87oCXp+6Ol5IiJidQp7P3zn7I3ju6NARESkp1DY+2G0dO11fZ6I\niFiSwt4Po9VDdXSJnoiIWJHC3o/Ww/jKehERsSKFvR8tw/jq14uIiDUp7P1o/VAdhb2IiFiRwt4P\nX8/eMHU1voiIWJLC3g+j9YSyXkRELEhh70fbq/FFRESsR2HvR8swvh6qIyIi1qSw96ulZy8iImJF\nCns/bHqCnoiIWJzC3q9WV+N3b0NEREROiMLejzb32atrLyIiFqSw96P1ML669iIiYkUK+wAZGsYX\nERGLUtj70fYVt4p7ERGxHoW9H4bvr0hBLyIi1qSw98PXsTd0652IiFiTwt6Pto/LVdqLiIj1KOz9\naPu43O5ti4iIyIlQ2Pth6HG5IiJicQp7P9o+VKcbGyIiInKCFPZ+tFygp3P2IiJiTQp7P1r37JX1\nIiJiRQp7Pwyj5a9IWS8iIlaksPfD1683TD1BT0RELElh74cu0BMREatT2PvR8mx8Jb2IiFiTwt4P\nX8/e0ItwRETEmhzB3PmSJUvYvn07hmGQmZnJ8OHDfcu2bNnC8uXLsdvtjB49mhkzZgCwbNkyPv74\nY9xuN3fffTeTJk2iqKiIuXPn4vF4cDqdPPHEE4SHhwez6T7q2YuIiNUFrWe/detW9u7dS3Z2No8+\n+iiPPvpom+WLFy9mxYoVrFu3js2bN7N7924++ugjvvrqK7Kzs3nppZdYsmQJAM888wxTpkzhlVde\noX///qxfvz5YzW6nuWdvGOBV3ouIiAUFLexzc3OZMGECAKmpqVRWVlJVVQVAQUEBcXFxJCcnY7PZ\nGDNmDLm5uVx88cU8/fTTAMTGxlJTU4PH4yEvL48rrrgCgHHjxpGbmxusZrfT5n32Xu9J+10REZGu\nErSwLykpISEhwTedmJhIcXExAMXFxSQmJrZbZrfbiYqKAmD9+vWMHj0au91OTU2Nb9g+KSnJt5+T\npqlHr6gXERErCuo5+9aO5+K2
d999l/Xr17N69eoT2k9CQhQOh/242ndsBmASE9MLpzOmC/fbfVRH\nz6NaeqZQqSVU6gDVciKCFvYul4uSkhLf9KFDh3A6nR0uO3jwIC6XC4APPviA559/npdeeomYmMa/\nhKioKGpra4mIiGizbmfKy6u7uhwAyiqqKS4+EpR9n0xOZ4zq6GFUS88UKrWESh2gWvztrzNBG8ZP\nS0sjJycHgPz8fFwuF9HR0QD069ePqqoqCgsLcbvdbNq0ibS0NI4cOcKyZct44YUXiI+P9+3rsssu\n8+3rnXfeYdSoUcFqdoeaL9IzTQ3ki4iI9QStZz9ixAiGDh1KRkYGhmGwaNEiNmzYQExMDBMnTiQr\nK4s5c+YAkJ6eTkpKCtnZ2ZSXlzNr1izffpYuXcrMmTN58MEHyc7Opm/fvlx77bXBavYxeXU5voiI\nWJBhhuCTYrp6iOee936HaXi4re8sLh7ct0v33R1CZRgsVOoA1dJThUotoVIHqBZ/++uMnqAXkMZh\nfE/oHReJiMgpQGEfgOY77b06Zy8iIhaksA9I0wV63dwKERGRE6GwPw7q2YuIiBUp7APQfOudwl5E\nRKxIYR+Q5rDv5maIiIicgIDCfufOnWzatAmAP/zhD/znf/4n27ZtC2rDeiI9VEdERKwooLBfvHgx\nKSkpbNu2jc8++4wFCxbwzDPPBLttPUbLML669iIiYj0BhX2vXr04++yzee+997jxxhsZMGAANtup\ndAZAYS8iItYVUGLX1NTw9ttv8+6773L55ZdTUVHB4cOHg922HsN3n71O2ouIiAUFFPazZ8/mjTfe\n4L777iM6Opo1a9Zw2223BblpPUnzffY6Zy8iItYT0ItwLr30UoYNG0Z0dDQlJSWMHDmSESNGBLtt\nPYbO2YuIiJUF1LN/5JFHePvtt6moqCAjI4O1a9eSlZUV5Kb1PBrGFxERKwoo7D///HNuuOEG3n77\nba677jqeeuop9u7dG+y29Ri+99nrgbkiImJBAYV981tw33//fcaPHw9AfX198FrV42gYX0RErCug\nsE9JSSE9PZ2jR48yZMgQ/vrXvxIXFxfstvUYOmcvIiJWFtAFeosXL2bXrl2kpqYCMGDAAJYtWxbU\nhvUkzbfe6Ql6IiJiRQGFfW1tLf/4xz94+umnMQyDCy+8kAEDBgS7bT2HoWfji4iIdQU0jL9gwQKq\nqqrIyMjgxhtvpKSkhIceeijYbesxfBfoqWcvIiIWFFDPvqSkhOXLl/umx40bx6233hq0RvU8Omcv\nIiLWFfDjcmtqanzT1dXV1NXVBa1RPU3LOXuFvYiIWE9APfubbrqJq666imHDhgGQn5/PvffeG9SG\n9SQtw/gKexERsZ6Awv76668nLS2N/Px8DMNgwYIFrFmzJtht60E0jC8iItYVUNgDJCcnk5yc7Jve\nsWNHUBrUE/neeqcX4YiIiAWd8EvpT6UhbcNoHsbv5oaIiIicgBMO++YAPDXoFbciImJdxxzGHzNm\nTIehbpom5eXlQWtUT9NygV43N0REROQEHDPsX3nllZPVjh6tOew9XvXsRUTEeo4Z9mecccbJakfP\nZgCmXnErIiLWdMLn7E8leuudiIhYmcI+AHqojoiIWFlQw37JkiXcdNNNZGRktLsvf8uWLVx//fXc\ndNNNPPfcc775u3btYsKECaxdu9Y3b968eVx99dXceuut3Hrrrbz//vvBbHY7LbfeKexFRMR6An6o\nzvHaunUre/fuJTs7mz179pCZmUl2drZv+eLFi1m1ahV9+vRh6tSpTJ48mb59+/LII48wcuTIdvub\nPXs248aNC1Zzj8nXs9c5exERsaCg9exzc3OZMGECAKmpqVRWVlJVVQVAQUEBcXFxJCcnY7PZGDNm\nDLm5uYSHh/Piiy/icrmC1awT4jtnr/vsRUTEgoIW9iUlJSQkJPimExMTKS4uBqC4uJjExMR2yx
wO\nBxERER3ub+3atUybNo377ruPsrKyYDW7Q83D+LpAT0RErChow/jf90POd19zzTXEx8czZMgQVq5c\nybPPPsvChQs7XT8hIQqHw37Cv/d9YQ4HuCEszIbTGdNl++1OqqPnUS09U6jUEip1gGo5EUELe5fL\nRUlJiW/60KFDOJ3ODpcdPHjwmEP3rc/hjx8/nqysrGP+dnl59Qm2umNeT+OBSl19A8XFR7p0393B\n6YxRHT2MaumZQqWWUKkDVIu//XUmaMP4aWlp5OTkAJCfn4/L5SI6OhqAfv36UVVVRWFhIW63m02b\nNpGWltbpvmbOnElBQQEAeXl5DBw4MFjN7pDR9NfkNXXOXkRErCdoPfsRI0YwdOhQMjIyMAyDRYsW\nsWHDBmJiYpg4cSJZWVnMmTMHgPT0dFJSUti5cydLly5l//79OBwOcnJyWLFiBbfccguzZs0iMjKS\nqKgoHnvssWA1u0O69U5ERKzMMEMwwbp6iOfxD1dSUL+bs+vG8sBV6V267+4QKsNgoVIHqJaeKlRq\nCZU6QLX4219n9AS9ALT07DWMLyIi1qOwD0DzOXs9VEdERKxIYR8Am+6zFxERC1PYB8Aw1LMXERHr\nUtgHwOZ7xa3O2YuIiPUo7ANgU89eREQsTGEfAF2NLyIiVqawD4BecSsiIlamsA+AhvFFRMTKFPYB\n8IW9br0TERELUtgHoPk+exOdsxcREetR2AdA99mLiIiVKewD0HKfvcJeRESsR2EfgOZz9mgYX0RE\nLEhhH4CWc/bq2YuIiPUo7APQ3LP3KuxFRMSCFPYBaLn1TsP4IiJiPQr7AOihOiIiYmUK+wDYbU3D\n+OrZi4iIBSnsA2DXE/RERMTCFPYB0NX4IiJiZQr7ANhsOmcvIiLWpbAPgN0X9jpnLyIi1qOwD4DO\n2YuIiJUp7AOgW+9ERMTKFPYBcNj0BD0REbEuhX0Ami/QQ8P4IiJiQQr7AOgCPRERsTKFfQDsOmcv\nIiIWprAPgF332YuIiIUp7AOgsBcRESsLatgvWbKEm266iYyMDHbs2NFm2ZYtW7j++uu56aabeO65\n53zzd+3axYQJE1i7dq1vXlFREbfeeitTpkzh3nvvpb6+PpjNbqc57FHYi4iIBQUt7Ldu3crevXvJ\nzs7m0Ucf5dFHH22zfPHixaxYsYJ169axefNmdu/eTXV1NY888ggjR45ss+4zzzzDlClTeOWVV+jf\nvz/r168PVrM7ZLfZm76ZeHVFvoiIWEzQwj43N5cJEyYAkJqaSmVlJVVVVQAUFBQQFxdHcnIyNpuN\nMWPGkJubS3h4OC+++CIul6vNvvLy8rjiiisAGDduHLm5ucFqdocMjKYvJl6vwl5ERKwlaGFfUlJC\nQkKCbzoxMZHi4mIAiouLSUxPl5PkAAAd8klEQVRMbLfM4XAQERHRbl81NTWEh4cDkJSU5NvPydL8\n1jsM8CjsRUTEYhwn64e66rnygewnISEKh8Pud71AJbh7N/86iYm9iYoI67J9dxenM6a7m9AlQqUO\nUC09VajUEip1gGo5EUELe5fLRUlJiW/60KFDOJ3ODpcdPHiw3dB9a1FRUdTW1hIREeF3XYDy8uof\n2Pq2jlTWNX4xTA4VH6G3xcPe6YyhuPhIdzfjBwuVOkC19FShUkuo1AGqxd/+OhO0Yfy0tDRycnIA\nyM/Px+VyER0dDUC/fv2oqqqisLAQt9vNpk2bSEtL63Rfl112mW9f77zzDqNGjQpWsztkNA/jY2oY\nX0RELCdoPfsRI0YwdOhQMjIyMAyDRYsWsWHDBmJiYpg4cSJZWVnMmTMHgPT0dFJSUti5cydLly5l\n//79OBwOcnJyWLFiBTNnzuTBBx8kOzubvn37cu211war2R1qOWdvYirsRUTEYoJ6zv7+++9vMz14\n8GDf94svvpjs7Ow2y4cNG8aaNWs63Nef/vSnrm9ggGxG0/
l/Qz17ERGxHj1BLwDNz8Y3dOudiIhY\nkMI+AL6H6hgmHj1UR0RELEZhHwC7bxjfq569iIhYjsI+AAp7ERGxMoV9ABw2XaAnIiLWpbAPQHPP\n3rB50Sl7ERGxGoV9AHTrnYiIWJnCPgAtw/g6Zy8iItajsA9A8332jT17b/c2RkRE5Dgp7ANgtzU9\naNDwoo69iIhYjcI+AL4n6NlM3G5PN7dGRETk+CjsA2AzbGA2vgyn3qOwFxERa1HYB8ho+quqczd0\nc0tERESOj8I+QM1hX+92d3NLREREjo/CPkA2Q2EvIiLWpLAPkI3Ge+3rPQp7ERGxFoV9gNSzFxER\nq1LYB0g9exERsSqFfYB8PXuFvYiIWIzCPkDNb75z6z57ERGxGIV9gOw2DeOLiIg1KewD1Nyzb1DY\ni4iIxSjsA9T8mlu3V2EvIiLWorAPkKPpzXfq2YuIiNUo7AMUbg8DoMGrZ+OLiIi1KOwD1MveCwC3\nqbAXERFrUdgHKMLRGPYNZn03t0REROT4KOwDFBGmnr2IiFiTwj5AvcMjAajzqGcvIiLWorAPUExE\nBKAL9ERExHoU9gGKiYgCwG3WY5pmN7dGREQkcI5g7nzJkiVs374dwzDIzMxk+PDhvmVbtmxh+fLl\n2O12Ro8ezYwZMzrdZt68eeTn5xMfHw/AHXfcwdixY4PZ9HZ692rs2Zs2D/UNXnqF20/q74uIiJyo\noIX91q1b2bt3L9nZ2ezZs4fMzEyys7N9yxcvXsyqVavo06cPU6dOZfLkyZSVlXW6zezZsxk3blyw\nmutX89X4hs3D0doGhb2IiFhG0MI+NzeXCRMmAJCamkplZSVVVVVER0dTUFBAXFwcycnJAIwZM4bc\n3FzKyso63KYnaA577G6q69wkdm9zREREAha0c/YlJSUkJCT4phMTEykuLgaguLiYxMTEdsuOtc3a\ntWuZNm0a9913H2VlZcFqdqciHI3D+IbdTXWtHpkrIiLWEdRz9q2dyEVtzdtcc801xMfHM2TIEFau\nXMmzzz7LwoULO90uISEKh6Nrh9k9R2oav4TV47XZcDpjunT/J5vV298sVOoA1dJThUotoVIHqJYT\nEbSwd7lclJSU+KYPHTqE0+nscNnBgwdxuVyEhYV1uE1KSopv3vjx48nKyjrmb5eXV3dRFS3iE+IA\nMMLq2L23lMFnxHb5b5wsTmcMxcVHursZP1io1AGqpacKlVpCpQ5QLf7215mgDeOnpaWRk5MDQH5+\nPi6Xi+joaAD69etHVVUVhYWFuN1uNm3aRFpaWqfbzJw5k4KCAgDy8vIYOHBgsJrdqQhHLxyEY9i8\nHKioPOm/LyIicqKC1rMfMWIEQ4cOJSMjA8MwWLRoERs2bCAmJoaJEyeSlZXFnDlzAEhPTyclJYWU\nlJR22wDccsstzJo1i8jISKKionjssceC1exjig6LoaKhlKLK0m75fRERkRNhmCH4hJhgDPE4nTFk\nbnyC3ZVf0/DVj3j6tl8Q2eukXfLQpUJlGCxU6gDV0lOFSi2hUgeoFn/764yeoHcczozt2/glspLc\n/O+6tzEiIiIBUtgfh7NjzwLAFl1B9j9286+vSvxsISIi0v0U9sdhYHwqNsOGPa4Ut+0oz7y2gz/+\ndSfflXX91f8iIiJdRWF/HOJ6xTDCNRwME9dF+YRH1rHt34eY/+JHvPB6PvuLe8bT/kRERFqz5hVm\n3egXA6/mm8p9lNaWEHHBh5xZfx7f7nCS9/lBtn5+kB8NcvKzy87mrD6h89AHERGxNvXsj1NseAxz\nL57JBc5hNHgbOODYTtzFmxn4o0PYw91s+7KYrD/9L8+s38HXBw53d3NFRETUsz8R0WG9uev8aeyu\n+Ib/3vM2X1d+S6H9E2J+FIHTPYRvdyTxr90l/Gt3CUNTErn6srM598z47m62iIicohT2P8CA+BRm\nj/gNX1Xs4a1v3uWriq
8pMD4l8qJwUrzn8e3O08j/poz8b8oYdGY8V6edzZD+CRiG0d1NFxGRU4jC\n/gcyDINzEwZwbsIAdld8w8Zv3+OLsl3s41/0Gh7G2Qxh32cuviyo4MtX/8W5/eK4ZtQ5DOmf4H/n\nIiIiXUBh34UGxKdwz4W/4pvKfWz89j12ln7BPnbgGOZgsG0I+3a62FVYyRPrPmXQmfFcOyqFQWcp\n9EVEJLgU9kGQEncWv7ngl+w7UsjGb//B9uKd7PV8hv08OwNtg9i/83S+LKhg6SufMviseK4ddY7O\n6YuISNAo7IPorJh+3HX+NPZXFZHz7T/45NAOCj2fYzvv3wwwzmV/fjL/3lfB4y9/wpD+CVw7KoWB\n/RT6IiLStRT2J8EZ0cncPuwW0o9OJGfvP9h28F/s9/4b25BdpNoGciC/L1/sLeeLveUMPTuBa0ad\nw4Az4rq72SIiEiIU9ifR6b1d/Od5GaSf3Rj6ed99zAHvl9jO+4oBDKJw5+nkf1tO/rcfMywlkWtG\npZDaV6EvIiI/jMK+Gzijkpg65AYm9x/P29++y9bvPmE/X+AYtosBDKJg5+ns/KaMnd+UMTw1iWsu\nTyElOba7my0iIhalsO9Gzqgkpp13E5PPHs/b37zbOLzP54QN+5Kzmm7Z27GnlB17ShmemsT4Ef0Y\nlpKIzab79EVEJHAK+x6gT5ST24bezJVnj+etb95tvJCPnTjOt3G6eQ5F/3axYw/s2FNKUmwEoy/s\ny+XnJ5MQ06u7my4iIhagsO9BTu/dh9uH3cLkqvHkfPsPPi3+jEPsxj54N31tyVQX9qN0fwL/73++\n5q//8zUD+8VxyXl9+NEgF3G9w7u7+SIi0kMp7Hug5qv3S2vK+WfhZjYf2Eq5pwj6FhHfL4Komv4c\n+vo0dhWa7Cqs5OV3dnF2cgxDU5IYlpJI6hmx2G16x5GIiDRS2PdgSZEJ/Hzgz0hPmcBH333MRwf+\nl4KqA9T1+pKwIV8Sb4sl7GhfSvbF8U2RyTdFR/jblm+J7GXnnORYzukbR+oZcZzTN5boyLDuLkdE\nRLqJwt4CIhwRjO2Xxth+aRQcOcBHRf/Lx4e2c6T+MEQexjEIom2R9PacztGSWCq+iyb/Wzf535b7\n9pEQ04sznL3pd1o0g89Jone4DWdcJDFRYXoxj4hIiFPYW8yZMX05M+YafjHwar49vI9/Fe9ke3E+\nJTWl1BrfgBMinNDLFkmU9zTcR6KpKI6g4kgM5V/XsvPrMjZu3efbX68wO6fFReCMj+S0uAgSYnoR\nFx1OXHQv4ns3fvaOcOiAQETEwhT2FmUzbJwTdzbnxJ3Ndak/5VBNCbvLv2ZXxR6+Kv+ayvrD1FEA\nMWCPATtgx0GkEYvDHUt9VQTVlb2oqw7nwJEI9pdGgGnv8LccdhtxvcOJiw4nOjKM3hFhxESF0Tsy\njOjmPxEOoqPCm6YdhDk63peIiJx8CvsQYBgGfaKc9IlyknbGTzBNk9LacgqO7Pf9Kaw6wOH6I1SZ\nZWAvgzggDlrfvBdOBOFmNIYnArM+nIZaB3U1duprwyhvCKesIhyzOBzc4WAe+wLA8DAbUb0cREWE\nNX06iOrlILLps833CAdRvcJa1unlIMyhCwxFRLqKwj4EGYbBaZGJnBaZyEWu833za9w1HKouodp+\nhN0HCyitKaOstoKKugoq6g5Tb9ZSb9Q2/qtwAFGN23V0U5+DMOz0wuYNw/CGY7odeBocuOvs1NfZ\n8TQ4OOIJ43BDGNQ6MEvDMD0O8DjAaweOfVogzGHzHQhEhDuICLc3/XEQ0avxe1JCFJ4GDxHhdiJ9\n87+3brgdh10HDiJyalPYn0IiHZH0jz0TpzOGIb3Pa7PMa3o5Ul9FeVPwV9VXUdVwlCOtPpu/VzUc\nxW024KYBbDT+cQARQEwg/6gMHIRhM8MwvGHgcWB6HHjddjwNdtz1drxuO1UeB1UeB2at
A446fAcL\nx3PQAI2nISKbDhB6hTnoFWYjPMxOuKPps2m6l6Ple+tljfObljlaPh0OG2F2gzCHDbvdhk3XNYhI\nD6WwF6DxGoC4XrHE9fL/DH6v6aXOU0d1Qw3V7pqWT3c11Q011Lhrm+ZXN82voaahhhpPLbXuOhq8\nDbipB6O++WKCNjqY1Xm7zTBs2DG8DgyvA9Nrx/TY8XpseN023A028Nqp8Tio8doxvXaot0Nt43p4\n7ZheB3js4LVhmo3r47U1naoIPMDtNqPpAMCGw27gsNsIc9jafjbNb1mv8XtsdC8a6t3Y7QZ2mw27\nzWj5Y2813Xp583e7gcPW+N3WZr3GbR2d7MdmGLrwUuQUobCX42YzbEQ6Iol0RJJ0Att7vB5qPXXU\numup9dRR4671fa911zZONy9311HbdJDQfLDQuG4tdZ56vEYDXho6PUL4of/ADdOGYdrBtGOYNjDt\nmB4bprfVH48Nr9cArx2v10adaaPWawfT8B00mB4D3LbGeaYNvEbjgYXZ0TyjzXzT234epsHxHIh0\nWp8BNqPxAMCwGdgNA5vNwGbQ+GlrPChoPohoPkCw2wxsNnzL7baW+RERYbgbPO32Yzcaf6N5/eb9\ntt5Pm7a0XsdoPD3V5rvRuG3z9+ZabLaOvzeuR9O8VtvT+Tamw0754dpW2zRtT1O7m/fbqh0iPZHC\nXk46u81Ob1sUvcOiftB+kpJ6U3iwlHpPPXVNf+qb/tR5mz49da3mN7SZblmn8bPB624cdfC6afC6\ncXvdmIYX0/ACDS0/3Or5RM2R2y1XBZiNoeMbgWg6ADBMA7N5uumP6fuk8bu3aR5t13HTdn2+tx86\nW+5uNV3TannrdZra3GYfbebRpo6207T/Td+2tPkt//v93rZdcNDUmu/Aw9b2IKTdvHbLT/BgBtof\njDR/b9pvZGQYdXVu376NVp+2zqY51vIA9oG/3zDatcfftGFAeY2byspqP+0LsL0B7CNUKOzFsmw2\nG5GOCCIdEUHZv9f04vZ6cHsbmg4E3B1/93S0vAG314PH9ODxenCbLd9bfzavY3NAbV09btODt3n9\n763Tel9e0wuGiYkJhrdNu81j1NQ14wEh5vsHA/4OPEyjcbLNQQjfW6/pE/CaBt5OlrXff8uydgct\nrX6T5t90t19mdnQwU9bB9s3Lvr9dR+05xrJjbhdQjcfaroPtA/m7abes1X6P8/8A2zEOPtodjECb\ng4bWBxN8b5teYTZm3HAhiVEn5+mmCnuRTtgMG+F2G+H24P/P6HTGUFx8JOD1TdPEY3rwmiZe36cX\nL97Gz6Y/HtOL2fTZsq630+08Ztvt2/9p2g4Tr7dp/e/9Zq8IB0era9tuh4nZ9FsmTZ+m2bgf3/fG\nz3brYfra+f1tzebvna3XNK9l/623bT5YAjDBONZhUltGJ9/FIlodXJit/wt2eDDE9763PojreFmH\nBy3fP2Dx2Pnfb6OYfN7QLirq2IIa9kuWLGH79u0YhkFmZibDhw/3LduyZQvLly/HbrczevRoZsyY\n0ek2RUVFzJ07F4/Hg9Pp5IknniA8XG95k1OXYRg4jOb/fXvWew+O98ClO33/QKHlYKLxACUxMYri\nkirMpgOaxvUbDxzwrd/4aX7vs/187/Gv/70Dk9afHc/3djg/MiqcqqO1nezHe+z9d9hOP9t0ON97\nnOu3+p1Wv22zGbg9Hv/bdPAbPr4DPDAI/CCvta44yHP2reuCvQQmaGG/detW9u7dS3Z2Nnv27CEz\nM5Ps7Gzf8sWLF7Nq1Sr69OnD1KlTmTx5MmVlZR1u88wzzzBlyhSuuuoqli9fzvr165kyZUqwmi4i\npwjDMLAbnd/7ERcRQ30v6/fdrXQA5s8PqaV16Lc9yKDdwQONhw1Nvf/2ByQAXtO3ZeP1MI1btGzf\n6vd86zbNC7OFMfzsAZSUVP3Qv5KABC3sc3NzmTBh
AgCpqalUVlZSVVVFdHQ0BQUFxMXFkZycDMCY\nMWPIzc2lrKysw23y8vJ4+OGHARg3bhyrV69W2IuIyHFpPK/edPDWA47hTuYFgEG7iLikpISEhATf\ndGJiIsXFxQAUFxeTmJjYblln29TU1PiG7ZOSknz7EREREf9O2gV6zcMeP3SbQPaTkBCFIwgvYnE6\nY7p8n90lVGoJlTpAtfRUoVJLqNQBquVEBC3sXS4XJSUlvulDhw7hdDo7XHbw4EFcLhdhYWEdbhMV\nFUVtbS0RERG+dY+lvLy6i6vROa+eKFTqANXSU4VKLaFSB6gWf/vrTNCG8dPS0sjJyQEgPz8fl8tF\ndHQ0AP369aOqqorCwkLcbjebNm0iLS2t020uu+wy3/x33nmHUaNGBavZIiIiISdoPfsRI0YwdOhQ\nMjIyMAyDRYsWsWHDBmJiYpg4cSJZWVnMmTMHgPT0dFJSUkhJSWm3DcDMmTN58MEHyc7Opm/fvlx7\n7bXBaraIiEjIMcwTOZnewwVjiEdDRz1PqNQBqqWnCpVaQqUOUC3+9tcZvehbREQkxCnsRUREQpzC\nXkREJMQp7EVEREKcwl5ERCTEheTV+CIiItJCPXsREZEQp7AXEREJcQp7ERGREKewFxERCXEKexER\nkRCnsBcREQlxQXvrXahYsmQJ27dvxzAMMjMzGT58eHc3KSC7du1i+vTp3HbbbUydOpWioiLmzp2L\nx+PB6XTyxBNPEB4ezuuvv85//dd/YbPZuPHGG7nhhhu6u+ltLFu2jI8//hi3283dd9/N+eefb8k6\nampqmDdvHqWlpdTV1TF9+nQGDx5syVqa1dbW8rOf/Yzp06czcuRIS9aSl5fHvffey8CBAwE499xz\n+dWvfmXJWgBef/11XnrpJRwOB7/97W8ZNGiQ5Wr5y1/+wuuvv+6b3rlzJ+vWrSMrKwuAQYMG8fDD\nDwPw0ksvsXHjRgzD4J577mHMmDHd0eROHT16lAcffJDKykoaGhqYMWMGTqeze2oxpVN5eXnmXXfd\nZZqmae7evdu88cYbu7lFgTl69Kg5depU86GHHjLXrFljmqZpzps3z3zrrbdM0zTN3//+9+bLL79s\nHj161Jw0aZJ5+PBhs6amxvzpT39qlpeXd2fT28jNzTV/9atfmaZpmmVlZeaYMWMsWYdpmuabb75p\nrly50jRN0ywsLDQnTZpk2VqaLV++3Pz5z39uvvbaa5at5aOPPjJnzpzZZp5VaykrKzMnTZpkHjly\nxDx48KD50EMPWbaWZnl5eWZWVpY5depUc/v27aZpmubs2bPN999/39y3b5953XXXmXV1dWZpaak5\nefJk0+12d3OL21qzZo355JNPmqZpmt999505efLkbqtFw/jHkJuby4QJEwBITU2lsrKSqqqqbm6V\nf+Hh4bz44ou4XC7fvLy8PK644goAxo0bR25uLtu3b+f8888nJiaGiIgIRowYwSeffNJdzW7n4osv\n5umnnwYgNjaWmpoaS9YBkJ6ezp133glAUVERffr0sWwtAHv27GH37t2MHTsWsOa/r85YtZbc3FxG\njhxJdHQ0LpeLRx55xLK1NHvuuee488472b9/v29UtbmOvLw8Ro0aRXh4OImJiZxxxhns3r27m1vc\nVkJCAhUVFQAcPnyY+Pj4bqtFYX8MJSUlJCQk+KYTExMpLi7uxhYFxuFwEBER0WZeTU0N4eHhACQl\nJVFcXExJSQmJiYm+dXpafXa7naioKADWr1/P6NGjLVlHaxkZGdx///1kZmZaupalS5cyb94837SV\na9m9eze//vWvufnmm9m8ebNlayksLKS2tpZf//rXTJkyhdzcXMvWArBjxw6Sk5Ox2+3Exsb65lup\njp/+9KccOHCAiRMnMnXqVObOndttteic/XEwQ+TJwp3V0VPre/fdd1m/fj2rV69m0qRJvvlWqwPg\n1Vdf5YsvvuCB
Bx5o004r1fLXv/6VCy+8kDPPPLPD5Vaq5eyzz+aee+7hqquuoqCggGnTpuHxeHzL\nrVQLQEVFBc8++ywHDhxg2rRplv03Bo0H+Nddd127+Vaq47//+7/p27cvq1at4t///jczZswgJibG\nt/xk1qKe/TG4XC5KSkp804cOHcLpdHZji05cVFQUtbW1ABw8eBCXy9Vhfa2H/nuCDz74gOeff54X\nX3yRmJgYy9axc+dOioqKABgyZAgej4fevXtbspb333+f9957jxtvvJG//OUv/PGPf7Tsf5c+ffqQ\nnp6OYRicddZZnHbaaVRWVlqylqSkJC666CIcDgdnnXUWvXv3tuy/MWg8nXLRRReRmJjoGwqHzuto\nnt+TfPLJJ1x++eUADB48mLq6OsrLy33LT2YtCvtjSEtLIycnB4D8/HxcLhfR0dHd3KoTc9lll/lq\neeeddxg1ahQXXHABn332GYcPH+bo0aN88skn/PjHP+7mlrY4cuQIy5Yt44UXXiA+Ph6wZh0A27Zt\nY/Xq1UDj6aHq6mrL1vLUU0/x2muv8ec//5kbbriB6dOnW7aW119/nVWrVgFQXFxMaWkpP//5zy1Z\ny+WXX85HH32E1+ulvLzc0v/GDh48SO/evQkPDycsLIxzzjmHbdu2AS11XHrppbz//vvU19dz8OBB\nDh06xIABA7q55W3179+f7du3A7B//3569+5Nampqt9Sit9758eSTT7Jt2zYMw2DRokUMHjy4u5vk\n186dO1m6dCn79+/H4XDQp08fnnzySebNm0ddXR19+/blscceIywsjI0bN7Jq1SoMw2Dq1Kn8x3/8\nR3c33yc7O5sVK1aQkpLim/f444/z0EMPWaoOaLxNbf78+RQVFVFbW8s999zDsGHDePDBBy1XS2sr\nVqzgjDPO4PLLL7dkLVVVVdx///0cPnyYhoYG7rnnHoYMGWLJWqDxNNH69esB+M1vfsP5559vyVp2\n7tzJU089xUsvvQQ0XlexcOFCvF4vF1xwAb/73e8AWLNmDW+88QaGYTBr1ixGjhzZnc1u5+jRo2Rm\nZlJaWorb7ebee+/F6XR2Sy0KexERkRCnYXwREZEQp7AXEREJcQp7ERGREKewFxERCXEKexERkRCn\nJ+iJSDuFhYVceeWVXHTRRW3mjxkzhl/96lc/eP95eXk89dRTrFu37gfvS0T8U9iLSIcSExNZs2ZN\ndzdDRLqAwl5Ejst5553H9OnTycvL4+jRozz++OOce+65bN++nccffxyHw4FhGCxcuJABAwbw7bff\nsmDBArxeL7169eKxxx4DwOv1smjRIr744gvCw8N54YUX6N27dzdXJxKadM5eRI6Lx+Nh4MCBrFmz\nhptvvplnnnkGgLlz5/K73/2ONWvW8Mtf/pKHH34YgEWLFnHHHXfw8ssv84tf/IK3334baHxN7syZ\nM/nzn/+Mw+Hgww8/7LaaREKdevYi0qGysjJuvfXWNvMeeOABAN/LPUaMGMGqVas4fPgwpaWlvvd0\nX3LJJcyePRtofFXpJZdcAjS+8hMaz9mfc845nHbaaQCcfvrpHD58OPhFiZyiFPYi0qFjnbNv/ZRt\nwzAwDKPT5dA4ZP99dru9C1opIoHQML6IHLePPvoIgI8//phBgwYRExOD0+n0veErNzeXCy+8EGjs\n/X/wwQcAvPXWWyxfvrx7Gi1yClPPXkQ61NEwfr9+/QD4/PPPWbduHZWVlSxduhSApUuX8vjjj2O3\n27HZbGRlZQGwYMECFixYwCuvvILD4WDJkiXs27fvpNYicqrTW+9E5LgMGjSI/Px8HA71FUSsQsP4\nIiIiIU49exERkRCnnr2IiEiIU9iLiIiEOIW9iIhIiFPYi4iIhDiFvYiISIhT2IuIiIS4/w+VWw8v\ngKjjWgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f2112fadf28>"
      ]
     },
     "metadata": {
      "tags": []
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(history['loss'], linewidth=2, label='Train')\n",
    "plt.plot(history['val_loss'], linewidth=2, label='Opt')\n",
    "plt.legend(loc='upper right')\n",
    "plt.title('Model loss')\n",
    "plt.ylabel('Loss')\n",
    "plt.xlabel('Epoch')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "46rGA9-SOfFf"
   },
   "outputs": [],
   "source": [
    "x_opt_predictions = autoencoder.predict(x_opt)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "TJOTw8HCOuPx"
   },
   "outputs": [],
   "source": [
    "mse = np.mean(np.power(x_opt - x_opt_predictions, 2), axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "qWT6ZK8nYUdN",
    "outputId": "7ab31272-ceb3-4724-b483-48b54f545fb8"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(13465,)"
      ]
     },
     "execution_count": 59,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mse.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 51
    },
    "colab_type": "code",
    "id": "kDobg2zfabLr",
    "outputId": "b8ae7128-b6aa-43d1-b5c4-5e92299c3a4d"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([5.67163702e-04, 9.09609758e-05, 7.24533507e-05, ...,\n",
       "       2.23024920e-04, 8.57839622e-04, 4.44349298e-04])"
      ]
     },
     "execution_count": 72,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 74,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 85
    },
    "colab_type": "code",
    "id": "EzH0oKfdaiK1",
    "outputId": "b5b85ac8-2752-4ef0-b75d-f420521494cc"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.00046100249961346953\n",
      "5.424287459070545e-06\n",
      "0.1799505412418943\n",
      "0.003796464687653877\n"
     ]
    }
   ],
   "source": [
    "print(mse.mean())\n",
    "print(mse.min())\n",
    "print(mse.max())\n",
    "print(mse.std())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 75,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "8hqFl0W1YMsC",
    "outputId": "28ddd439-faca-48d0-f3c9-1b5f667366b4"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.004257467187267347"
      ]
     },
     "execution_count": 75,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tr = mse.mean() + mse.std()\n",
    "tr"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "k4LbbOcfgMpx"
   },
   "source": [
    "The claimed threshold (`tr`) for this dataset is 0.042."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 0,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "56evRpROg0Q3"
   },
   "outputs": [],
   "source": [
    "x_test_predictions = autoencoder.predict(x_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 84,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "An-S8wOfg9Ix",
    "outputId": "175cd7ba-d25e-4d7f-f0e7-b75475c362c4"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(13465,)"
      ]
     },
     "execution_count": 84,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mse_test = np.mean(np.power(x_test - x_test_predictions, 2), axis=1)\n",
    "mse_test.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 89,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 51
    },
    "colab_type": "code",
    "id": "HFlAKFHehDaE",
    "outputId": "068e77bd-49ee-4b12-d837-c03b049cb5e3"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "187\n",
      "0.013887857408095061\n"
     ]
    }
   ],
   "source": [
    "over_tr = mse_test > tr\n",
    "false_positives = sum(over_tr)\n",
    "print(false_positives)\n",
    "fpr = false_positives / mse_test.shape[0]\n",
    "print(fpr)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "bxgVHQkRih9v"
   },
   "source": [
    "The claimed detection window size is 82."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 93,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 34
    },
    "colab_type": "code",
    "id": "TYlzNfgqjpqy",
    "outputId": "95811a44-c7f2-4963-fa66-5699db7664da"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "False alarms 0\n"
     ]
    }
   ],
   "source": [
    "false_alarms = 0\n",
    "w = 82\n",
    "# Slide a window of size w over the per-sample threshold exceedances and\n",
    "# raise an alarm when a majority of the window is over the threshold.\n",
    "# Note: the original condition `fp/2 > w` could never be true (fp <= w),\n",
    "# so no alarm was ever counted; the majority test is `fp > w/2`.\n",
    "for i in range(0, len(over_tr) - w + 1):\n",
    "  fp = sum(over_tr[i:i+w])\n",
    "  if fp > w / 2:\n",
    "    false_alarms = false_alarms + 1\n",
    "\n",
    "print('False alarms %d' % false_alarms)"
   ]
  }
 ],
 "metadata": {
  "colab": {
   "name": "Anomaly_detection.ipynb",
   "provenance": [],
   "toc_visible": true,
   "version": "0.3.2"
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
