{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from tensorflow import keras"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "data = pd.read_csv('./credit-a.csv',header=None)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "数据没有标题 "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([-1,  1], dtype=int64)"
      ]
     },
     "execution_count": 63,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "data.iloc[:,-1].unique()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "x = data.iloc[:,:-1].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [],
   "source": [
    "y = data.iloc[:,-1].replace(-1,0).values.reshape(-1,1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "((653, 15), (653, 1))"
      ]
     },
     "execution_count": 66,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x.shape,y.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "metadata": {},
   "outputs": [],
   "source": [
    "model = keras.Sequential()\n",
    "model.add(keras.layers.Dense(128,input_shape=(x.shape[1],),activation='relu'))\n",
    "model.add(keras.layers.Dense(128,activation='relu'))\n",
    "model.add(keras.layers.Dense(128,activation='relu'))\n",
    "model.add(keras.layers.Dense(1,activation='sigmoid'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "dense_23 (Dense)             (None, 128)               2048      \n",
      "_________________________________________________________________\n",
      "dense_24 (Dense)             (None, 128)               16512     \n",
      "_________________________________________________________________\n",
      "dense_25 (Dense)             (None, 128)               16512     \n",
      "_________________________________________________________________\n",
      "dense_26 (Dense)             (None, 1)                 129       \n",
      "=================================================================\n",
      "Total params: 35,201\n",
      "Trainable params: 35,201\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/1000\n",
      "653/653 [==============================] - 1s 2ms/step - loss: 3.7957 - acc: 0.6126\n",
      "Epoch 2/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 2.0971 - acc: 0.6371\n",
      "Epoch 3/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 1.3134 - acc: 0.6723\n",
      "Epoch 4/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 2.1416 - acc: 0.6325\n",
      "Epoch 5/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 2.3322 - acc: 0.6616\n",
      "Epoch 6/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 1.7547 - acc: 0.6983\n",
      "Epoch 7/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 2.3455 - acc: 0.6845\n",
      "Epoch 8/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 1.8748 - acc: 0.6907\n",
      "Epoch 9/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 1.8015 - acc: 0.6953\n",
      "Epoch 10/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 1.3897 - acc: 0.7044\n",
      "Epoch 11/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 1.6060 - acc: 0.7519\n",
      "Epoch 12/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 1.2689 - acc: 0.7075\n",
      "Epoch 13/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 1.9095 - acc: 0.6861\n",
      "Epoch 14/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 1.7563 - acc: 0.7458\n",
      "Epoch 15/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 1.6316 - acc: 0.7504\n",
      "Epoch 16/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 1.0899 - acc: 0.7136\n",
      "Epoch 17/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 1.0330 - acc: 0.7519\n",
      "Epoch 18/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 1.2227 - acc: 0.7642\n",
      "Epoch 19/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.9019 - acc: 0.7381\n",
      "Epoch 20/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.8959 - acc: 0.7412\n",
      "Epoch 21/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 1.2666 - acc: 0.7305\n",
      "Epoch 22/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.8503 - acc: 0.7534\n",
      "Epoch 23/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.7780 - acc: 0.7734\n",
      "Epoch 24/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.5591 - acc: 0.8055\n",
      "Epoch 25/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.5966 - acc: 0.7825\n",
      "Epoch 26/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.7727 - acc: 0.7825\n",
      "Epoch 27/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.8721 - acc: 0.7703\n",
      "Epoch 28/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.5432 - acc: 0.8055\n",
      "Epoch 29/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.5123 - acc: 0.7887\n",
      "Epoch 30/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.6720 - acc: 0.8086\n",
      "Epoch 31/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.5573 - acc: 0.7933\n",
      "Epoch 32/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.8161 - acc: 0.7565\n",
      "Epoch 33/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.6512 - acc: 0.7718\n",
      "Epoch 34/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.5545 - acc: 0.8116\n",
      "Epoch 35/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.5585 - acc: 0.8055\n",
      "Epoch 36/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.9602 - acc: 0.7764\n",
      "Epoch 37/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.8760 - acc: 0.7933\n",
      "Epoch 38/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.7569 - acc: 0.7703\n",
      "Epoch 39/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.6107 - acc: 0.7825\n",
      "Epoch 40/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.5856 - acc: 0.8116\n",
      "Epoch 41/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.4729 - acc: 0.8239\n",
      "Epoch 42/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.4606 - acc: 0.8208\n",
      "Epoch 43/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.5410 - acc: 0.8101\n",
      "Epoch 44/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.6208 - acc: 0.7963\n",
      "Epoch 45/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.5114 - acc: 0.7902\n",
      "Epoch 46/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.4664 - acc: 0.8270\n",
      "Epoch 47/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.4479 - acc: 0.8331\n",
      "Epoch 48/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 1.0248 - acc: 0.7764\n",
      "Epoch 49/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.7128 - acc: 0.7917\n",
      "Epoch 50/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.6376 - acc: 0.7917\n",
      "Epoch 51/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.6050 - acc: 0.7933\n",
      "Epoch 52/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.6211 - acc: 0.8132\n",
      "Epoch 53/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.5354 - acc: 0.8254\n",
      "Epoch 54/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 1.2747 - acc: 0.7198\n",
      "Epoch 55/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.6711 - acc: 0.8009\n",
      "Epoch 56/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.5608 - acc: 0.7902\n",
      "Epoch 57/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.4897 - acc: 0.8040\n",
      "Epoch 58/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.4401 - acc: 0.8101\n",
      "Epoch 59/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.4517 - acc: 0.8361\n",
      "Epoch 60/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.4501 - acc: 0.8132\n",
      "Epoch 61/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.4664 - acc: 0.8101\n",
      "Epoch 62/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.4592 - acc: 0.8147\n",
      "Epoch 63/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.4461 - acc: 0.8224\n",
      "Epoch 64/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.4260 - acc: 0.7979\n",
      "Epoch 65/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.5335 - acc: 0.8208\n",
      "Epoch 66/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.4236 - acc: 0.8224\n",
      "Epoch 67/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.5635 - acc: 0.8193\n",
      "Epoch 68/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.5292 - acc: 0.8086\n",
      "Epoch 69/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.4363 - acc: 0.8224\n",
      "Epoch 70/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.4513 - acc: 0.8208\n",
      "Epoch 71/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3898 - acc: 0.8300\n",
      "Epoch 72/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.3798 - acc: 0.8300\n",
      "Epoch 73/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.3685 - acc: 0.8606\n",
      "Epoch 74/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.3803 - acc: 0.8484\n",
      "Epoch 75/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3462 - acc: 0.8668\n",
      "Epoch 76/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.3824 - acc: 0.8407\n",
      "Epoch 77/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.4101 - acc: 0.8484\n",
      "Epoch 78/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.3851 - acc: 0.8438\n",
      "Epoch 79/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.4284 - acc: 0.8331\n",
      "Epoch 80/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3920 - acc: 0.8392\n",
      "Epoch 81/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.3447 - acc: 0.8560\n",
      "Epoch 82/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.3597 - acc: 0.8545\n",
      "Epoch 83/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 116us/step - loss: 0.3982 - acc: 0.8331\n",
      "Epoch 84/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.4109 - acc: 0.8407\n",
      "Epoch 85/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.5548 - acc: 0.8315\n",
      "Epoch 86/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.5016 - acc: 0.8132\n",
      "Epoch 87/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.4529 - acc: 0.8162\n",
      "Epoch 88/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.3938 - acc: 0.8331\n",
      "Epoch 89/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3670 - acc: 0.8515\n",
      "Epoch 90/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.3548 - acc: 0.8530\n",
      "Epoch 91/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.3497 - acc: 0.8760\n",
      "Epoch 92/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.3289 - acc: 0.8560\n",
      "Epoch 93/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.3349 - acc: 0.8606\n",
      "Epoch 94/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3594 - acc: 0.8530\n",
      "Epoch 95/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.3905 - acc: 0.8484\n",
      "Epoch 96/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.4336 - acc: 0.8484\n",
      "Epoch 97/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.4768 - acc: 0.8407\n",
      "Epoch 98/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.4556 - acc: 0.8515\n",
      "Epoch 99/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3566 - acc: 0.8637\n",
      "Epoch 100/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3640 - acc: 0.8453\n",
      "Epoch 101/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3954 - acc: 0.8423\n",
      "Epoch 102/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.3377 - acc: 0.8484\n",
      "Epoch 103/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.3166 - acc: 0.8790\n",
      "Epoch 104/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.3472 - acc: 0.8545\n",
      "Epoch 105/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.3765 - acc: 0.8515\n",
      "Epoch 106/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.3421 - acc: 0.8469\n",
      "Epoch 107/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.3162 - acc: 0.8683\n",
      "Epoch 108/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.3512 - acc: 0.8729\n",
      "Epoch 109/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.3411 - acc: 0.8729\n",
      "Epoch 110/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3304 - acc: 0.8530\n",
      "Epoch 111/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3102 - acc: 0.8760\n",
      "Epoch 112/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.3223 - acc: 0.8652\n",
      "Epoch 113/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.3709 - acc: 0.8377\n",
      "Epoch 114/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.3218 - acc: 0.8790\n",
      "Epoch 115/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.2954 - acc: 0.8851\n",
      "Epoch 116/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.2823 - acc: 0.8851\n",
      "Epoch 117/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.4445 - acc: 0.8224\n",
      "Epoch 118/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.3523 - acc: 0.8560\n",
      "Epoch 119/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.3581 - acc: 0.8423\n",
      "Epoch 120/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.3084 - acc: 0.8714\n",
      "Epoch 121/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.3157 - acc: 0.8928\n",
      "Epoch 122/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3091 - acc: 0.8867\n",
      "Epoch 123/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3001 - acc: 0.8928\n",
      "Epoch 124/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.3233 - acc: 0.8545\n",
      "Epoch 125/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2965 - acc: 0.8851\n",
      "Epoch 126/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2765 - acc: 0.8913\n",
      "Epoch 127/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3027 - acc: 0.8806\n",
      "Epoch 128/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2828 - acc: 0.8867\n",
      "Epoch 129/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.3734 - acc: 0.8668\n",
      "Epoch 130/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.5227 - acc: 0.8331\n",
      "Epoch 131/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3134 - acc: 0.8698\n",
      "Epoch 132/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2853 - acc: 0.8851\n",
      "Epoch 133/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.2680 - acc: 0.8928\n",
      "Epoch 134/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2692 - acc: 0.8897\n",
      "Epoch 135/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2755 - acc: 0.8897\n",
      "Epoch 136/1000\n",
      "653/653 [==============================] - 0s 104us/step - loss: 0.2595 - acc: 0.8882\n",
      "Epoch 137/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2660 - acc: 0.8943\n",
      "Epoch 138/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2917 - acc: 0.8775\n",
      "Epoch 139/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2678 - acc: 0.8928\n",
      "Epoch 140/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2608 - acc: 0.8882\n",
      "Epoch 141/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2718 - acc: 0.8897\n",
      "Epoch 142/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2662 - acc: 0.8959\n",
      "Epoch 143/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2462 - acc: 0.9020\n",
      "Epoch 144/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3095 - acc: 0.8851\n",
      "Epoch 145/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2529 - acc: 0.8882\n",
      "Epoch 146/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2550 - acc: 0.9051\n",
      "Epoch 147/1000\n",
      "653/653 [==============================] - 0s 142us/step - loss: 0.2550 - acc: 0.9051\n",
      "Epoch 148/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.2305 - acc: 0.9081\n",
      "Epoch 149/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2849 - acc: 0.8851\n",
      "Epoch 150/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2565 - acc: 0.8897\n",
      "Epoch 151/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.5795 - acc: 0.8499\n",
      "Epoch 152/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.3092 - acc: 0.8821\n",
      "Epoch 153/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2730 - acc: 0.8959\n",
      "Epoch 154/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2656 - acc: 0.8913\n",
      "Epoch 155/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.2538 - acc: 0.8974\n",
      "Epoch 156/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2433 - acc: 0.8959\n",
      "Epoch 157/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2440 - acc: 0.9005\n",
      "Epoch 158/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.2274 - acc: 0.9081\n",
      "Epoch 159/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.2762 - acc: 0.9051\n",
      "Epoch 160/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.3255 - acc: 0.8760\n",
      "Epoch 161/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 0.2553 - acc: 0.9066\n",
      "Epoch 162/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2574 - acc: 0.8959\n",
      "Epoch 163/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.4309 - acc: 0.8622\n",
      "Epoch 164/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 113us/step - loss: 0.3326 - acc: 0.8545\n",
      "Epoch 165/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2447 - acc: 0.9081\n",
      "Epoch 166/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.2482 - acc: 0.8913\n",
      "Epoch 167/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.2476 - acc: 0.9035\n",
      "Epoch 168/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.3628 - acc: 0.8729\n",
      "Epoch 169/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2922 - acc: 0.8622\n",
      "Epoch 170/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2565 - acc: 0.8974\n",
      "Epoch 171/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2432 - acc: 0.8989\n",
      "Epoch 172/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2704 - acc: 0.8821\n",
      "Epoch 173/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2271 - acc: 0.9127\n",
      "Epoch 174/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2443 - acc: 0.8989\n",
      "Epoch 175/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.2771 - acc: 0.8882\n",
      "Epoch 176/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3361 - acc: 0.8453\n",
      "Epoch 177/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2666 - acc: 0.8882\n",
      "Epoch 178/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.2561 - acc: 0.8882\n",
      "Epoch 179/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2376 - acc: 0.8989\n",
      "Epoch 180/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.2512 - acc: 0.9096\n",
      "Epoch 181/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2297 - acc: 0.9066\n",
      "Epoch 182/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2395 - acc: 0.9066\n",
      "Epoch 183/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2535 - acc: 0.8897\n",
      "Epoch 184/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2571 - acc: 0.8806\n",
      "Epoch 185/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2257 - acc: 0.9066\n",
      "Epoch 186/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2388 - acc: 0.8913\n",
      "Epoch 187/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.2700 - acc: 0.9005\n",
      "Epoch 188/1000\n",
      "653/653 [==============================] - 0s 145us/step - loss: 0.2642 - acc: 0.8959\n",
      "Epoch 189/1000\n",
      "653/653 [==============================] - 0s 148us/step - loss: 0.2461 - acc: 0.9020\n",
      "Epoch 190/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2172 - acc: 0.9112\n",
      "Epoch 191/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2570 - acc: 0.9035\n",
      "Epoch 192/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2339 - acc: 0.9173\n",
      "Epoch 193/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.2251 - acc: 0.9112\n",
      "Epoch 194/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.2393 - acc: 0.8928\n",
      "Epoch 195/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2053 - acc: 0.9173\n",
      "Epoch 196/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2301 - acc: 0.9020\n",
      "Epoch 197/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.2193 - acc: 0.9112\n",
      "Epoch 198/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2535 - acc: 0.9142\n",
      "Epoch 199/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2267 - acc: 0.9112\n",
      "Epoch 200/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2560 - acc: 0.8897\n",
      "Epoch 201/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2042 - acc: 0.9173 0s - loss: 0.2115 - acc: 0.911\n",
      "Epoch 202/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2061 - acc: 0.9158\n",
      "Epoch 203/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.1899 - acc: 0.9326\n",
      "Epoch 204/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2004 - acc: 0.9158\n",
      "Epoch 205/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2195 - acc: 0.9112\n",
      "Epoch 206/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2013 - acc: 0.9188\n",
      "Epoch 207/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2177 - acc: 0.9188\n",
      "Epoch 208/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2418 - acc: 0.8928\n",
      "Epoch 209/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2596 - acc: 0.9035\n",
      "Epoch 210/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2308 - acc: 0.9051\n",
      "Epoch 211/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.2222 - acc: 0.9219\n",
      "Epoch 212/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2978 - acc: 0.8851\n",
      "Epoch 213/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.2796 - acc: 0.8775\n",
      "Epoch 214/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2558 - acc: 0.9096\n",
      "Epoch 215/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.2832 - acc: 0.8744\n",
      "Epoch 216/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.2771 - acc: 0.8928\n",
      "Epoch 217/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.2924 - acc: 0.8959\n",
      "Epoch 218/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2358 - acc: 0.9158\n",
      "Epoch 219/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.2500 - acc: 0.9127\n",
      "Epoch 220/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.2849 - acc: 0.9158\n",
      "Epoch 221/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3024 - acc: 0.9020\n",
      "Epoch 222/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2659 - acc: 0.8989\n",
      "Epoch 223/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2077 - acc: 0.9204\n",
      "Epoch 224/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2212 - acc: 0.9234\n",
      "Epoch 225/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.2132 - acc: 0.9280\n",
      "Epoch 226/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2217 - acc: 0.9020\n",
      "Epoch 227/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2285 - acc: 0.9142\n",
      "Epoch 228/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.2042 - acc: 0.9280\n",
      "Epoch 229/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2052 - acc: 0.9127\n",
      "Epoch 230/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.2114 - acc: 0.9112\n",
      "Epoch 231/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.2208 - acc: 0.9173\n",
      "Epoch 232/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2346 - acc: 0.8989\n",
      "Epoch 233/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2697 - acc: 0.8851\n",
      "Epoch 234/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2652 - acc: 0.8851\n",
      "Epoch 235/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2226 - acc: 0.9234\n",
      "Epoch 236/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.2344 - acc: 0.9020\n",
      "Epoch 237/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.2334 - acc: 0.9066\n",
      "Epoch 238/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.2073 - acc: 0.9173\n",
      "Epoch 239/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.2069 - acc: 0.9158\n",
      "Epoch 240/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2012 - acc: 0.9173\n",
      "Epoch 241/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2374 - acc: 0.8989\n",
      "Epoch 242/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1982 - acc: 0.9265\n",
      "Epoch 243/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1858 - acc: 0.9265\n",
      "Epoch 244/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.1849 - acc: 0.9234\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 245/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1938 - acc: 0.9326\n",
      "Epoch 246/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1773 - acc: 0.9326\n",
      "Epoch 247/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2104 - acc: 0.9219\n",
      "Epoch 248/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2034 - acc: 0.9219\n",
      "Epoch 249/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1948 - acc: 0.9173\n",
      "Epoch 250/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2353 - acc: 0.9066\n",
      "Epoch 251/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2325 - acc: 0.9066\n",
      "Epoch 252/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2068 - acc: 0.9081\n",
      "Epoch 253/1000\n",
      "653/653 [==============================] - 0s 104us/step - loss: 0.1854 - acc: 0.9280\n",
      "Epoch 254/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1913 - acc: 0.9280\n",
      "Epoch 255/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1912 - acc: 0.9280\n",
      "Epoch 256/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3426 - acc: 0.8729\n",
      "Epoch 257/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.7765 - acc: 0.8208\n",
      "Epoch 258/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.6857 - acc: 0.8392\n",
      "Epoch 259/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.4842 - acc: 0.8423\n",
      "Epoch 260/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.6022 - acc: 0.8683\n",
      "Epoch 261/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.3915 - acc: 0.8790\n",
      "Epoch 262/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2852 - acc: 0.8913\n",
      "Epoch 263/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2608 - acc: 0.8928\n",
      "Epoch 264/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.2249 - acc: 0.9250\n",
      "Epoch 265/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2601 - acc: 0.8913\n",
      "Epoch 266/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.4796 - acc: 0.8744\n",
      "Epoch 267/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.3073 - acc: 0.8897\n",
      "Epoch 268/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.3784 - acc: 0.8897\n",
      "Epoch 269/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.5407 - acc: 0.8652\n",
      "Epoch 270/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.3373 - acc: 0.8729\n",
      "Epoch 271/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.3115 - acc: 0.8851\n",
      "Epoch 272/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.3649 - acc: 0.8897\n",
      "Epoch 273/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.5129 - acc: 0.8683\n",
      "Epoch 274/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3300 - acc: 0.9005\n",
      "Epoch 275/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2304 - acc: 0.9081\n",
      "Epoch 276/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2106 - acc: 0.9234\n",
      "Epoch 277/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2009 - acc: 0.9158\n",
      "Epoch 278/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2015 - acc: 0.9265\n",
      "Epoch 279/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2037 - acc: 0.9142\n",
      "Epoch 280/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2162 - acc: 0.9127\n",
      "Epoch 281/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2683 - acc: 0.9051\n",
      "Epoch 282/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2081 - acc: 0.9204\n",
      "Epoch 283/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1875 - acc: 0.9265\n",
      "Epoch 284/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1855 - acc: 0.9311\n",
      "Epoch 285/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1728 - acc: 0.9326\n",
      "Epoch 286/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1708 - acc: 0.9387\n",
      "Epoch 287/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.1675 - acc: 0.9311\n",
      "Epoch 288/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1677 - acc: 0.9387\n",
      "Epoch 289/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1751 - acc: 0.9357\n",
      "Epoch 290/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1658 - acc: 0.9372\n",
      "Epoch 291/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1561 - acc: 0.9372\n",
      "Epoch 292/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2522 - acc: 0.9234\n",
      "Epoch 293/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2471 - acc: 0.8943\n",
      "Epoch 294/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.2085 - acc: 0.9219\n",
      "Epoch 295/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1734 - acc: 0.9296\n",
      "Epoch 296/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1806 - acc: 0.9265\n",
      "Epoch 297/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1880 - acc: 0.9326\n",
      "Epoch 298/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1705 - acc: 0.9387\n",
      "Epoch 299/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2245 - acc: 0.9112\n",
      "Epoch 300/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.1939 - acc: 0.9127\n",
      "Epoch 301/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1956 - acc: 0.9204\n",
      "Epoch 302/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1627 - acc: 0.9326\n",
      "Epoch 303/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1778 - acc: 0.9250\n",
      "Epoch 304/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1847 - acc: 0.9219\n",
      "Epoch 305/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1617 - acc: 0.9357\n",
      "Epoch 306/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1654 - acc: 0.9387\n",
      "Epoch 307/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1576 - acc: 0.9357\n",
      "Epoch 308/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1605 - acc: 0.9403\n",
      "Epoch 309/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1557 - acc: 0.9372\n",
      "Epoch 310/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1787 - acc: 0.9326\n",
      "Epoch 311/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.1560 - acc: 0.9403\n",
      "Epoch 312/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1481 - acc: 0.9433\n",
      "Epoch 313/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1481 - acc: 0.9464\n",
      "Epoch 314/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1809 - acc: 0.9296\n",
      "Epoch 315/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2258 - acc: 0.9112\n",
      "Epoch 316/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.1735 - acc: 0.9342\n",
      "Epoch 317/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1697 - acc: 0.9326\n",
      "Epoch 318/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1481 - acc: 0.9403\n",
      "Epoch 319/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1511 - acc: 0.9418\n",
      "Epoch 320/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1392 - acc: 0.9449\n",
      "Epoch 321/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1586 - acc: 0.9296\n",
      "Epoch 322/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1698 - acc: 0.9280\n",
      "Epoch 323/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1532 - acc: 0.9403\n",
      "Epoch 324/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1468 - acc: 0.9418\n",
      "Epoch 325/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1944 - acc: 0.9204\n",
      "Epoch 326/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 116us/step - loss: 0.1483 - acc: 0.9433\n",
      "Epoch 327/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1390 - acc: 0.9449\n",
      "Epoch 328/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.1917 - acc: 0.9280\n",
      "Epoch 329/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1700 - acc: 0.9311\n",
      "Epoch 330/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1276 - acc: 0.9495\n",
      "Epoch 331/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1385 - acc: 0.9372\n",
      "Epoch 332/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1266 - acc: 0.9495\n",
      "Epoch 333/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2028 - acc: 0.8989\n",
      "Epoch 334/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1547 - acc: 0.9387\n",
      "Epoch 335/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.2168 - acc: 0.9219\n",
      "Epoch 336/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1868 - acc: 0.9265\n",
      "Epoch 337/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2373 - acc: 0.8974\n",
      "Epoch 338/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.1662 - acc: 0.9234\n",
      "Epoch 339/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1607 - acc: 0.9342\n",
      "Epoch 340/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1397 - acc: 0.9418\n",
      "Epoch 341/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1378 - acc: 0.9449\n",
      "Epoch 342/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1272 - acc: 0.9479\n",
      "Epoch 343/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1342 - acc: 0.9449\n",
      "Epoch 344/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1327 - acc: 0.9449\n",
      "Epoch 345/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1379 - acc: 0.9403\n",
      "Epoch 346/1000\n",
      "653/653 [==============================] - 0s 104us/step - loss: 0.1537 - acc: 0.9403\n",
      "Epoch 347/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1989 - acc: 0.9173\n",
      "Epoch 348/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2714 - acc: 0.8913\n",
      "Epoch 349/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1739 - acc: 0.9234\n",
      "Epoch 350/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1431 - acc: 0.9464\n",
      "Epoch 351/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1344 - acc: 0.9464\n",
      "Epoch 352/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1477 - acc: 0.9372\n",
      "Epoch 353/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1435 - acc: 0.9433\n",
      "Epoch 354/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1614 - acc: 0.9403\n",
      "Epoch 355/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1659 - acc: 0.9311\n",
      "Epoch 356/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.1437 - acc: 0.9449\n",
      "Epoch 357/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2200 - acc: 0.9173\n",
      "Epoch 358/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1731 - acc: 0.9250\n",
      "Epoch 359/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 0.1448 - acc: 0.9342\n",
      "Epoch 360/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1350 - acc: 0.9418\n",
      "Epoch 361/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1155 - acc: 0.9602\n",
      "Epoch 362/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1523 - acc: 0.9357\n",
      "Epoch 363/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1174 - acc: 0.9556\n",
      "Epoch 364/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1530 - acc: 0.9387\n",
      "Epoch 365/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1287 - acc: 0.9495\n",
      "Epoch 366/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1443 - acc: 0.9464\n",
      "Epoch 367/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1277 - acc: 0.9495\n",
      "Epoch 368/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1201 - acc: 0.9479\n",
      "Epoch 369/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.4082 - acc: 0.8606\n",
      "Epoch 370/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.6162 - acc: 0.8499\n",
      "Epoch 371/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.3931 - acc: 0.8729\n",
      "Epoch 372/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.7944 - acc: 0.8652\n",
      "Epoch 373/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.6411 - acc: 0.8714\n",
      "Epoch 374/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.5845 - acc: 0.8744\n",
      "Epoch 375/1000\n",
      "653/653 [==============================] - 0s 145us/step - loss: 0.9981 - acc: 0.8361\n",
      "Epoch 376/1000\n",
      "653/653 [==============================] - 0s 153us/step - loss: 0.6995 - acc: 0.8560\n",
      "Epoch 377/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.4666 - acc: 0.8576\n",
      "Epoch 378/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.4343 - acc: 0.8790\n",
      "Epoch 379/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.4475 - acc: 0.8760\n",
      "Epoch 380/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.5496 - acc: 0.8913\n",
      "Epoch 381/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.4251 - acc: 0.9173\n",
      "Epoch 382/1000\n",
      "653/653 [==============================] - 0s 145us/step - loss: 0.3939 - acc: 0.9173\n",
      "Epoch 383/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.3069 - acc: 0.9280\n",
      "Epoch 384/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2421 - acc: 0.9357\n",
      "Epoch 385/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.1969 - acc: 0.9495\n",
      "Epoch 386/1000\n",
      "653/653 [==============================] - 0s 148us/step - loss: 0.2041 - acc: 0.9326\n",
      "Epoch 387/1000\n",
      "653/653 [==============================] - 0s 151us/step - loss: 0.2241 - acc: 0.9280\n",
      "Epoch 388/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.1589 - acc: 0.9387\n",
      "Epoch 389/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1342 - acc: 0.9541\n",
      "Epoch 390/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.1450 - acc: 0.9510\n",
      "Epoch 391/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1331 - acc: 0.9510\n",
      "Epoch 392/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1272 - acc: 0.9541\n",
      "Epoch 393/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1439 - acc: 0.9464\n",
      "Epoch 394/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1384 - acc: 0.9479\n",
      "Epoch 395/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.1185 - acc: 0.9525\n",
      "Epoch 396/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1532 - acc: 0.9372\n",
      "Epoch 397/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1383 - acc: 0.9479\n",
      "Epoch 398/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1157 - acc: 0.9617\n",
      "Epoch 399/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1221 - acc: 0.9541\n",
      "Epoch 400/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1200 - acc: 0.9510\n",
      "Epoch 401/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1203 - acc: 0.9510\n",
      "Epoch 402/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1183 - acc: 0.9510\n",
      "Epoch 403/1000\n",
      "653/653 [==============================] - 0s 142us/step - loss: 0.1124 - acc: 0.9541\n",
      "Epoch 404/1000\n",
      "653/653 [==============================] - 0s 142us/step - loss: 0.1115 - acc: 0.9571\n",
      "Epoch 405/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1833 - acc: 0.9464\n",
      "Epoch 406/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.1423 - acc: 0.9387\n",
      "Epoch 407/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 139us/step - loss: 0.1380 - acc: 0.9479\n",
      "Epoch 408/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1118 - acc: 0.9541\n",
      "Epoch 409/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1104 - acc: 0.9556\n",
      "Epoch 410/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1050 - acc: 0.9587\n",
      "Epoch 411/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1050 - acc: 0.9510\n",
      "Epoch 412/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1356 - acc: 0.9510\n",
      "Epoch 413/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1056 - acc: 0.9648\n",
      "Epoch 414/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1083 - acc: 0.9587\n",
      "Epoch 415/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1080 - acc: 0.9617\n",
      "Epoch 416/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.1073 - acc: 0.9632\n",
      "Epoch 417/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1124 - acc: 0.9556\n",
      "Epoch 418/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1324 - acc: 0.9556\n",
      "Epoch 419/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1329 - acc: 0.9495\n",
      "Epoch 420/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1249 - acc: 0.9632\n",
      "Epoch 421/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1338 - acc: 0.9510\n",
      "Epoch 422/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.1396 - acc: 0.9387\n",
      "Epoch 423/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1156 - acc: 0.9541\n",
      "Epoch 424/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1181 - acc: 0.9464\n",
      "Epoch 425/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1127 - acc: 0.9495\n",
      "Epoch 426/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1152 - acc: 0.9525\n",
      "Epoch 427/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1137 - acc: 0.9556\n",
      "Epoch 428/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1088 - acc: 0.9571\n",
      "Epoch 429/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1095 - acc: 0.9587\n",
      "Epoch 430/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1031 - acc: 0.9632\n",
      "Epoch 431/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1892 - acc: 0.9326\n",
      "Epoch 432/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1617 - acc: 0.9372\n",
      "Epoch 433/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.1205 - acc: 0.9510\n",
      "Epoch 434/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1162 - acc: 0.9587\n",
      "Epoch 435/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1311 - acc: 0.9510\n",
      "Epoch 436/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.1298 - acc: 0.9495\n",
      "Epoch 437/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1134 - acc: 0.9587\n",
      "Epoch 438/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1042 - acc: 0.9571\n",
      "Epoch 439/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1016 - acc: 0.9571\n",
      "Epoch 440/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1050 - acc: 0.9632\n",
      "Epoch 441/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1133 - acc: 0.9602\n",
      "Epoch 442/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0990 - acc: 0.9724\n",
      "Epoch 443/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0974 - acc: 0.9648\n",
      "Epoch 444/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0962 - acc: 0.9678\n",
      "Epoch 445/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0960 - acc: 0.9663\n",
      "Epoch 446/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0921 - acc: 0.9663\n",
      "Epoch 447/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0943 - acc: 0.9694\n",
      "Epoch 448/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0783 - acc: 0.9724\n",
      "Epoch 449/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1114 - acc: 0.9587\n",
      "Epoch 450/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.1135 - acc: 0.9617\n",
      "Epoch 451/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0917 - acc: 0.9663\n",
      "Epoch 452/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.0913 - acc: 0.9678\n",
      "Epoch 453/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0860 - acc: 0.9740\n",
      "Epoch 454/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0872 - acc: 0.9724\n",
      "Epoch 455/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.1045 - acc: 0.9587\n",
      "Epoch 456/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1027 - acc: 0.9632\n",
      "Epoch 457/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0791 - acc: 0.9740\n",
      "Epoch 458/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1098 - acc: 0.9617\n",
      "Epoch 459/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1246 - acc: 0.9587\n",
      "Epoch 460/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1054 - acc: 0.9617\n",
      "Epoch 461/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.3656 - acc: 0.9096\n",
      "Epoch 462/1000\n",
      "653/653 [==============================] - 0s 145us/step - loss: 0.2117 - acc: 0.9173\n",
      "Epoch 463/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1563 - acc: 0.9403\n",
      "Epoch 464/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.1347 - acc: 0.9510\n",
      "Epoch 465/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2326 - acc: 0.9357\n",
      "Epoch 466/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1830 - acc: 0.9280\n",
      "Epoch 467/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1578 - acc: 0.9464\n",
      "Epoch 468/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1638 - acc: 0.9525\n",
      "Epoch 469/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1239 - acc: 0.9495\n",
      "Epoch 470/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1123 - acc: 0.9556\n",
      "Epoch 471/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1054 - acc: 0.9617\n",
      "Epoch 472/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1307 - acc: 0.9525\n",
      "Epoch 473/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1160 - acc: 0.9510\n",
      "Epoch 474/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0939 - acc: 0.9663\n",
      "Epoch 475/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0897 - acc: 0.9694\n",
      "Epoch 476/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0879 - acc: 0.9678\n",
      "Epoch 477/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.0948 - acc: 0.9663\n",
      "Epoch 478/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1081 - acc: 0.9617\n",
      "Epoch 479/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0872 - acc: 0.9724\n",
      "Epoch 480/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0889 - acc: 0.9694\n",
      "Epoch 481/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0867 - acc: 0.9663\n",
      "Epoch 482/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0832 - acc: 0.9709\n",
      "Epoch 483/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0885 - acc: 0.9663\n",
      "Epoch 484/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0833 - acc: 0.9678\n",
      "Epoch 485/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0923 - acc: 0.9648\n",
      "Epoch 486/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1134 - acc: 0.9587\n",
      "Epoch 487/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2262 - acc: 0.9296\n",
      "Epoch 488/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 111us/step - loss: 0.1279 - acc: 0.9418\n",
      "Epoch 489/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0933 - acc: 0.9632\n",
      "Epoch 490/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0977 - acc: 0.9709\n",
      "Epoch 491/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1075 - acc: 0.9617\n",
      "Epoch 492/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0952 - acc: 0.9632\n",
      "Epoch 493/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0851 - acc: 0.9648\n",
      "Epoch 494/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0884 - acc: 0.9663\n",
      "Epoch 495/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1121 - acc: 0.9602\n",
      "Epoch 496/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1627 - acc: 0.9387\n",
      "Epoch 497/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1247 - acc: 0.9587\n",
      "Epoch 498/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1143 - acc: 0.9617\n",
      "Epoch 499/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0938 - acc: 0.9663\n",
      "Epoch 500/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0904 - acc: 0.9678\n",
      "Epoch 501/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0798 - acc: 0.9709\n",
      "Epoch 502/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1228 - acc: 0.9541\n",
      "Epoch 503/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0955 - acc: 0.9648\n",
      "Epoch 504/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0881 - acc: 0.9709\n",
      "Epoch 505/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.0761 - acc: 0.9694\n",
      "Epoch 506/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0755 - acc: 0.9770\n",
      "Epoch 507/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1068 - acc: 0.9556\n",
      "Epoch 508/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0962 - acc: 0.9632\n",
      "Epoch 509/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1525 - acc: 0.9433\n",
      "Epoch 510/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1166 - acc: 0.9449\n",
      "Epoch 511/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0958 - acc: 0.9587\n",
      "Epoch 512/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0932 - acc: 0.9694\n",
      "Epoch 513/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0824 - acc: 0.9648\n",
      "Epoch 514/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0753 - acc: 0.9786\n",
      "Epoch 515/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0962 - acc: 0.9617\n",
      "Epoch 516/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1042 - acc: 0.9663\n",
      "Epoch 517/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1438 - acc: 0.9495\n",
      "Epoch 518/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1065 - acc: 0.9617\n",
      "Epoch 519/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.1152 - acc: 0.9648\n",
      "Epoch 520/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0925 - acc: 0.9678\n",
      "Epoch 521/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1246 - acc: 0.9479\n",
      "Epoch 522/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0900 - acc: 0.9663\n",
      "Epoch 523/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0838 - acc: 0.9648\n",
      "Epoch 524/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.0949 - acc: 0.9617\n",
      "Epoch 525/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.1372 - acc: 0.9372\n",
      "Epoch 526/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1057 - acc: 0.9541\n",
      "Epoch 527/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0832 - acc: 0.9632\n",
      "Epoch 528/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1500 - acc: 0.9479\n",
      "Epoch 529/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1061 - acc: 0.9541\n",
      "Epoch 530/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0996 - acc: 0.9571\n",
      "Epoch 531/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.0909 - acc: 0.9663\n",
      "Epoch 532/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.0777 - acc: 0.9740\n",
      "Epoch 533/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0891 - acc: 0.9617\n",
      "Epoch 534/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1174 - acc: 0.9541\n",
      "Epoch 535/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0929 - acc: 0.9663\n",
      "Epoch 536/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1866 - acc: 0.9188\n",
      "Epoch 537/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1970 - acc: 0.9495\n",
      "Epoch 538/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0908 - acc: 0.9678\n",
      "Epoch 539/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1431 - acc: 0.9464\n",
      "Epoch 540/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1302 - acc: 0.9479\n",
      "Epoch 541/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0975 - acc: 0.9602\n",
      "Epoch 542/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0746 - acc: 0.9740\n",
      "Epoch 543/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.1771 - acc: 0.9510\n",
      "Epoch 544/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.2850 - acc: 0.9112\n",
      "Epoch 545/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.2207 - acc: 0.9234\n",
      "Epoch 546/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.1376 - acc: 0.9525\n",
      "Epoch 547/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1197 - acc: 0.9556\n",
      "Epoch 548/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1066 - acc: 0.9632\n",
      "Epoch 549/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0841 - acc: 0.9678\n",
      "Epoch 550/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0855 - acc: 0.9694\n",
      "Epoch 551/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1716 - acc: 0.9663\n",
      "Epoch 552/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0912 - acc: 0.9648\n",
      "Epoch 553/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0796 - acc: 0.9709\n",
      "Epoch 554/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0787 - acc: 0.9724\n",
      "Epoch 555/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0836 - acc: 0.9724\n",
      "Epoch 556/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0759 - acc: 0.9709\n",
      "Epoch 557/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0769 - acc: 0.9755\n",
      "Epoch 558/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0850 - acc: 0.9724\n",
      "Epoch 559/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0791 - acc: 0.9709\n",
      "Epoch 560/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0769 - acc: 0.9724\n",
      "Epoch 561/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0917 - acc: 0.9648\n",
      "Epoch 562/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0754 - acc: 0.9724\n",
      "Epoch 563/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0759 - acc: 0.9724\n",
      "Epoch 564/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0717 - acc: 0.9740\n",
      "Epoch 565/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0776 - acc: 0.9694\n",
      "Epoch 566/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 0.0748 - acc: 0.9724\n",
      "Epoch 567/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0694 - acc: 0.9709\n",
      "Epoch 568/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0729 - acc: 0.9755\n",
      "Epoch 569/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 110us/step - loss: 0.0777 - acc: 0.9632\n",
      "Epoch 570/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.0738 - acc: 0.9694\n",
      "Epoch 571/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0723 - acc: 0.9755\n",
      "Epoch 572/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.0684 - acc: 0.9755\n",
      "Epoch 573/1000\n",
      "653/653 [==============================] - 0s 148us/step - loss: 0.0678 - acc: 0.9709\n",
      "Epoch 574/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.0650 - acc: 0.9755\n",
      "Epoch 575/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0598 - acc: 0.9740\n",
      "Epoch 576/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2068 - acc: 0.9541\n",
      "Epoch 577/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1741 - acc: 0.9556\n",
      "Epoch 578/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0690 - acc: 0.9694\n",
      "Epoch 579/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0730 - acc: 0.9786\n",
      "Epoch 580/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0606 - acc: 0.9770\n",
      "Epoch 581/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1474 - acc: 0.9479\n",
      "Epoch 582/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1068 - acc: 0.9495\n",
      "Epoch 583/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0955 - acc: 0.9663\n",
      "Epoch 584/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1433 - acc: 0.9495\n",
      "Epoch 585/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.2368 - acc: 0.9234\n",
      "Epoch 586/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1154 - acc: 0.9571\n",
      "Epoch 587/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.0834 - acc: 0.9694\n",
      "Epoch 588/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0691 - acc: 0.9740\n",
      "Epoch 589/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0664 - acc: 0.9786\n",
      "Epoch 590/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0586 - acc: 0.9786\n",
      "Epoch 591/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0618 - acc: 0.9740\n",
      "Epoch 592/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0723 - acc: 0.9678\n",
      "Epoch 593/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0706 - acc: 0.9709\n",
      "Epoch 594/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.4657 - acc: 0.8836\n",
      "Epoch 595/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.6244 - acc: 0.9066\n",
      "Epoch 596/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.7552 - acc: 0.8729\n",
      "Epoch 597/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.8571 - acc: 0.8622\n",
      "Epoch 598/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.6314 - acc: 0.8683\n",
      "Epoch 599/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.5366 - acc: 0.9142\n",
      "Epoch 600/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.3518 - acc: 0.9342\n",
      "Epoch 601/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.1954 - acc: 0.9571\n",
      "Epoch 602/1000\n",
      "653/653 [==============================] - 0s 148us/step - loss: 0.1495 - acc: 0.9541\n",
      "Epoch 603/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1691 - acc: 0.9464\n",
      "Epoch 604/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0914 - acc: 0.9694\n",
      "Epoch 605/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.1030 - acc: 0.9663\n",
      "Epoch 606/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2817 - acc: 0.9449\n",
      "Epoch 607/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 0.1494 - acc: 0.9602\n",
      "Epoch 608/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1568 - acc: 0.9602\n",
      "Epoch 609/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1228 - acc: 0.9541\n",
      "Epoch 610/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.2253 - acc: 0.9541\n",
      "Epoch 611/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.2756 - acc: 0.9403\n",
      "Epoch 612/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1704 - acc: 0.9433\n",
      "Epoch 613/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0921 - acc: 0.9724\n",
      "Epoch 614/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.1063 - acc: 0.9663\n",
      "Epoch 615/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.2100 - acc: 0.9571\n",
      "Epoch 616/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2777 - acc: 0.9418\n",
      "Epoch 617/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1823 - acc: 0.9464\n",
      "Epoch 618/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.1503 - acc: 0.9602\n",
      "Epoch 619/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1161 - acc: 0.9617\n",
      "Epoch 620/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0930 - acc: 0.9648\n",
      "Epoch 621/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0899 - acc: 0.9740\n",
      "Epoch 622/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0890 - acc: 0.9724\n",
      "Epoch 623/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0770 - acc: 0.9724\n",
      "Epoch 624/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0819 - acc: 0.9740\n",
      "Epoch 625/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0916 - acc: 0.9709\n",
      "Epoch 626/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0797 - acc: 0.9724\n",
      "Epoch 627/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0723 - acc: 0.9755\n",
      "Epoch 628/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0698 - acc: 0.9770\n",
      "Epoch 629/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0733 - acc: 0.9755\n",
      "Epoch 630/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0751 - acc: 0.9740\n",
      "Epoch 631/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0727 - acc: 0.9770\n",
      "Epoch 632/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0640 - acc: 0.9786\n",
      "Epoch 633/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0878 - acc: 0.9648\n",
      "Epoch 634/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0770 - acc: 0.9724\n",
      "Epoch 635/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0687 - acc: 0.9755\n",
      "Epoch 636/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2912 - acc: 0.9510\n",
      "Epoch 637/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.3223 - acc: 0.9066\n",
      "Epoch 638/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1282 - acc: 0.9556\n",
      "Epoch 639/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0956 - acc: 0.9602\n",
      "Epoch 640/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0996 - acc: 0.9617\n",
      "Epoch 641/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1009 - acc: 0.9617\n",
      "Epoch 642/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.1008 - acc: 0.9556\n",
      "Epoch 643/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0680 - acc: 0.9755\n",
      "Epoch 644/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0995 - acc: 0.9617\n",
      "Epoch 645/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0852 - acc: 0.9724\n",
      "Epoch 646/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1705 - acc: 0.9510\n",
      "Epoch 647/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0943 - acc: 0.9694\n",
      "Epoch 648/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0736 - acc: 0.9663\n",
      "Epoch 649/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0637 - acc: 0.9770\n",
      "Epoch 650/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 125us/step - loss: 0.0722 - acc: 0.9678\n",
      "Epoch 651/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0647 - acc: 0.9770\n",
      "Epoch 652/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0789 - acc: 0.9755\n",
      "Epoch 653/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0707 - acc: 0.9709\n",
      "Epoch 654/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0626 - acc: 0.9786\n",
      "Epoch 655/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0551 - acc: 0.9801\n",
      "Epoch 656/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.0561 - acc: 0.9801\n",
      "Epoch 657/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0569 - acc: 0.9770\n",
      "Epoch 658/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0554 - acc: 0.9755\n",
      "Epoch 659/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0598 - acc: 0.9755\n",
      "Epoch 660/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.0694 - acc: 0.9678\n",
      "Epoch 661/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0686 - acc: 0.9755\n",
      "Epoch 662/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0683 - acc: 0.9724\n",
      "Epoch 663/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0587 - acc: 0.9786\n",
      "Epoch 664/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0545 - acc: 0.9816\n",
      "Epoch 665/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1830 - acc: 0.9510\n",
      "Epoch 666/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2068 - acc: 0.9464\n",
      "Epoch 667/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1810 - acc: 0.9311\n",
      "Epoch 668/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1368 - acc: 0.9418\n",
      "Epoch 669/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.1291 - acc: 0.9525\n",
      "Epoch 670/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.1176 - acc: 0.9525\n",
      "Epoch 671/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1337 - acc: 0.9510\n",
      "Epoch 672/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1072 - acc: 0.9525\n",
      "Epoch 673/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2566 - acc: 0.9096\n",
      "Epoch 674/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1592 - acc: 0.9464\n",
      "Epoch 675/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1081 - acc: 0.9571\n",
      "Epoch 676/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0867 - acc: 0.9632\n",
      "Epoch 677/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0645 - acc: 0.9801\n",
      "Epoch 678/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0596 - acc: 0.9832\n",
      "Epoch 679/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0560 - acc: 0.9816\n",
      "Epoch 680/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0607 - acc: 0.9786\n",
      "Epoch 681/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0537 - acc: 0.9801\n",
      "Epoch 682/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0492 - acc: 0.9816\n",
      "Epoch 683/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0883 - acc: 0.9709\n",
      "Epoch 684/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.0948 - acc: 0.9770\n",
      "Epoch 685/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0651 - acc: 0.9740\n",
      "Epoch 686/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.0561 - acc: 0.9770\n",
      "Epoch 687/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0569 - acc: 0.9801\n",
      "Epoch 688/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0549 - acc: 0.9786\n",
      "Epoch 689/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0613 - acc: 0.9755\n",
      "Epoch 690/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0529 - acc: 0.9801\n",
      "Epoch 691/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0561 - acc: 0.9786\n",
      "Epoch 692/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0548 - acc: 0.9801\n",
      "Epoch 693/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0558 - acc: 0.9801\n",
      "Epoch 694/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.2642 - acc: 0.9495\n",
      "Epoch 695/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2004 - acc: 0.9510\n",
      "Epoch 696/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1169 - acc: 0.9617\n",
      "Epoch 697/1000\n",
      "653/653 [==============================] - 0s 150us/step - loss: 0.0791 - acc: 0.9786\n",
      "Epoch 698/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0996 - acc: 0.9632\n",
      "Epoch 699/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1952 - acc: 0.9449\n",
      "Epoch 700/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.2168 - acc: 0.9326\n",
      "Epoch 701/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1309 - acc: 0.9617\n",
      "Epoch 702/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0946 - acc: 0.9709\n",
      "Epoch 703/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0798 - acc: 0.9709\n",
      "Epoch 704/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0730 - acc: 0.9740\n",
      "Epoch 705/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0705 - acc: 0.9786\n",
      "Epoch 706/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0595 - acc: 0.9755\n",
      "Epoch 707/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0573 - acc: 0.9755\n",
      "Epoch 708/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0623 - acc: 0.9770\n",
      "Epoch 709/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0523 - acc: 0.9801\n",
      "Epoch 710/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0565 - acc: 0.9801\n",
      "Epoch 711/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.0599 - acc: 0.9770\n",
      "Epoch 712/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0585 - acc: 0.9755\n",
      "Epoch 713/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0548 - acc: 0.9801\n",
      "Epoch 714/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0493 - acc: 0.9801\n",
      "Epoch 715/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0477 - acc: 0.9801\n",
      "Epoch 716/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0478 - acc: 0.9816\n",
      "Epoch 717/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0526 - acc: 0.9786\n",
      "Epoch 718/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0515 - acc: 0.9770\n",
      "Epoch 719/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0529 - acc: 0.9816\n",
      "Epoch 720/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0555 - acc: 0.9786\n",
      "Epoch 721/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0651 - acc: 0.9740\n",
      "Epoch 722/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0481 - acc: 0.9786\n",
      "Epoch 723/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0483 - acc: 0.9816\n",
      "Epoch 724/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0445 - acc: 0.9816\n",
      "Epoch 725/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0551 - acc: 0.9816\n",
      "Epoch 726/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.0608 - acc: 0.9755\n",
      "Epoch 727/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0862 - acc: 0.9678\n",
      "Epoch 728/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0632 - acc: 0.9724\n",
      "Epoch 729/1000\n",
      "653/653 [==============================] - 0s 130us/step - loss: 0.0520 - acc: 0.9755\n",
      "Epoch 730/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.2206 - acc: 0.9510\n",
      "Epoch 731/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 133us/step - loss: 0.4862 - acc: 0.8928\n",
      "Epoch 732/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.2566 - acc: 0.9112\n",
      "Epoch 733/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.1854 - acc: 0.9311\n",
      "Epoch 734/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1867 - acc: 0.9387\n",
      "Epoch 735/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1891 - acc: 0.9265\n",
      "Epoch 736/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1863 - acc: 0.9326\n",
      "Epoch 737/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1273 - acc: 0.9510\n",
      "Epoch 738/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.0875 - acc: 0.9663\n",
      "Epoch 739/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0972 - acc: 0.9724\n",
      "Epoch 740/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0954 - acc: 0.9724\n",
      "Epoch 741/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.1320 - acc: 0.9602\n",
      "Epoch 742/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1576 - acc: 0.9403\n",
      "Epoch 743/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1226 - acc: 0.9510\n",
      "Epoch 744/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1660 - acc: 0.9479\n",
      "Epoch 745/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1251 - acc: 0.9525\n",
      "Epoch 746/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.4219 - acc: 0.9204\n",
      "Epoch 747/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.2882 - acc: 0.9204\n",
      "Epoch 748/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1707 - acc: 0.9234\n",
      "Epoch 749/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1625 - acc: 0.9449\n",
      "Epoch 750/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.2634 - acc: 0.9234\n",
      "Epoch 751/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.3970 - acc: 0.9158\n",
      "Epoch 752/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.2041 - acc: 0.9372\n",
      "Epoch 753/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1328 - acc: 0.9541\n",
      "Epoch 754/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1085 - acc: 0.9663\n",
      "Epoch 755/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1098 - acc: 0.9602\n",
      "Epoch 756/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0917 - acc: 0.9632\n",
      "Epoch 757/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1028 - acc: 0.9694\n",
      "Epoch 758/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0833 - acc: 0.9709\n",
      "Epoch 759/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0808 - acc: 0.9755\n",
      "Epoch 760/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0681 - acc: 0.9740\n",
      "Epoch 761/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0691 - acc: 0.9740\n",
      "Epoch 762/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0776 - acc: 0.9724\n",
      "Epoch 763/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0690 - acc: 0.9770\n",
      "Epoch 764/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0627 - acc: 0.9801\n",
      "Epoch 765/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0694 - acc: 0.9740\n",
      "Epoch 766/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0607 - acc: 0.9740\n",
      "Epoch 767/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0637 - acc: 0.9786\n",
      "Epoch 768/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0754 - acc: 0.9770\n",
      "Epoch 769/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0667 - acc: 0.9740\n",
      "Epoch 770/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.0701 - acc: 0.9740\n",
      "Epoch 771/1000\n",
      "653/653 [==============================] - 0s 148us/step - loss: 0.1014 - acc: 0.9587\n",
      "Epoch 772/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.1148 - acc: 0.9541\n",
      "Epoch 773/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.1410 - acc: 0.9479\n",
      "Epoch 774/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1487 - acc: 0.9418\n",
      "Epoch 775/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1413 - acc: 0.9495\n",
      "Epoch 776/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0884 - acc: 0.9678\n",
      "Epoch 777/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.2898 - acc: 0.9326\n",
      "Epoch 778/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.5313 - acc: 0.9005\n",
      "Epoch 779/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.2587 - acc: 0.9005\n",
      "Epoch 780/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.1840 - acc: 0.9326\n",
      "Epoch 781/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.2042 - acc: 0.9372\n",
      "Epoch 782/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1947 - acc: 0.9449\n",
      "Epoch 783/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1643 - acc: 0.9617\n",
      "Epoch 784/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1588 - acc: 0.9571\n",
      "Epoch 785/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1631 - acc: 0.9525\n",
      "Epoch 786/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1120 - acc: 0.9556\n",
      "Epoch 787/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1060 - acc: 0.9571\n",
      "Epoch 788/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1289 - acc: 0.9571\n",
      "Epoch 789/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1211 - acc: 0.9571\n",
      "Epoch 790/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0919 - acc: 0.9648\n",
      "Epoch 791/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1080 - acc: 0.9694\n",
      "Epoch 792/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0824 - acc: 0.9617\n",
      "Epoch 793/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0849 - acc: 0.9709\n",
      "Epoch 794/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0858 - acc: 0.9663\n",
      "Epoch 795/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0787 - acc: 0.9694\n",
      "Epoch 796/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1296 - acc: 0.9648\n",
      "Epoch 797/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1827 - acc: 0.9617\n",
      "Epoch 798/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0909 - acc: 0.9678\n",
      "Epoch 799/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0733 - acc: 0.9740\n",
      "Epoch 800/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0677 - acc: 0.9786\n",
      "Epoch 801/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0746 - acc: 0.9724\n",
      "Epoch 802/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0689 - acc: 0.9755\n",
      "Epoch 803/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0696 - acc: 0.9740\n",
      "Epoch 804/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0592 - acc: 0.9786\n",
      "Epoch 805/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0654 - acc: 0.9709\n",
      "Epoch 806/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0609 - acc: 0.9755\n",
      "Epoch 807/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0642 - acc: 0.9740\n",
      "Epoch 808/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0564 - acc: 0.9801\n",
      "Epoch 809/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0620 - acc: 0.9755\n",
      "Epoch 810/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0597 - acc: 0.9724\n",
      "Epoch 811/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0602 - acc: 0.9770\n",
      "Epoch 812/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 111us/step - loss: 0.0671 - acc: 0.9724\n",
      "Epoch 813/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0584 - acc: 0.9755\n",
      "Epoch 814/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0698 - acc: 0.9740\n",
      "Epoch 815/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0743 - acc: 0.9648\n",
      "Epoch 816/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.0579 - acc: 0.9770\n",
      "Epoch 817/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0663 - acc: 0.9678\n",
      "Epoch 818/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0808 - acc: 0.9709\n",
      "Epoch 819/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0743 - acc: 0.9648\n",
      "Epoch 820/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0890 - acc: 0.9632\n",
      "Epoch 821/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0548 - acc: 0.9770\n",
      "Epoch 822/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0574 - acc: 0.9694\n",
      "Epoch 823/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0487 - acc: 0.9816\n",
      "Epoch 824/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0530 - acc: 0.9770\n",
      "Epoch 825/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0476 - acc: 0.9816\n",
      "Epoch 826/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0458 - acc: 0.9832\n",
      "Epoch 827/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0485 - acc: 0.9786\n",
      "Epoch 828/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0420 - acc: 0.9786\n",
      "Epoch 829/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0660 - acc: 0.9724\n",
      "Epoch 830/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0719 - acc: 0.9724\n",
      "Epoch 831/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0610 - acc: 0.9786\n",
      "Epoch 832/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0623 - acc: 0.9755\n",
      "Epoch 833/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0630 - acc: 0.9740\n",
      "Epoch 834/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0468 - acc: 0.9770\n",
      "Epoch 835/1000\n",
      "653/653 [==============================] - 0s 127us/step - loss: 0.0476 - acc: 0.9786\n",
      "Epoch 836/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0515 - acc: 0.9786\n",
      "Epoch 837/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.0532 - acc: 0.9786\n",
      "Epoch 838/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0456 - acc: 0.9832\n",
      "Epoch 839/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0743 - acc: 0.9724\n",
      "Epoch 840/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0540 - acc: 0.9755\n",
      "Epoch 841/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0473 - acc: 0.9786\n",
      "Epoch 842/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0436 - acc: 0.9816\n",
      "Epoch 843/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0417 - acc: 0.9801\n",
      "Epoch 844/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0441 - acc: 0.9786\n",
      "Epoch 845/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0426 - acc: 0.9801\n",
      "Epoch 846/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0541 - acc: 0.9786\n",
      "Epoch 847/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0557 - acc: 0.9801\n",
      "Epoch 848/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0529 - acc: 0.9740\n",
      "Epoch 849/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0644 - acc: 0.9740\n",
      "Epoch 850/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0687 - acc: 0.9770\n",
      "Epoch 851/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0749 - acc: 0.9678\n",
      "Epoch 852/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1417 - acc: 0.9648\n",
      "Epoch 853/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2128 - acc: 0.9479\n",
      "Epoch 854/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1052 - acc: 0.9602\n",
      "Epoch 855/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.0833 - acc: 0.9617\n",
      "Epoch 856/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0938 - acc: 0.9648\n",
      "Epoch 857/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0766 - acc: 0.9724\n",
      "Epoch 858/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0933 - acc: 0.9724\n",
      "Epoch 859/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.1509 - acc: 0.9617\n",
      "Epoch 860/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0885 - acc: 0.9617\n",
      "Epoch 861/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1489 - acc: 0.9510\n",
      "Epoch 862/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.1236 - acc: 0.9571\n",
      "Epoch 863/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1155 - acc: 0.9694\n",
      "Epoch 864/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1146 - acc: 0.9663\n",
      "Epoch 865/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0712 - acc: 0.9740\n",
      "Epoch 866/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0559 - acc: 0.9755\n",
      "Epoch 867/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0554 - acc: 0.9770\n",
      "Epoch 868/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0620 - acc: 0.9755\n",
      "Epoch 869/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0475 - acc: 0.9801\n",
      "Epoch 870/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0543 - acc: 0.9724\n",
      "Epoch 871/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0508 - acc: 0.9801\n",
      "Epoch 872/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0504 - acc: 0.9786\n",
      "Epoch 873/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0563 - acc: 0.9755\n",
      "Epoch 874/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0499 - acc: 0.9755\n",
      "Epoch 875/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0465 - acc: 0.9847\n",
      "Epoch 876/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0434 - acc: 0.9801\n",
      "Epoch 877/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.0642 - acc: 0.9832\n",
      "Epoch 878/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0600 - acc: 0.9770\n",
      "Epoch 879/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0474 - acc: 0.9801\n",
      "Epoch 880/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0512 - acc: 0.9801\n",
      "Epoch 881/1000\n",
      "653/653 [==============================] - 0s 147us/step - loss: 0.0774 - acc: 0.9740\n",
      "Epoch 882/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.0927 - acc: 0.9740\n",
      "Epoch 883/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 0.0571 - acc: 0.9801\n",
      "Epoch 884/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0413 - acc: 0.9832\n",
      "Epoch 885/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0371 - acc: 0.9832\n",
      "Epoch 886/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0699 - acc: 0.9663\n",
      "Epoch 887/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.1158 - acc: 0.9571\n",
      "Epoch 888/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1012 - acc: 0.9648\n",
      "Epoch 889/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0709 - acc: 0.9648\n",
      "Epoch 890/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.1668 - acc: 0.9556\n",
      "Epoch 891/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.3652 - acc: 0.9035\n",
      "Epoch 892/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.2094 - acc: 0.9357\n",
      "Epoch 893/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 111us/step - loss: 0.2089 - acc: 0.9372\n",
      "Epoch 894/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.1694 - acc: 0.9495\n",
      "Epoch 895/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1360 - acc: 0.9495\n",
      "Epoch 896/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.1306 - acc: 0.9525\n",
      "Epoch 897/1000\n",
      "653/653 [==============================] - 0s 104us/step - loss: 0.1104 - acc: 0.9541\n",
      "Epoch 898/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0840 - acc: 0.9709\n",
      "Epoch 899/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1262 - acc: 0.9525\n",
      "Epoch 900/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.1015 - acc: 0.9632\n",
      "Epoch 901/1000\n",
      "653/653 [==============================] - 0s 114us/step - loss: 0.0905 - acc: 0.9694\n",
      "Epoch 902/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0814 - acc: 0.9709\n",
      "Epoch 903/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0686 - acc: 0.9740\n",
      "Epoch 904/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0627 - acc: 0.9801\n",
      "Epoch 905/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.0522 - acc: 0.9801\n",
      "Epoch 906/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0708 - acc: 0.9724\n",
      "Epoch 907/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0676 - acc: 0.9709\n",
      "Epoch 908/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0751 - acc: 0.9724\n",
      "Epoch 909/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0588 - acc: 0.9770\n",
      "Epoch 910/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0501 - acc: 0.9832\n",
      "Epoch 911/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0522 - acc: 0.9786\n",
      "Epoch 912/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0462 - acc: 0.9847\n",
      "Epoch 913/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0483 - acc: 0.9832\n",
      "Epoch 914/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0443 - acc: 0.9801\n",
      "Epoch 915/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.0471 - acc: 0.9801\n",
      "Epoch 916/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0676 - acc: 0.9724\n",
      "Epoch 917/1000\n",
      "653/653 [==============================] - 0s 144us/step - loss: 0.0574 - acc: 0.9786\n",
      "Epoch 918/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0508 - acc: 0.9801\n",
      "Epoch 919/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0433 - acc: 0.9801\n",
      "Epoch 920/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0407 - acc: 0.9832\n",
      "Epoch 921/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0558 - acc: 0.9755\n",
      "Epoch 922/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.1363 - acc: 0.9678\n",
      "Epoch 923/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0974 - acc: 0.9678\n",
      "Epoch 924/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0618 - acc: 0.9755\n",
      "Epoch 925/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0460 - acc: 0.9816\n",
      "Epoch 926/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0445 - acc: 0.9786\n",
      "Epoch 927/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0418 - acc: 0.9801\n",
      "Epoch 928/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0379 - acc: 0.9832\n",
      "Epoch 929/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0379 - acc: 0.9847\n",
      "Epoch 930/1000\n",
      "653/653 [==============================] - 0s 139us/step - loss: 0.0370 - acc: 0.9832\n",
      "Epoch 931/1000\n",
      "653/653 [==============================] - 0s 136us/step - loss: 0.0388 - acc: 0.9801\n",
      "Epoch 932/1000\n",
      "653/653 [==============================] - 0s 108us/step - loss: 0.0371 - acc: 0.9786\n",
      "Epoch 933/1000\n",
      "653/653 [==============================] - 0s 105us/step - loss: 0.0335 - acc: 0.9816\n",
      "Epoch 934/1000\n",
      "653/653 [==============================] - 0s 107us/step - loss: 0.0336 - acc: 0.9847\n",
      "Epoch 935/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0295 - acc: 0.9862\n",
      "Epoch 936/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1011 - acc: 0.9847\n",
      "Epoch 937/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1196 - acc: 0.9724\n",
      "Epoch 938/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1601 - acc: 0.9602\n",
      "Epoch 939/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0966 - acc: 0.9663\n",
      "Epoch 940/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0705 - acc: 0.9648\n",
      "Epoch 941/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0596 - acc: 0.9724\n",
      "Epoch 942/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0641 - acc: 0.9724\n",
      "Epoch 943/1000\n",
      "653/653 [==============================] - 0s 134us/step - loss: 0.0554 - acc: 0.9770\n",
      "Epoch 944/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0806 - acc: 0.9694\n",
      "Epoch 945/1000\n",
      "653/653 [==============================] - 0s 137us/step - loss: 0.0781 - acc: 0.9678\n",
      "Epoch 946/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0818 - acc: 0.9648\n",
      "Epoch 947/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1147 - acc: 0.9479\n",
      "Epoch 948/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.1438 - acc: 0.9510\n",
      "Epoch 949/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0897 - acc: 0.9709\n",
      "Epoch 950/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0845 - acc: 0.9678\n",
      "Epoch 951/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0661 - acc: 0.9709\n",
      "Epoch 952/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0567 - acc: 0.9786\n",
      "Epoch 953/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0603 - acc: 0.9801\n",
      "Epoch 954/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0462 - acc: 0.9832\n",
      "Epoch 955/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0509 - acc: 0.9832\n",
      "Epoch 956/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.1285 - acc: 0.9724\n",
      "Epoch 957/1000\n",
      "653/653 [==============================] - 0s 125us/step - loss: 0.3901 - acc: 0.9158\n",
      "Epoch 958/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.1762 - acc: 0.9418\n",
      "Epoch 959/1000\n",
      "653/653 [==============================] - 0s 131us/step - loss: 0.1466 - acc: 0.9541\n",
      "Epoch 960/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1189 - acc: 0.9632\n",
      "Epoch 961/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0942 - acc: 0.9694\n",
      "Epoch 962/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0776 - acc: 0.9786\n",
      "Epoch 963/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0741 - acc: 0.9755\n",
      "Epoch 964/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0768 - acc: 0.9740\n",
      "Epoch 965/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0631 - acc: 0.9786\n",
      "Epoch 966/1000\n",
      "653/653 [==============================] - 0s 119us/step - loss: 0.0647 - acc: 0.9694\n",
      "Epoch 967/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0624 - acc: 0.9724\n",
      "Epoch 968/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0730 - acc: 0.9755\n",
      "Epoch 969/1000\n",
      "653/653 [==============================] - 0s 124us/step - loss: 0.0693 - acc: 0.9755\n",
      "Epoch 970/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0535 - acc: 0.9786\n",
      "Epoch 971/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0754 - acc: 0.9724\n",
      "Epoch 972/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0713 - acc: 0.9755\n",
      "Epoch 973/1000\n",
      "653/653 [==============================] - 0s 141us/step - loss: 0.0572 - acc: 0.9816\n",
      "Epoch 974/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "653/653 [==============================] - 0s 115us/step - loss: 0.0540 - acc: 0.9801\n",
      "Epoch 975/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0543 - acc: 0.9816\n",
      "Epoch 976/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0752 - acc: 0.9801\n",
      "Epoch 977/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0926 - acc: 0.9709\n",
      "Epoch 978/1000\n",
      "653/653 [==============================] - 0s 115us/step - loss: 0.0603 - acc: 0.9755\n",
      "Epoch 979/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0497 - acc: 0.9786\n",
      "Epoch 980/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0426 - acc: 0.9832\n",
      "Epoch 981/1000\n",
      "653/653 [==============================] - 0s 118us/step - loss: 0.0519 - acc: 0.9847\n",
      "Epoch 982/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.0545 - acc: 0.9816\n",
      "Epoch 983/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0506 - acc: 0.9832\n",
      "Epoch 984/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0492 - acc: 0.9801\n",
      "Epoch 985/1000\n",
      "653/653 [==============================] - 0s 116us/step - loss: 0.0463 - acc: 0.9847\n",
      "Epoch 986/1000\n",
      "653/653 [==============================] - 0s 121us/step - loss: 0.0461 - acc: 0.9832\n",
      "Epoch 987/1000\n",
      "653/653 [==============================] - 0s 133us/step - loss: 0.0417 - acc: 0.9832\n",
      "Epoch 988/1000\n",
      "653/653 [==============================] - 0s 122us/step - loss: 0.0420 - acc: 0.9877\n",
      "Epoch 989/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0486 - acc: 0.9832\n",
      "Epoch 990/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0509 - acc: 0.9832\n",
      "Epoch 991/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0631 - acc: 0.9832\n",
      "Epoch 992/1000\n",
      "653/653 [==============================] - 0s 113us/step - loss: 0.0432 - acc: 0.9847\n",
      "Epoch 993/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.0540 - acc: 0.9816\n",
      "Epoch 994/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0741 - acc: 0.9755\n",
      "Epoch 995/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.0699 - acc: 0.9709\n",
      "Epoch 996/1000\n",
      "653/653 [==============================] - 0s 110us/step - loss: 0.0606 - acc: 0.9755\n",
      "Epoch 997/1000\n",
      "653/653 [==============================] - 0s 114us/step - loss: 0.0584 - acc: 0.9755\n",
      "Epoch 998/1000\n",
      "653/653 [==============================] - 0s 112us/step - loss: 0.0840 - acc: 0.9663\n",
      "Epoch 999/1000\n",
      "653/653 [==============================] - 0s 111us/step - loss: 0.1658 - acc: 0.9449\n",
      "Epoch 1000/1000\n",
      "653/653 [==============================] - 0s 128us/step - loss: 0.1449 - acc: 0.9556\n"
     ]
    }
   ],
   "source": [
    "# verbose=2 prints one line per epoch instead of a per-step progress bar,\n",
    "# which keeps 1000 epochs of training from flooding the notebook output.\n",
    "history = model.fit(x, y, epochs=1000, verbose=2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x22e9627d5f8>]"
      ]
     },
     "execution_count": 71,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXeYFUXW/7+HJCAiKCMSRFAx4IqBETGzRhDMec2irIq7\npjWhi+/+TLvrgu8qKqAYMOBiQl4EFVcFUQEHhCEjQWFGlCQDw8DE8/vj3J7um+b2nbkzd27P9/M8\n/XR1dXV1Vd/b3z59qrpKVBWEEEKCRaN0F4AQQkjqobgTQkgAobgTQkgAobgTQkgAobgTQkgAobgT\nQkgAobgTQkgAobgTQkgAobgTQkgAaZKuE7dr1067du2artMTQkhGMnfu3E2qmpUoXdrEvWvXrsjJ\nyUnX6QkhJCMRkZ/8pKNbhhBCAgjFnRBCAgjFnRBCAgjFnRBCAgjFnRBCAgjFnRBCAgjFnRBCAkjm\nifuiRcCwYcCGDekuCSGE1FsyT9yXLAEefRTYuDHdJSGEkHpL5om7iK05sTchhMSF4k4IIQEkc8Wd\nEEJIXDJP3B1ouRNCSFwyT9zpliGEkIRQ3AkhJIBQ3AkhJIBQ3AkhJIBQ3AkhJIBQ3AkhJIBkrrgT\nQgiJS0JxF5HmIjJHRBaIyGIR+VuMNH1FpEBE5oeWYbVTXA+03AkhJC5NfKQpBnCaqhaKSFMAM0Vk\nqqrOikj3laoOTH0RI6BbhhBCEpJQ3FVVARSGNpuGlvQpK8WdEEIS4svnLiKNRWQ+gA0Apqnq7BjJ\nThCRXBGZKiKHp7SU4YWxNcWdEELi4kvcVbVcVY8C0BlAbxH5XUSSeQC6qGpPAM8CmBgrHxEZLCI5\nIpKzsbrjsVPcCSEkIUn1llHVrQC+ANAvIn6bqhaGwlMANBWRdjGOH6Oq2aqanZWVVb0SU9wJISQh\nfnrLZIlIm1C4BYAzASyLSLOviKmuiPQO5bs59cUFxZ0QQnzgp7dMBwCviUhjmGhPUNXJInILAKjq\nKACXALhVRMoA7ARwRaghNvVQ3AkhJCF+esvkAjg6RvwoT3gkgJGpLVoc+BETIYQkJPO+UHWg5U4I\nIXHJPHGnW4YQQhJCcSeEkABCcSeEkABCcSeEkABCcSeEkABCcSeEkACSueJOCCEkLpkn7g603Akh\nJC6ZJ+50yxBCSEIo7oQQEkAo7oQQEkAo7oQQEkAo7oQQEkAo7oQQEkAyV9wJIYTEJfPE3YGWOyGE\nxMXPHKrNRWSOiCwQkcUi8rcYaUREnhGRlSKSKyLH1E5xQbcMIYT4wM8cqsUATlPVQhFpCmCmiExV\n1VmeNP0BdA8txwF4IbROPRR3QghJSELLXY3C0GbT0BKprOcDGBdKOwtAGxHpkNqihqC4E0JIQnz5\n3EWksYjMB7ABwDRVnR2RpBOAdZ7tvFBc6qG4E0JIQnyJu6qWq+pRADoD6C0iv6vOyURksIjkiEjO\nxo0bq5MFxZ0QQnyQVG8ZVd0K4AsA/SJ25QPYz7PdORQXefwYVc1W1eysrKxky2pQ3AkhJCF+estk\niUibULgFgDMBLItINgnAtaFeM30AFKjq+pSX1gpRK9kSQkiQ8NNbpgOA10SkMexhMEFVJ4vILQCg\nqqMATAFwDoCVAIoA3FBL5XWh5U4IIXFJKO6qmgvg6BjxozxhBTAktUWLA90yhBCSkMz7QpXiTggh\nCaG4E0JIAKG4E0JIAKG4E0JIAKG4E0JIAKG4E0JIAMlccSeEEBKXzBN3B1ruhBASl8wTd7plCCEk\nIRR3QggJIBR3QggJIBR3QggJIBR3QggJIBR3QggJIJkr7oQQQuKSeeLuQMudEELiknniTrcMIYQk\nhOJOCCEBxM8E2fuJyBciskREFovIHTHS9BWRAhGZH1qG1U5xQXEnhBAf+JkguwzAPao6T0T2ADBX\nRKap6pKIdF+p6sDUFzECijshhCQkoe
WuqutVdV4ovB3AUgCdartgcaG4E0JIQpLyuYtIVwBHA5gd\nY/cJIpIrIlNF5PA4xw8WkRwRydm4cWPShQ1lYmuKOyGExMW3uItIKwDvAbhTVbdF7J4HoIuq9gTw\nLICJsfJQ1TGqmq2q2VlZWdUrMfu5E0JIQnyJu4g0hQn7m6r6fuR+Vd2mqoWh8BQATUWkXUpLGn3S\nWs2eEEIyGT+9ZQTAWABLVXVEnDT7htJBRHqH8t2cyoJ6TmZrijshhMTFT2+ZEwFcA2ChiMwPxQ0F\n0AUAVHUUgEsA3CoiZQB2ArhCtZbUl+JOCCEJSSjuqjoTQJWOblUdCWBkqgpVJRR3QghJCL9QJYSQ\nAJK54v7bb8B336W3LIQQUk/JXHEfNgzo3ZsWPCGExCBzxd2hoiI95SCEkHpM5ol7JOXl6S4BIYTU\nOzJP3CMtd4o7IYREQXEnhJAAQnEnhJAAQnEnhJAAkvniXlaWnnIQQkg9JvPFnZY7IYREQXEnhJAA\nQnEnhJAAknniHgnFnRBCosg8caflTgghCck8cW8UUWSKOyGERJH54s6ukIQQEoWfOVT3E5EvRGSJ\niCwWkTtipBEReUZEVopIrogcUzvFBd0yhBDiAz9zqJYBuEdV54nIHgDmisg0VV3iSdMfQPfQchyA\nF0Lr1EO3DCGEJCSh5a6q61V1Xii8HcBSAJ0ikp0PYJwaswC0EZEOKS8tQMudEEJ8kJTPXUS6Ajga\nwOyIXZ0ArPNs5yH6AQARGSwiOSKSs3HjxuRK6mYSvk1xJ4SQKHyLu4i0AvAegDtVdVt1TqaqY1Q1\nW1Wzs7KyqpNFNBR3QgiJwpe4i0hTmLC/qarvx0iSD2A/z3bnUFztw94yhBAShZ/eMgJgLIClqjoi\nTrJJAK4N9ZrpA6BAVdensJzxoeVOCCFR+OktcyKAawAsFJH5obihALoAgKqOAjAFwDkAVgIoAnBD\n6osahwULgGbNgJNOqrNTEkJIfSehuKvqTACSII0CGJKqQiXF3Xc7hUjL6QkhpD6SeV+oEkIISQjF\nnRBCAgjFnRBCAkhwxL2iIt0lIISQekNwxH3nznSXgBBC6g3BEfcdO9JdAkIIqTdQ3AkhJIAER9yL\nitJdAkIIqTdktrgfdJAbpuVOCCGVZLa4N2vmhinuhBBSSWaLe+PGbphuGUIIqSSzxb2JZ2gcWu6E\nEFIJxZ0QQgJIZou71y1DcSeEkEoyW9xLS2OHCSGkgZPZ4j53rhvmjEyEEFJJZou7Fw4cRgghlfiZ\nQ/VlEdkgIovi7O8rIgUiMj+0DEt9MX1Ay50QQirxM4fqqwBGAhhXRZqvVHVgSkpUXSjuhBBSSULL\nXVVnANhSB2WpGXTLEEJIJanyuZ8gIrkiMlVEDk9RnslBy50QQirx45ZJxDwAXVS1UETOATARQPdY\nCUVkMIDBANClS5cUnNoDxZ0QQiqpseWuqttUtTAUngKgqYi0i5N2jKpmq2p2VlZWTU8dDsWdEEIq\nqbG4i8i+IiKhcO9Qnptrmm/S0OdOCCGVJHTLiMh4AH0BtBORPACPAGgKAKo6CsAlAG4VkTIAOwFc\noapaayWOBy13QgipJKG4q+qVCfaPhHWVTC8Ud0IIqSSzv1Bt53HtU9wJIaSSzBb3775zw/S5E0JI\nJZkt7m3bumFa7oQQUklmi7t3sg6KOyGEVJLZ4t60qRumuBNCSCWp+EK17tm0ycTca7kn43PfvBnY\ne+/Ul4sQQuoJmWm57703sM8+QCNP8X/7DbjqKmBLgjHOvv/eetm8/nrtlpEQQtJIZoq7lxdftPWH\nHwJvvQUMH151+oULbT1tWu2WixBC0kjmi/tNNwEHH+xu20gIiUnDR7SEEFJXZL64A8CKFekuASGE\n1CuCIe5eElnufi17QgjJYIIn7n6hW4YQEmCCJ+603AkhJIDi7hda7oSQANPwxJ2WOyGkARA8cWdX\nSE
IICaC4J4KWOyGkAZBQ3EXkZRHZICKL4uwXEXlGRFaKSK6IHJP6YiYBLXdCCPFlub8KoF8V+/sD\n6B5aBgN4oebFShLv6JCEEEISi7uqzgBQ1Whc5wMYp8YsAG1EpEOqCuiLZ591w/Es94oK2zdsWN2U\niRBC0kgqfO6dAKzzbOeF4uqOP/4xcZqyMluvWlW7ZSH+yM8HHnqI0yOSeo9qzf6m5eVAUVHqyuOX\nOh3PXUQGw1w36NKlS12eOnoyD/rc08u11wKffw6cey7Qp09Ksly+HNhzT2DlSqBzZ2D+fKBZM+Cc\nc8LT/fabDemfnw8ccgjQvj2Qlwfst5/tX7MG+OEHy2/wYHvZu/56S+uMMv3LL5buuONslOklS4BT\nTgHWr7e8cnKAFi3sb7dxo/3d1q4FTjoJmD0buPFGICsLePxx4He/s+3mzYGSEqC4GNhjD2DZMhvd\nukkTYNIkoHFjYMMGIDcX6NgR+OtfgcWLgZkzgcJC4J13gN13Bw491Mqx++7Aq68CrVrZ8EtXXAFs\n3w6sWweccYbVpU0bq/+ddwJvvGG2z8qVdt3KyuxaAcD++wM33wz072/n+uor4Mwzw6dUWL0a+Ogj\nYNEioKDA6nXnnUDLluGjc5eVWd323RcYMwaYNcuudZcu9ruVl9u1HDwY2Gsv4NhjgU6dTCCnTrVw\n585Ahw7AgAHA2WcD8+YBvXsDb74JvPwy8O23VoeSEuD0063OTz4JXHSRleHHH21aiKeesv/Miy/a\nubZvB3r0sLIVFlqaLVtsRs8xY4A5c+w6TJhgo4Z37w7ccAPw/PNWvlat7L+wYQNQWmrXr6zMftuD\nDrL0//iHrWsdVU24AOgKYFGcfaMBXOnZXg6gQ6I8e/XqpSnF7h/Vv/3NjfvkE9VXX7VwYaGbBlC9\n/PLUnr8Bs2OH6s6dVaeZPFn13HNVV61S3bVLVU880X6Hr75SVft5ZsxQ/fBD1bffVr3+etUJE+zn\ne/pp1bZtVdu3VxVRHThQ9fPPVYcMUb3oItU//EG1b9/wn9e7XHmlal6e6qefqh56aPx0NV1uu636\nxz7+uOrq1arNm9v2GWe4+/bfP/YxLVpEx4mEb7/2murHH6v27x8e37hx/LJ07KjapYstgGrr1u6+\nggLVs8+28M03q/78s2ppqeqxx8bO65hjbP3FF6o33WTXyMk32aVZM1tnZal27ly9PO65RzUnJ/a+\nDh3smrZsadvt2qmefrrqkUdW71xZWXatBg6069+jh7tv0aLq328AclR96LavRFWL+wAAUwEIgD4A\n5vjJs07E3YlTtX+l98pnmLhv2qS6dWtyx6xbp/rbb/7Tz52resopqk8+aSL85puqX35ponvddaqH\nHKLatavqpZeqHnyw6l57hV/SmTNVL7jAxOC88+xmifWn79ZNVU8+2TamT9exY6t388RabrxRdZ99\n7EHSsWP8dOeeawLas2fqzg2onnaa6mOPqc6erfrMM6rPP686bpw9iGLV8+mnbT1okOpVV8XP95ln\nVL/+2sT0//4vdpr331f95RfV/PzY57r/ftXiYtWfflKtqLD8vPsfe8zivezcqVpe7j64Tj01fhlb\nt1b9/nvVDRvsdrvlFndfLEE/80zVp56y8i5apLpli123QYNUL7xQ9a9/tfKsX2/5lZXZfVBaamXL\nz3fzatvW1scfb/sLCtw6bNum+s47scucl6c6Z44dU1Zm99iSJaoTJ4Zfi3vusf/90KHusRMmuOEF\nC+wBtmmTGSo//mjryGvpXMebb/Z/X0aSMnEHMB7AegClMH/6IAC3ALgltF8APAdgFYCFALL9nLjW\nxH3YsOg4VVM576962WWpPX8N2LTJbjovpaVmxY4ZY+LqFHvECNW777b0EyeqPvGEu+8vf1H94x9t\n3aSJxTVrZs8xQPWFF1Q3b1bt18/+fKpmzQKqRx+dOoHzYx0XnWim
6eq3vg2Lv+YaWw8ZovrBB3az\nPvqoWerFxSZsImapjx6tlUL9ww+xr23kTf3RR6qLF7v733zT4h9/XPX11028du5ULSpS/ewzu54F\nBarTpqlOnWqWWKtWqvPnq55/friwAPYWUxVbtqguX+6m/+47u14XX6y6774Wd8MNqg8/bA/aiy92\nXz69OMevW6e6dq0JaiSDBoXXfd268P0VFfZG5YhwvGuoavu8eQ0bFv2b7toVfkxpqf1Xnf09eqiu\nWVP19UmWnBxbKipMYCMfTl7eeMMty8yZyRk+DhUVJup5efZA8kqMH7ZutQeE9+GTLCm13GtjqTVx\nHzo0Os555NeSuO/aZX+U8nLb/u03O11+vuq996oecIBZiXPmmFgA7mvxJZfYum/f8FfxVC3e1++r\nr1YdPtzCgwfbzbfHHu7+bt3M+jvssOh8PvhAdeFCd/urr8z4Puwwu5TetBUV4eIlonrOOXadRo2y\n+PzjL1YF9MZ++QqYgM+Ykdx1r6hQXbGi6ht6zhxXgD/5JHp/WZnqe+/ZOlm+/tqu77p1yd/kS5aY\n9VZSorr77u7xL77o73jvXzseRUWq33zj5h/vOpWXqy5bVvX5vF7Nt9+2uDlz7C0JsLeOeDhpnOPS\nxXvvaaUxkAocZ8Ajj6QmP780PHF3FOn++71XwZbNm8208SrQpZf6ynbdOtXt2y1cXm7WckmJicpz\nz9nr4+GHu9lmZak2bZp6kQbM5RArvn178ytHxs+Zo5qba5ZZkyZmCXbvHp3uiSdU77rLLB9Vez0H\nzCXz0UfuQ0vVLP7c3PBrtGaNm9ef/mRxjt+yvNwWR1j+8x+Ln519m65DJwVUzzor+Z/bL+Xl5nfe\ntq32zqFq1mN+fvWO9f4WoSaIhMybZ291fli92n7LmuL81yIpLIx+8/Ty9NPmIsvLq3kZasL27WZM\nOW+tqWDbtqqNi9qg4Yl7aalVxzFFt25175iVK81xV4W4FxSo9uljN0x+vv1oZWVu8vnzzc+crCCf\nemq473HEiKrT33uvrefNszeAefNMpL/80oTqv/910956a/gleP1183nHcg04rplYS+Srcnm56tKl\nyf8ExcXug+DHH1W//TY6jeMG8i7jxkWna0h4XWtr16a7NPHJzbU3UpJeGp64q4Y363sdm/Pnq+bl\naYVHUXZdeEWlRa5qDSaxLNrqWNitW5t177UWnX0VFdbo5WwfeKD7uti0qb9qFhWZzzAZV4JTv1at\nVH/91fzyAwZYHevS8lixIvxaddt3R51bPvWRG/GSXoa3eS1IQvyKe532c691WrcGtm2zcOPGAIAy\nNMaY1/fAhvJWeA4bMAGXYSGOwIQZt+HrPayfcXk5MHx4dHZDhwLt2ll/2yeesLisLOtX27gx8Npr\nbtqZM61v8pFHAj172ogI3lERvv3W+taKWL/in36yfrQ9etgHEpdfbufxQ4sWwIknJndpnH7G2dnA\nPvtY/9500L279Ree+NAcTNnYG+Pu/B4iSVYmgIzFTRaQy9NbEBIc/DwBamOpDcu9oOdJ+i/crZfh\nbT0eX2s25ugDeMK3xT1ihBn8r7/u+oz/9S/L2+l+5cXpQeBtUHn/fbOM6xsffqiV7p76QHG/8zQH\nx9hrC3H/hIQkAA3Rch+29S78GxeFxeXgWF/HvvQSMGiQhQ8+2L7gmzDBHdmgSYwrddBBdkd6ufDC\nZEtdN5x3HrBjh30tmHbmzEGzjyehF2Cf8RFCUk4gxnNXBT75BFheekDM/Y2kArdfvTXmcbNmmb44\nwu6w777An/8M7LZbbZQ4DUydipa7i/mG0s0rr7jhkpL0laO+EGkhEJICMlrcN2ywsSLatwf69QM+\nXn8UsrABV+KtyjQHYBVm3jsJzz78K/4PA/EM/gQAuKTTNwBsbJBYVnngePNNW8+Ykd5yADbohwMt\nd2DnznSXgASQjBb3v/8d+OAD
G5jJ4T78E2/hKkxFPyxGD6zCQTi+/WqgvBwD8RHawkZCaiplaSp1\nmthjD1tv357ecgDh4k7L3VrWCUkxGW2zlkXoc+dWv+HSwncAAP3wibtjx47KUSHbYRMAoHur9XVS\nxnpDfRJ3r6+L4p6e8WBJ4MlYy/3qq8Pn6HjpJWDdkH9gf6yNTrxjR+WAzGfjE3yAC/Dwwe/UUUnr\nCfVJ3L19ROmWobiTWiEjxX36dNeFDFgf7kGDEL/102O5C4AL8CGaNiqPnTaotGpl6/rgAqBbJpwd\nO9JdAhJAMlLc+/a19V/+Ylq1xZkEsCpxj5xKpaFZjE6rcaQvKx2wQTUcr+XOmalIisg4n7u319h5\n59lsM5XEE/fCwuiZmJwvWRsKzoWrb+JOyz1c3IuL7RNkQmpIxlnuS5e64SOPjNjpFQ0vP/wQPZVb\nQxN3R9Trg7i/+qobpriHizu7RZIUkXHiPneurVessKFkwogn7vPnR8c1NHF33lxeftmsw3Qyb54b\nplsm3OdeUJC+cpBAkXHifs01NgHxQQfF2JmMYNf1TfTf/wKjR9ftOb143VJrY/QoShe03MMtd4o7\nSREZJ+6AzX4uEmNHMp/WOw+CTZuAE06wqdu9/PCDvR6kijPOAG65JXX5JYvXHVOfPsktKopuD2lo\neMV9a/QwGYRUB1/iLiL9RGS5iKwUkQdi7O8rIgUiMj+0DEt9UX1w5502Jq+X006LnbaszKzG0aNt\nPN5hEUU++GDgkENqp5zpwCugJSXAtGnAW2/FT19beFvEO3Uy//vpp9d9OeoTQRf30lJ290wDCcVd\nRBrDJsDuD6AHgCtFpEeMpF+p6lGh5f+luJz+6N7dBkp3KC8H/vSn8DTHHeeGt24FHn7Ywt7jqosq\n8OCDwMKFNc8r1Xgt9+Ji4KyzgKuuAjZvrttyeB8yzZvbevr0ui1DfcMrfEFsCzrtNPc7C1Jn+LHc\newNYqaqrVbUEwNsAzq/dYtUApxtZy5b2dZPzZabD00+7s2x4raR4vRSS6V2ydasNeHPmmf6PqSu8\nouptUG3Xrm6/kPT62OM1gDc0gt5bZubMdJegQeJH3DsBWOfZzgvFRXKCiOSKyFQROTxWRiIyWERy\nRCRno3e0r1Tz00+uDz1S3Js0Afbe28KVXz8hfsPer7/aOjc3cc8OJ4/60N0wEm+ZVq4M31eXguK9\nzhzq1igqct9idu1Kb1mq4s9/Bj78MN2lID5JVYPqPABdVLUngGcBTIyVSFXHqGq2qmZnRfrGU0mX\nLjYOMAC0aRO+r1EjoGNHC593nhsfT7jz8oBffrFO9YkaRB2RDE3xV6/wWu5/+EP4vrrsGukV9yBa\nqdWhqMg1OOrzNXn2WeCCC9JdCuITP+KeD2A/z3bnUFwlqrpNVQtD4SkAmopIu5SVsiYcdBDQubO7\nXVxs4g+EjxW8bJlrpXvJz7eeMwAwblz0/tGj3V41zut1VeKers/Lq+qRUpfWolfcAzMTSg0pLnY/\n2qjP4k4yCj/i/h2A7iLSTUSaAbgCwCRvAhHZV8Q6J4pI71C+ddxSF4dGjaxf988/A7fdBvTqBey1\nV2wBnjvXpnR68EE3Li/PdfHsuWd4+ooKs+azs23bj7g7bwh9+oTPsF3bVOUqSpe4H+tvCsTAU1Zm\n7Q+77ZZZ4l5cDEyZ4j893XB1SkJxV9UyALcD+ATAUgATVHWxiNwiIo6f4hIAi0RkAYBnAFwRmsi1\nfiACdOgAPPec3UAi1kPB63MHzFffr581ijrk57viHunicW5EZxhdv+JeUQHMng1cf321q5Q09dFy\nf+YZ+8agUaPMErVUU1ZmbUEtWtRfn3us2/nBB4EBA6zB1M8baX1siwowvnzuqjpFVQ9W1QNV9fFQ\n3ChVHRUKj1TVw1X1SFXto6rf1GahU8JuuwFt29qf1vkqcOjQ6HRr1wKrVll41Sp3xmwgWpD8iH
tJ\nSXo+/6+PlvteewH33GPCsHhx3ZWhvlFWZv+ZFi3q70MuVpvUmjW2Pvlk4PjjE+cRtK+RKyqAIUOA\nJUvSXZKYZOQXqilnjz1sifyA5KyzbByUBQvcuDFjrD/99u3ApZeGp3fEvVEVl7W0NPwGXrSoZmX3\nS3l5+CQZXtIh7jfcYGun/ePnn2vnfKrA+PH1W1gcy7158/or7rGun7fNZM6cxHmkexwhVeD991P3\nX1i1Cnj++fCOGfUIijtgbppYXbz697fG0kgBXrkSOOYY4Msv3bgtW9w3gKr6b5eUhIvpEUcAv/1W\n7aL7przcLOVYpEPcr7jC1u1C7e619THVlCnWO+iRR2on/1RQXl7/3TKxhDnZBvF0P2CnTQMuvjh1\n/wVnDJR6OgY/xd3h97+3Rs4BA2z7mmuAm25y948eHS7akX3FZ8xwB+Rq2TL+eUpK3KEtHTZsAHr0\nAP7zn+qXPxGlpfaVYKxxZdLRz925lk4XwE2baud8zttYKr5Ari28Pne/v0VxcXSbUW2SyHL3Q7ot\nd8eAcNxJNcUR93rUvOilHo0gVQ/49ltb79pl4tOoETByJPDBB/apfl4e8OijsY+98EI37H0IfP11\nuKCWlka/xi1dasv11wOXX55cmbdujW7ojcXOnfbQad06WhTqctyPSHFv1crCtWW516cZqOJRVmbC\nnoxbpl8/e3OsK2GJJe7JfmGcbnF33KWpsrSdTgr11HKnuMfC+VoQsAaTIUMsfP/9ruhPmxbulvHy\n009mnS9eDFx3Xfi+/Pzo9M5YNH5GaywrA045xcbI+fpr4Lvv7Pjf/a7q44qKosW9WTO7aetyPJNI\ncRcx6722LPe6EvcNG2xasLCpwXzitdz9znEb779XWwTBLZNqcXfqU08td7plkmH33W2gsaFDgS++\ncHvOfPMNcOCBbrr8fOv7HinsQOx+wc6IlE4vmyVLLP9Y/PqrvWH87/+asAPA99/bes2a+ON4OJa7\n43e/+mq3C2dBAZCT426ngqVLgY8/jo6PFHfA/O6Zbrm3b2/dOquD1+deVGQPvP79gbvvtjfCd96J\nf2xNhcXvwyQIbhmKO/HNyJEm1n36mNCef37iYXRHjIi/r6AAuPFG4PDDbSS9U0+1xtajj7ZWedXY\nXc6cfvjXXGPd0mKNa+9Y7s4HV82a2dK8ufVUOfZYd1iCnBxgYswRJPzTo4cJVCTODeHtuVOblrvj\nF60Lt0xubvWO81ruThvBxx/bIHcTJwKXXRb/WD+C+emnwOOPR8cvWGC9xKp6eFR1npiTKlRBumcA\nc0Q9VeIvQg0+AAAQi0lEQVTuXJN66pahuNeEJk1MwESsz/zEicCVV5rl/ve/2xexXgYNSpznK6+4\n4RkzzNKeP99cQ4sWAevWRR+zerUNn/D117Z92WX21vDrrzZ0wkMPmXi2aAEceqilcW7M1q3dPuZf\nfeWW88ILYzdCOuPgx+Kvf008AmA8y722xN25AadOtTaTWNT0jaU6k42sXm2WeUVFeFfIZBtJ/fSu\nOftsd2hrL86ExH4a8mP95n4eLF6rNt3DGTsPl1RNDkPLvQHSsaP55597zkT3qqss/sUXXVHbscME\nx/kAIua8gRH07OmGp093hxYeNw447DB333ffWdyjjwLXXgs88YQJdcuWQLdulsZJ3769O5661y0E\nWMOvSPif96STYo/NPX068Nhj9ubgpaQEeOABd1yeWOLeubNdp7POSnwNksUrSsccE71/2jR7wM2Y\nUf1zJPNwcBqvL7vMLPNFi8I/YvLrJnGoStwjP5iLtDCdhvhFi+w3qkr0vNfRSedH3L3H1cYDPDfX\n//wJzrVK1TSTFPcGziGHmLCVlJhQFhSYNd2ypfV4OOww+xhi3jy7ybdsAT77DBg+PH5vhBEjTGQ/\n/dQd/dLB+wXtc88Bs2a52y1bmuto8mSbtQowq87B8aF26G
Brx80wdaqbZvZs96ZesQJ4802L69s3\ndlkLCoB//MNtf4gl7p1CI0hPmxY7j3ioJv74ySsusYaZ/vxzW1dnzPGKCntDy8nxl37tWnswjhnj\nirxIuM89WapydfTqFf7Qj/fgWL7cfqOqvhL2nscZKM+PuHuPu/TS1Lswjjwy3OjxU5bc3NQIslN/\ninsDplEj18fcvHm0lX7AAeb7bNzY3Dunn26v7Dt3mj/UK0qffQbcdZfbOLRggTWkPvecWdyjRsV/\nze7QwcRkwADXSr//fuDmm63dYP16E/1I18+AAcDYsTZUgMOmTfbguvpqO9aL1wKMdDPEEnfvA8bL\n2rXWUO0M/xDJ+PH2YHC6sMYikQDVpJFt2jQbX+Uvf3Hj1q+P/RBx9gHWGO60Aezc6bplIgemi4f3\n+u7aZdc4UrjLy80i9/bpjpx8O/LBUNVbg7eLZo8e5mKJvLaxJn2JdOd4HyC33QY89VT8c6Ya71vO\nL7/UPD+nbvS5k6Rp1Ai45BLzSW/YYDdx5Hyj7dsDXbvajeJYaZddZjd1mzbA4MHma3777dhj57Rr\nZ5bkl1+aWP/737HLctNN4Y3B8cbjb9Mm/AMv50tUwMTQERuvuB9xBPD/QjMzlpSYZVVYaOK9erU1\nJsfCGRbC+2YRSaS4fPRR+LZX3KdNC/9wrbAwduP0rl0mjI4P3+tu6NgR2Gef2GVxRDk/3xX3wkJX\n3PfbL/ZxQHiDsLdOxcXWIN29e3j6WK6iyOE1IsW9Kn+/I+6OW+/DD+1/4yXWQ80RfKeX1sUXu2L4\nwgvAfffFP2cybNmSeEYxr7g77Q1+GT06+s3Sebgl09Nr2TL7retgKk6Ke6aQlWVWvV+6drWeNqNH\nm3V7+eVV96PfbTfz1Y8da9tjxpif/t137dXXL1u3hs9TO3++Gz7rLBsJEoh2OR1wgFuOI4+03kLO\nLFrLl9sby86dNobJI4+Y6DvXY8GC+L1hIsX93XfD47ziftZZVn/nZh040MSsqMjaHxYtsqVFC3uj\ncMQy1rcLsXDEZ9s296HhiHvjxu4kMrFYvtwNe0XZEaxISzSWFR4pvpHiXtUwGI64OyOmXnutu8+Z\nSD5W/oeHJmUbPtzWP/xgbzCp6BbpFeu997auylX1WPI+8Koa02nJkmjxveUWt03of/7H7g+v5e73\nQ8BPP7WH/D//6S99TVDVtCy9evVSUk+pqIgd/9NPqmPHqk6dqtq6teqtt6qax9GWLVtU99vPwt26\nqR5xhIVPPjk8HRB9jnXrotNELk7ezjJwoBu+8krV9estr4ICy09VdbfdovPp08c979ChFnfffe7+\nW29Vfe45d/uVV2KX58QT45c1Fh9+GJ3ujTdU995bdcgQuybx8hs3zs3nl1/c+OnTY59z6dLoPF59\nNTzNiy+G7x8+PP5/wkk7f350vrNm2fr998OP+fJLN83YsaqTJrnb3nyqy9q10WW54YbYaYuK7P+Y\nlaXaubOlveuu2Gkjy7VpU/j/Ntb/YsYMf2UePdrSP/tsUlUNLx5y1IfG0nIn0cTrv9yli/XD79fP\n/LfPP29Wy9Ch1lOmbVtrwH3ySbPYv//erNUZM4CjjnLzOfvs6HN07mzWrNcijCSyLWDyZDc8fry1\nKfz977bu0sXaChzrdPNmtxF51izrEjp+vGvxei2pF15wv0oGgFdfjV0ep+tpPMrLw98oYrkNNm1y\n3TLea3LEEW64RQtrtHby8r55nHpq7HM7lqTzXQNg11fV/N6q0Za7M+NYLBzL3Wn8dhg50n2zu+ii\n8HGTvO6yjRvdtzMg/P9QXTZsiI575RW3bcOhsNB6jC1caOW4/XaLf/rpqnsIOe4jb5uOt6eN180V\nOSpmYWHsvB3Xl59u0TXFzxOgNhZa7g2M4mKznvxSVKS6ZInqN9+oPvmkWeB//KNZgzt3qn78sVnY\n991nVvopp8S3ek85xf
Lcvl31qKMSvyFUd7noIjd8+OG2btJEdeJE1cmTw98GnOXGG1WbNlW95x4r\nY3a2xW/c6KY59dRw67tPn9jnnzLFjlN1rebPPlMdP97CZ55p1xJQ/eIL1X/9K/z4nj1jv7Vt3+6m\nKSx0w1OnumnOPdeNnz3bfr9Ondy4J5+0vEeMiC73zz+7+VRUqC5bZvEPPeTGlZdbeNMm1bIyC0+d\nGvs6TJ8e/r+L96bhvDn9+qubv6q7b+1a237kETdu0CA3fN11qo0aqe6/v+qhh9q1/+UXyw9Qvfvu\n8OtYWKj65z+rNm/u6xaIB3xa7hR3Ehx27VIdOdIVxLZtLbx5c3i6t94yMTr5ZNXdd1f99FPVCRNU\n//AH1dxc1RYt3Bt48mRXaEaMUD37bNVjj1U97jhXHPbaSytdDV26+HsQtGgRLogPPmhl++knE0dV\n1QULTPh+/NF1JXiX4cOj4xo3Vr3llnAhU1W9887wdD16qB5wgLs9eLCtDzlE9dFHVb//XrWkxB46\nt9/upisvV333XXtQeR8EJSWqvXpFl+fee1Wfeir8wR7pDnKWAw+MjisqUj3+eNX27e3hBVh5tm6N\ndos99pitjzxSdcUK1a++ch+W3qW0VPWqq8Lj7r9f9Z13zL3nxH36qZX3ttvi/45t29pvF2ufiJVT\n1YwRbz1rQErFHUA/AMsBrATwQIz9AptebyWAXADHJMqT4k5qjV27anZ8RYXdlCtW2PbKldF5lpWZ\nRauqumOH6vLl4edfuNBE+T//MX/6ww+7bRCAPSiWLlXdd1/VVq1UV62qukzbt1v6YcNMsPr3Nyt2\n0iQTpZNOii0wK1fa8XPn2ltErIdMebnV+eabEz+UqqK0VPWOO9y0jRqp5uXFT19cbNatn4dhvOWR\nR+zBsnGj1WH//eOn3XNPewA7fPNN1Xn//vcm8H36hL+FzJsX/jZVUqJ68cXx8+ndO3z7jjuqvo4J\n8CvuYmnjIyKNAawAcCaAPNiE2Veq6hJPmnMA/AnAOQCOA/BvVT0uRnaVZGdna47fjz8ICRKqrn99\nxw7rIeRnRFA/fPON+eZ79rSPjXr3dvfl5Vk7Sa9e5rf/9VfrleR8BFRRYV8a//yz+afXrLGyLV1q\nXUQ7dgTOPTf5OibCaZtYs8baS0pKrDfVjTfahPUi1g4yfLj1hmnVytLfc48Nuue9dmvX2rcHq1ZZ\nL6a8PPt46p13zO/+4IPh5/74Y6vjo4+ar37rVuvNNGCADfXttHMccIDbiyY72363yy+37zy83X0/\n+cTWu3bZcBydOlk7xMaNVu4FC6w9qAa/t4jMVdXshOl8iPvxAP5HVc8ObT8IAKr6pCfNaABfqur4\n0PZyAH1VdX2MLAFQ3AkhPknmQRGPNWtMaP2MQV9ebl1T162zLqgzZ5qgDxxYvXM7GlvTOsDJxp+4\n+3l8dALg7aaQB7POE6XpBCBM3EVkMIDBANDFmTuTEEKqIhWi6Hx85Qfn6+399rPljDNqdu4UiXqy\n1GlXSFUdo6rZqpqdFe8LR0IIITXGj7jnA/B+F905FJdsGkIIIXWEH3H/DkB3EekmIs0AXAFgUkSa\nSQCuFaMPgIKq/O2EEEJql4Q+d1UtE5HbAXwCoDGAl1V1sYjcEto/CsAUWE+ZlQCKANxQe0UmhBCS\nCF/9cVR1CkzAvXGjPGEFMCTyOEIIIemBY8sQQkgAobgTQkgAobgTQkgASfiFaq2dWGQjgJ+qeXg7\nALUw2269hnVuGLDODYOa1Hl/VU34oVDaxL0miEiOn89vgwTr3DBgnRsGdVFnumUIISSAUNwJISSA\nZKq4j0mcJHCwzg0D1rlhUOt1zkifOyGEkKrJVMudEEJIFWScuItIPxFZLiIrReSBdJcnVYjIfiLy\nhYgsEZHFInJHKH4vEZkmIj+E1m09xzwYug7LReTs9JW++ohIYxH5XkQmh7aDXt82IvKu
iCwTkaUi\ncnwDqPNdof/0IhEZLyLNg1ZnEXlZRDaIyCJPXNJ1FJFeIrIwtO8ZkRoMBu9nLr76ssAGLlsF4AAA\nzQAsANAj3eVKUd06IDT3LIA9YFMb9gDwT4TmrQXwAIB/hMI9QvXfDUC30HVpnO56VKPedwN4C8Dk\n0HbQ6/sagJtC4WYA2gS5zrBJe9YAaBHangDg+qDVGcApAI4BsMgTl3QdAcwB0Ac2L/VUAP2rW6ZM\ns9x7A1ipqqtVtQTA2wDOT3OZUoKqrlfVeaHwdgBLYTfG+TBBQGh9QSh8PoC3VbVYVdfARuTsjQxC\nRDoDGADgJU90kOu7J0wExgKAqpao6lYEuM4hmgBoISJNALQE8DMCVmdVnQFgS0R0UnUUkQ4AWqvq\nLDWlH+c5JmkyTdzjTecXKESkK4CjAcwG0F7dsfF/AdA+FA7CtfhfAPcBqPDEBbm+3QBsBPBKyBX1\nkojsjgDXWVXzAfwLwFrYtJsFqvopAlxnD8nWsVMoHBlfLTJN3AOPiLQC8B6AO1V1m3df6GkeiO5N\nIjIQwAZVnRsvTZDqG6IJ7NX9BVU9GsAO2Ot6JUGrc8jPfD7swdYRwO4icrU3TdDqHIt01DHTxD3Q\n0/mJSFOYsL+pqu+Hon8Nva4htN4Qis/0a3EigPNE5EeYe+00EXkDwa0vYJZYnqrODm2/CxP7INf5\nDABrVHWjqpYCeB/ACQh2nR2SrWN+KBwZXy0yTdz9TPmXkYRaxccCWKqqIzy7JgG4LhS+DsCHnvgr\nRGQ3EekGoDusMSYjUNUHVbWzqnaF/Y6fq+rVCGh9AUBVfwGwTkQOCUWdDmAJAlxnmDumj4i0DP3H\nT4e1JwW5zg5J1THkwtkmIn1C1+pazzHJk+5W5mq0Sp8D60myCsBD6S5PCut1Euy1LRfA/NByDoC9\nAfwXwA8APgOwl+eYh0LXYTlq0Kqe7gVAX7i9ZQJdXwBHAcgJ/c4TAbRtAHX+G4BlABYBeB3WSyRQ\ndQYwHtamUAp7QxtUnToCyA5dp1UARiL0oWl1Fn6hSgghASTT3DKEEEJ8QHEnhJAAQnEnhJAAQnEn\nhJAAQnEnhJAAQnEnhJAAQnEnhJAAQnEnhJAA8v8ByKY9HZyZWDEAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x22e9627d6a0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(history.epoch,history.history.get('loss'),c='r')\n",
    "plt.plot(history.epoch,history.history.get('acc'),c='b')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "x_train = x[:int(len(x)*0.75)]\n",
    "x_test =  x[int(len(x)*0.75):]\n",
    "y_train= y[:int(len(y)*0.75)]\n",
    "y_test= y[int(len(y)*0.75):]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 73,
   "metadata": {},
   "outputs": [],
   "source": [
    "model = keras.Sequential()\n",
    "model.add(keras.layers.Dense(128,input_shape=(x.shape[1],),activation='relu'))\n",
    "model.add(keras.layers.Dense(128,activation='relu'))\n",
    "model.add(keras.layers.Dense(128,activation='relu'))\n",
    "model.add(keras.layers.Dense(1,activation='sigmoid'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 74,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 75,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 489 samples, validate on 164 samples\n",
      "Epoch 1/1000\n",
      "489/489 [==============================] - 1s 1ms/step - loss: 3.5530 - acc: 0.4785 - val_loss: 2.5000 - val_acc: 0.6646\n",
      "Epoch 2/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 1.9517 - acc: 0.6626 - val_loss: 2.0112 - val_acc: 0.7256\n",
      "Epoch 3/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 1.8270 - acc: 0.6074 - val_loss: 2.5500 - val_acc: 0.6646\n",
      "Epoch 4/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 2.0581 - acc: 0.6769 - val_loss: 2.2815 - val_acc: 0.5366\n",
      "Epoch 5/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 1.7188 - acc: 0.6483 - val_loss: 1.5215 - val_acc: 0.5854\n",
      "Epoch 6/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 1.6374 - acc: 0.7198 - val_loss: 2.0685 - val_acc: 0.6951\n",
      "Epoch 7/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 1.3090 - acc: 0.7157 - val_loss: 1.5401 - val_acc: 0.7195\n",
      "Epoch 8/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 1.6555 - acc: 0.6421 - val_loss: 1.1121 - val_acc: 0.7256\n",
      "Epoch 9/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 1.3305 - acc: 0.6442 - val_loss: 1.3353 - val_acc: 0.6707\n",
      "Epoch 10/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 1.0129 - acc: 0.7157 - val_loss: 1.7895 - val_acc: 0.4329\n",
      "Epoch 11/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 1.5771 - acc: 0.6503 - val_loss: 2.2998 - val_acc: 0.3963\n",
      "Epoch 12/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 1.7132 - acc: 0.6483 - val_loss: 2.6297 - val_acc: 0.6707\n",
      "Epoch 13/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 1.8102 - acc: 0.7014 - val_loss: 2.2567 - val_acc: 0.7256\n",
      "Epoch 14/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 1.5893 - acc: 0.7526 - val_loss: 2.0623 - val_acc: 0.7012\n",
      "Epoch 15/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 1.4659 - acc: 0.7628 - val_loss: 1.5375 - val_acc: 0.7439\n",
      "Epoch 16/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 1.1194 - acc: 0.7832 - val_loss: 0.7838 - val_acc: 0.7561\n",
      "Epoch 17/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 1.1305 - acc: 0.6933 - val_loss: 1.5598 - val_acc: 0.7012\n",
      "Epoch 18/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 1.1608 - acc: 0.7280 - val_loss: 1.3327 - val_acc: 0.7195\n",
      "Epoch 19/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 1.0108 - acc: 0.7403 - val_loss: 0.9056 - val_acc: 0.7134\n",
      "Epoch 20/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 1.2640 - acc: 0.7280 - val_loss: 1.1275 - val_acc: 0.7134\n",
      "Epoch 21/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 1.1094 - acc: 0.6994 - val_loss: 0.9366 - val_acc: 0.7256\n",
      "Epoch 22/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 1.0142 - acc: 0.7321 - val_loss: 1.7910 - val_acc: 0.6707\n",
      "Epoch 23/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 1.3476 - acc: 0.7771 - val_loss: 1.4313 - val_acc: 0.7134\n",
      "Epoch 24/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 1.0749 - acc: 0.7403 - val_loss: 0.9565 - val_acc: 0.6829\n",
      "Epoch 25/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 1.6039 - acc: 0.7035 - val_loss: 2.0528 - val_acc: 0.6707\n",
      "Epoch 26/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 1.3677 - acc: 0.7423 - val_loss: 0.9351 - val_acc: 0.7317\n",
      "Epoch 27/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.8946 - acc: 0.7526 - val_loss: 0.9203 - val_acc: 0.7561\n",
      "Epoch 28/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.6854 - acc: 0.7832 - val_loss: 0.9389 - val_acc: 0.5915\n",
      "Epoch 29/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.7962 - acc: 0.7710 - val_loss: 1.0150 - val_acc: 0.7195\n",
      "Epoch 30/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.8738 - acc: 0.7587 - val_loss: 1.3607 - val_acc: 0.6524\n",
      "Epoch 31/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 1.1551 - acc: 0.7751 - val_loss: 1.1566 - val_acc: 0.7561\n",
      "Epoch 32/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.8411 - acc: 0.7689 - val_loss: 0.8255 - val_acc: 0.7317\n",
      "Epoch 33/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.6860 - acc: 0.7914 - val_loss: 0.6879 - val_acc: 0.7500\n",
      "Epoch 34/1000\n",
      "489/489 [==============================] - 0s 183us/step - loss: 0.6307 - acc: 0.7669 - val_loss: 0.7587 - val_acc: 0.7073\n",
      "Epoch 35/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.7842 - acc: 0.7546 - val_loss: 0.7483 - val_acc: 0.7805\n",
      "Epoch 36/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.5574 - acc: 0.7894 - val_loss: 0.6365 - val_acc: 0.7561\n",
      "Epoch 37/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.5992 - acc: 0.7689 - val_loss: 0.8085 - val_acc: 0.7561\n",
      "Epoch 38/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.6888 - acc: 0.7853 - val_loss: 0.6805 - val_acc: 0.7561\n",
      "Epoch 39/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.7453 - acc: 0.7587 - val_loss: 0.7711 - val_acc: 0.7805\n",
      "Epoch 40/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.5711 - acc: 0.7894 - val_loss: 1.0383 - val_acc: 0.7378\n",
      "Epoch 41/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 1.1144 - acc: 0.7362 - val_loss: 1.3970 - val_acc: 0.7256\n",
      "Epoch 42/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.9197 - acc: 0.7628 - val_loss: 0.8321 - val_acc: 0.5671\n",
      "Epoch 43/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.6680 - acc: 0.7894 - val_loss: 0.6549 - val_acc: 0.7500\n",
      "Epoch 44/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.6927 - acc: 0.7914 - val_loss: 0.8485 - val_acc: 0.7988\n",
      "Epoch 45/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.7652 - acc: 0.7751 - val_loss: 1.2535 - val_acc: 0.7012\n",
      "Epoch 46/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.7291 - acc: 0.7771 - val_loss: 1.0710 - val_acc: 0.6646\n",
      "Epoch 47/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.7241 - acc: 0.7689 - val_loss: 0.8136 - val_acc: 0.7256\n",
      "Epoch 48/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.4892 - acc: 0.8016 - val_loss: 0.7205 - val_acc: 0.7195\n",
      "Epoch 49/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.5465 - acc: 0.8016 - val_loss: 0.6712 - val_acc: 0.8049\n",
      "Epoch 50/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.5051 - acc: 0.8241 - val_loss: 0.6584 - val_acc: 0.7744\n",
      "Epoch 51/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.5592 - acc: 0.8098 - val_loss: 1.0948 - val_acc: 0.7134\n",
      "Epoch 52/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.8044 - acc: 0.8037 - val_loss: 0.8780 - val_acc: 0.7683\n",
      "Epoch 53/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.5696 - acc: 0.7832 - val_loss: 0.6831 - val_acc: 0.8049\n",
      "Epoch 54/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.5218 - acc: 0.7935 - val_loss: 0.6868 - val_acc: 0.7744\n",
      "Epoch 55/1000\n",
      "489/489 [==============================] - 0s 118us/step - loss: 0.4254 - acc: 0.8057 - val_loss: 0.6631 - val_acc: 0.8049\n",
      "Epoch 56/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3909 - acc: 0.8364 - val_loss: 0.7210 - val_acc: 0.7805\n",
      "Epoch 57/1000\n",
      "489/489 [==============================] - 0s 116us/step - loss: 0.5206 - acc: 0.7894 - val_loss: 0.6811 - val_acc: 0.8171\n",
      "Epoch 58/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3845 - acc: 0.8262 - val_loss: 0.7622 - val_acc: 0.7744\n",
      "Epoch 59/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3895 - acc: 0.8466 - val_loss: 0.7354 - val_acc: 0.7866\n",
      "Epoch 60/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 122us/step - loss: 0.3721 - acc: 0.8384 - val_loss: 0.7282 - val_acc: 0.7683\n",
      "Epoch 61/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.5053 - acc: 0.8241 - val_loss: 0.7704 - val_acc: 0.7683\n",
      "Epoch 62/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.4281 - acc: 0.8139 - val_loss: 0.7108 - val_acc: 0.7988\n",
      "Epoch 63/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.4228 - acc: 0.8262 - val_loss: 0.7610 - val_acc: 0.7866\n",
      "Epoch 64/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.4365 - acc: 0.8262 - val_loss: 0.8328 - val_acc: 0.7561\n",
      "Epoch 65/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.4024 - acc: 0.8384 - val_loss: 0.7218 - val_acc: 0.8110\n",
      "Epoch 66/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3953 - acc: 0.8466 - val_loss: 0.7664 - val_acc: 0.7805\n",
      "Epoch 67/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.3790 - acc: 0.8303 - val_loss: 0.7603 - val_acc: 0.8293\n",
      "Epoch 68/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3650 - acc: 0.8303 - val_loss: 0.7587 - val_acc: 0.8415\n",
      "Epoch 69/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3604 - acc: 0.8569 - val_loss: 0.7069 - val_acc: 0.7866\n",
      "Epoch 70/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3839 - acc: 0.8425 - val_loss: 0.7261 - val_acc: 0.8110\n",
      "Epoch 71/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.5581 - acc: 0.8139 - val_loss: 0.8265 - val_acc: 0.7500\n",
      "Epoch 72/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.4652 - acc: 0.8303 - val_loss: 0.7970 - val_acc: 0.7317\n",
      "Epoch 73/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.4273 - acc: 0.8139 - val_loss: 1.0972 - val_acc: 0.6890\n",
      "Epoch 74/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.5083 - acc: 0.7935 - val_loss: 0.7342 - val_acc: 0.8110\n",
      "Epoch 75/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.3711 - acc: 0.8323 - val_loss: 0.8658 - val_acc: 0.6951\n",
      "Epoch 76/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.4333 - acc: 0.8160 - val_loss: 0.7326 - val_acc: 0.8415\n",
      "Epoch 77/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3490 - acc: 0.8507 - val_loss: 1.0135 - val_acc: 0.7256\n",
      "Epoch 78/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.4479 - acc: 0.8139 - val_loss: 0.7396 - val_acc: 0.8232\n",
      "Epoch 79/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3819 - acc: 0.8262 - val_loss: 0.7119 - val_acc: 0.8110\n",
      "Epoch 80/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3766 - acc: 0.8344 - val_loss: 0.8875 - val_acc: 0.7439\n",
      "Epoch 81/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3977 - acc: 0.8262 - val_loss: 0.7325 - val_acc: 0.8415\n",
      "Epoch 82/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3522 - acc: 0.8487 - val_loss: 0.7786 - val_acc: 0.8110\n",
      "Epoch 83/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3473 - acc: 0.8528 - val_loss: 0.7940 - val_acc: 0.8171\n",
      "Epoch 84/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3360 - acc: 0.8569 - val_loss: 0.7654 - val_acc: 0.8354\n",
      "Epoch 85/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.3382 - acc: 0.8671 - val_loss: 0.7764 - val_acc: 0.8049\n",
      "Epoch 86/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3854 - acc: 0.8323 - val_loss: 0.7593 - val_acc: 0.8171\n",
      "Epoch 87/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.4299 - acc: 0.8282 - val_loss: 0.7903 - val_acc: 0.7561\n",
      "Epoch 88/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.4531 - acc: 0.8200 - val_loss: 0.8354 - val_acc: 0.7012\n",
      "Epoch 89/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.4128 - acc: 0.8221 - val_loss: 0.7247 - val_acc: 0.8293\n",
      "Epoch 90/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3710 - acc: 0.8466 - val_loss: 0.7452 - val_acc: 0.7744\n",
      "Epoch 91/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3461 - acc: 0.8507 - val_loss: 0.8127 - val_acc: 0.7622\n",
      "Epoch 92/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3398 - acc: 0.8589 - val_loss: 0.7215 - val_acc: 0.7927\n",
      "Epoch 93/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3550 - acc: 0.8569 - val_loss: 0.7820 - val_acc: 0.7927\n",
      "Epoch 94/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3250 - acc: 0.8691 - val_loss: 0.8231 - val_acc: 0.8049\n",
      "Epoch 95/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3602 - acc: 0.8528 - val_loss: 0.8215 - val_acc: 0.7561\n",
      "Epoch 96/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3249 - acc: 0.8589 - val_loss: 0.8014 - val_acc: 0.8354\n",
      "Epoch 97/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3249 - acc: 0.8691 - val_loss: 0.7703 - val_acc: 0.8354\n",
      "Epoch 98/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3261 - acc: 0.8650 - val_loss: 0.8850 - val_acc: 0.7134\n",
      "Epoch 99/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3607 - acc: 0.8630 - val_loss: 0.8335 - val_acc: 0.7866\n",
      "Epoch 100/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3257 - acc: 0.8630 - val_loss: 0.8126 - val_acc: 0.7866\n",
      "Epoch 101/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3215 - acc: 0.8691 - val_loss: 0.7879 - val_acc: 0.8293\n",
      "Epoch 102/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3834 - acc: 0.8548 - val_loss: 0.8671 - val_acc: 0.7256\n",
      "Epoch 103/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3696 - acc: 0.8466 - val_loss: 0.8604 - val_acc: 0.7927\n",
      "Epoch 104/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3631 - acc: 0.8569 - val_loss: 0.9556 - val_acc: 0.7805\n",
      "Epoch 105/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.7499 - acc: 0.8119 - val_loss: 1.2792 - val_acc: 0.6768\n",
      "Epoch 106/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.8633 - acc: 0.7894 - val_loss: 1.3069 - val_acc: 0.7012\n",
      "Epoch 107/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.9207 - acc: 0.7832 - val_loss: 0.8624 - val_acc: 0.7317\n",
      "Epoch 108/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.4742 - acc: 0.7975 - val_loss: 0.6518 - val_acc: 0.7744\n",
      "Epoch 109/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.4352 - acc: 0.8057 - val_loss: 0.7201 - val_acc: 0.7561\n",
      "Epoch 110/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.4854 - acc: 0.8323 - val_loss: 0.7092 - val_acc: 0.7988\n",
      "Epoch 111/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.4126 - acc: 0.8609 - val_loss: 0.7012 - val_acc: 0.8171\n",
      "Epoch 112/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3514 - acc: 0.8589 - val_loss: 0.6957 - val_acc: 0.7927\n",
      "Epoch 113/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3882 - acc: 0.8384 - val_loss: 0.7596 - val_acc: 0.7500\n",
      "Epoch 114/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3680 - acc: 0.8487 - val_loss: 0.7353 - val_acc: 0.7927\n",
      "Epoch 115/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3579 - acc: 0.8609 - val_loss: 0.7246 - val_acc: 0.8110\n",
      "Epoch 116/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3391 - acc: 0.8528 - val_loss: 0.7651 - val_acc: 0.7012\n",
      "Epoch 117/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3234 - acc: 0.8793 - val_loss: 0.7449 - val_acc: 0.8293\n",
      "Epoch 118/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3175 - acc: 0.8650 - val_loss: 0.7261 - val_acc: 0.8049\n",
      "Epoch 119/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3327 - acc: 0.8507 - val_loss: 0.7659 - val_acc: 0.8232\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 120/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3163 - acc: 0.8487 - val_loss: 0.7760 - val_acc: 0.7988\n",
      "Epoch 121/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3270 - acc: 0.8425 - val_loss: 0.8107 - val_acc: 0.7317\n",
      "Epoch 122/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3370 - acc: 0.8528 - val_loss: 0.8041 - val_acc: 0.7561\n",
      "Epoch 123/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3353 - acc: 0.8569 - val_loss: 0.8937 - val_acc: 0.6707\n",
      "Epoch 124/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3482 - acc: 0.8487 - val_loss: 0.8703 - val_acc: 0.7744\n",
      "Epoch 125/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3345 - acc: 0.8589 - val_loss: 0.8005 - val_acc: 0.7744\n",
      "Epoch 126/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3153 - acc: 0.8630 - val_loss: 0.8152 - val_acc: 0.8110\n",
      "Epoch 127/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2971 - acc: 0.8896 - val_loss: 0.9591 - val_acc: 0.6341\n",
      "Epoch 128/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3172 - acc: 0.8609 - val_loss: 0.8069 - val_acc: 0.8049\n",
      "Epoch 129/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3200 - acc: 0.8671 - val_loss: 0.8410 - val_acc: 0.7744\n",
      "Epoch 130/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2946 - acc: 0.8671 - val_loss: 0.8417 - val_acc: 0.7561\n",
      "Epoch 131/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3154 - acc: 0.8671 - val_loss: 0.8417 - val_acc: 0.7439\n",
      "Epoch 132/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2784 - acc: 0.8814 - val_loss: 0.8440 - val_acc: 0.8049\n",
      "Epoch 133/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2997 - acc: 0.8589 - val_loss: 0.9210 - val_acc: 0.8049\n",
      "Epoch 134/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3939 - acc: 0.8323 - val_loss: 0.8945 - val_acc: 0.7500\n",
      "Epoch 135/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3258 - acc: 0.8589 - val_loss: 0.8638 - val_acc: 0.7500\n",
      "Epoch 136/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3037 - acc: 0.8814 - val_loss: 0.9282 - val_acc: 0.7622\n",
      "Epoch 137/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3262 - acc: 0.8732 - val_loss: 0.9925 - val_acc: 0.6463\n",
      "Epoch 138/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3220 - acc: 0.8589 - val_loss: 0.8860 - val_acc: 0.8049\n",
      "Epoch 139/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2929 - acc: 0.8875 - val_loss: 0.8388 - val_acc: 0.8049\n",
      "Epoch 140/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2952 - acc: 0.8875 - val_loss: 0.8794 - val_acc: 0.7683\n",
      "Epoch 141/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3675 - acc: 0.8078 - val_loss: 0.9361 - val_acc: 0.6890\n",
      "Epoch 142/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3174 - acc: 0.8548 - val_loss: 0.9259 - val_acc: 0.7744\n",
      "Epoch 143/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3001 - acc: 0.8855 - val_loss: 0.8427 - val_acc: 0.8293\n",
      "Epoch 144/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3191 - acc: 0.8671 - val_loss: 0.8732 - val_acc: 0.7805\n",
      "Epoch 145/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3162 - acc: 0.8364 - val_loss: 0.8750 - val_acc: 0.7317\n",
      "Epoch 146/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3017 - acc: 0.8753 - val_loss: 0.8075 - val_acc: 0.8049\n",
      "Epoch 147/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2820 - acc: 0.8834 - val_loss: 0.8403 - val_acc: 0.7927\n",
      "Epoch 148/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2837 - acc: 0.8814 - val_loss: 0.8809 - val_acc: 0.7683\n",
      "Epoch 149/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2853 - acc: 0.8834 - val_loss: 0.8627 - val_acc: 0.7866\n",
      "Epoch 150/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3028 - acc: 0.8609 - val_loss: 0.8810 - val_acc: 0.7744\n",
      "Epoch 151/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3014 - acc: 0.8650 - val_loss: 0.8931 - val_acc: 0.7439\n",
      "Epoch 152/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3242 - acc: 0.8712 - val_loss: 0.9586 - val_acc: 0.7012\n",
      "Epoch 153/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2955 - acc: 0.8609 - val_loss: 0.8965 - val_acc: 0.7805\n",
      "Epoch 154/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2851 - acc: 0.8834 - val_loss: 0.8874 - val_acc: 0.7683\n",
      "Epoch 155/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2983 - acc: 0.8916 - val_loss: 0.8737 - val_acc: 0.7805\n",
      "Epoch 156/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2663 - acc: 0.9039 - val_loss: 1.0161 - val_acc: 0.7866\n",
      "Epoch 157/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3161 - acc: 0.8855 - val_loss: 0.8923 - val_acc: 0.8110\n",
      "Epoch 158/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2733 - acc: 0.8814 - val_loss: 1.0533 - val_acc: 0.7073\n",
      "Epoch 159/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2802 - acc: 0.8855 - val_loss: 0.9865 - val_acc: 0.7744\n",
      "Epoch 160/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2939 - acc: 0.8732 - val_loss: 0.9306 - val_acc: 0.7683\n",
      "Epoch 161/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2707 - acc: 0.8753 - val_loss: 0.9434 - val_acc: 0.7500\n",
      "Epoch 162/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3543 - acc: 0.7955 - val_loss: 0.9878 - val_acc: 0.7683\n",
      "Epoch 163/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3796 - acc: 0.8323 - val_loss: 1.0167 - val_acc: 0.6951\n",
      "Epoch 164/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3572 - acc: 0.8139 - val_loss: 0.9583 - val_acc: 0.7561\n",
      "Epoch 165/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3553 - acc: 0.8466 - val_loss: 1.0124 - val_acc: 0.6829\n",
      "Epoch 166/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3261 - acc: 0.8384 - val_loss: 0.9644 - val_acc: 0.7622\n",
      "Epoch 167/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3209 - acc: 0.8650 - val_loss: 0.9829 - val_acc: 0.7439\n",
      "Epoch 168/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2961 - acc: 0.8732 - val_loss: 1.1004 - val_acc: 0.6646\n",
      "Epoch 169/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3071 - acc: 0.8630 - val_loss: 1.0646 - val_acc: 0.7195\n",
      "Epoch 170/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3113 - acc: 0.8528 - val_loss: 0.9413 - val_acc: 0.7744\n",
      "Epoch 171/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2598 - acc: 0.8793 - val_loss: 0.9376 - val_acc: 0.7622\n",
      "Epoch 172/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2928 - acc: 0.8773 - val_loss: 1.0569 - val_acc: 0.7012\n",
      "Epoch 173/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2856 - acc: 0.8487 - val_loss: 0.9919 - val_acc: 0.7378\n",
      "Epoch 174/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3047 - acc: 0.8671 - val_loss: 1.0125 - val_acc: 0.7378\n",
      "Epoch 175/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2818 - acc: 0.8650 - val_loss: 1.0569 - val_acc: 0.7622\n",
      "Epoch 176/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2847 - acc: 0.8630 - val_loss: 0.9879 - val_acc: 0.7134\n",
      "Epoch 177/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2637 - acc: 0.8691 - val_loss: 1.0202 - val_acc: 0.7805\n",
      "Epoch 178/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2765 - acc: 0.8630 - val_loss: 0.9653 - val_acc: 0.7988\n",
      "Epoch 179/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 145us/step - loss: 0.2845 - acc: 0.8548 - val_loss: 0.9472 - val_acc: 0.7988\n",
      "Epoch 180/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.4804 - acc: 0.8466 - val_loss: 1.6845 - val_acc: 0.6768\n",
      "Epoch 181/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.7091 - acc: 0.8303 - val_loss: 1.3238 - val_acc: 0.6829\n",
      "Epoch 182/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.8719 - acc: 0.8323 - val_loss: 1.1185 - val_acc: 0.7317\n",
      "Epoch 183/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.7679 - acc: 0.8098 - val_loss: 1.7733 - val_acc: 0.6768\n",
      "Epoch 184/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.9716 - acc: 0.8303 - val_loss: 1.1316 - val_acc: 0.7317\n",
      "Epoch 185/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 1.0315 - acc: 0.8241 - val_loss: 1.6656 - val_acc: 0.7012\n",
      "Epoch 186/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.8850 - acc: 0.8344 - val_loss: 0.9548 - val_acc: 0.7805\n",
      "Epoch 187/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.6556 - acc: 0.8119 - val_loss: 1.0190 - val_acc: 0.8354\n",
      "Epoch 188/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.7858 - acc: 0.8160 - val_loss: 1.2954 - val_acc: 0.7866\n",
      "Epoch 189/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.7156 - acc: 0.8160 - val_loss: 1.4169 - val_acc: 0.7012\n",
      "Epoch 190/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.7202 - acc: 0.8241 - val_loss: 1.1407 - val_acc: 0.7988\n",
      "Epoch 191/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.4517 - acc: 0.8773 - val_loss: 0.8351 - val_acc: 0.8293\n",
      "Epoch 192/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.4258 - acc: 0.8609 - val_loss: 0.8856 - val_acc: 0.7866\n",
      "Epoch 193/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.5036 - acc: 0.8609 - val_loss: 0.9836 - val_acc: 0.7561\n",
      "Epoch 194/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.5171 - acc: 0.8303 - val_loss: 0.9345 - val_acc: 0.7683\n",
      "Epoch 195/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.4027 - acc: 0.8732 - val_loss: 0.9434 - val_acc: 0.7439\n",
      "Epoch 196/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3464 - acc: 0.8753 - val_loss: 0.8590 - val_acc: 0.8049\n",
      "Epoch 197/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3238 - acc: 0.8650 - val_loss: 0.9209 - val_acc: 0.7195\n",
      "Epoch 198/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3148 - acc: 0.8691 - val_loss: 0.8662 - val_acc: 0.7622\n",
      "Epoch 199/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3209 - acc: 0.8753 - val_loss: 0.7970 - val_acc: 0.8171\n",
      "Epoch 200/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2815 - acc: 0.8978 - val_loss: 0.8433 - val_acc: 0.7988\n",
      "Epoch 201/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2764 - acc: 0.8896 - val_loss: 0.8886 - val_acc: 0.7500\n",
      "Epoch 202/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3859 - acc: 0.8671 - val_loss: 0.8362 - val_acc: 0.8171\n",
      "Epoch 203/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3589 - acc: 0.8773 - val_loss: 1.0129 - val_acc: 0.7195\n",
      "Epoch 204/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.4587 - acc: 0.8548 - val_loss: 0.8625 - val_acc: 0.7988\n",
      "Epoch 205/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3567 - acc: 0.8650 - val_loss: 0.8787 - val_acc: 0.7805\n",
      "Epoch 206/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3104 - acc: 0.8712 - val_loss: 0.8973 - val_acc: 0.8049\n",
      "Epoch 207/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2860 - acc: 0.8814 - val_loss: 0.8966 - val_acc: 0.8049\n",
      "Epoch 208/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2957 - acc: 0.8875 - val_loss: 0.8338 - val_acc: 0.8415\n",
      "Epoch 209/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2811 - acc: 0.8875 - val_loss: 0.8647 - val_acc: 0.8232\n",
      "Epoch 210/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2667 - acc: 0.8978 - val_loss: 0.8656 - val_acc: 0.8293\n",
      "Epoch 211/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2615 - acc: 0.8978 - val_loss: 0.8920 - val_acc: 0.8232\n",
      "Epoch 212/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2468 - acc: 0.8998 - val_loss: 0.9276 - val_acc: 0.8293\n",
      "Epoch 213/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2544 - acc: 0.8814 - val_loss: 0.9457 - val_acc: 0.8171\n",
      "Epoch 214/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2502 - acc: 0.8957 - val_loss: 0.9266 - val_acc: 0.8110\n",
      "Epoch 215/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2511 - acc: 0.8937 - val_loss: 0.9638 - val_acc: 0.8110\n",
      "Epoch 216/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2459 - acc: 0.8998 - val_loss: 0.9188 - val_acc: 0.8171\n",
      "Epoch 217/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2457 - acc: 0.8978 - val_loss: 1.0022 - val_acc: 0.8232\n",
      "Epoch 218/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3210 - acc: 0.8466 - val_loss: 1.0041 - val_acc: 0.7256\n",
      "Epoch 219/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3162 - acc: 0.8507 - val_loss: 1.0253 - val_acc: 0.7683\n",
      "Epoch 220/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2822 - acc: 0.8834 - val_loss: 1.0682 - val_acc: 0.7500\n",
      "Epoch 221/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2755 - acc: 0.8957 - val_loss: 1.0372 - val_acc: 0.7378\n",
      "Epoch 222/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2719 - acc: 0.9039 - val_loss: 1.0679 - val_acc: 0.8171\n",
      "Epoch 223/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3755 - acc: 0.8732 - val_loss: 1.0758 - val_acc: 0.6768\n",
      "Epoch 224/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3691 - acc: 0.8425 - val_loss: 1.1330 - val_acc: 0.7317\n",
      "Epoch 225/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3062 - acc: 0.8548 - val_loss: 1.0518 - val_acc: 0.7500\n",
      "Epoch 226/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2829 - acc: 0.8528 - val_loss: 1.0687 - val_acc: 0.7378\n",
      "Epoch 227/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2752 - acc: 0.8814 - val_loss: 0.9884 - val_acc: 0.8171\n",
      "Epoch 228/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2983 - acc: 0.8937 - val_loss: 1.0826 - val_acc: 0.7927\n",
      "Epoch 229/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2516 - acc: 0.9018 - val_loss: 1.0102 - val_acc: 0.7866\n",
      "Epoch 230/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2361 - acc: 0.9121 - val_loss: 1.0663 - val_acc: 0.7561\n",
      "Epoch 231/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2556 - acc: 0.8978 - val_loss: 0.9800 - val_acc: 0.7805\n",
      "Epoch 232/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2392 - acc: 0.9100 - val_loss: 1.0032 - val_acc: 0.7805\n",
      "Epoch 233/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2461 - acc: 0.9018 - val_loss: 1.1676 - val_acc: 0.7195\n",
      "Epoch 234/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2836 - acc: 0.8691 - val_loss: 0.9396 - val_acc: 0.8476\n",
      "Epoch 235/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2542 - acc: 0.9018 - val_loss: 0.9529 - val_acc: 0.8293\n",
      "Epoch 236/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2524 - acc: 0.8916 - val_loss: 1.0365 - val_acc: 0.7927\n",
      "Epoch 237/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2189 - acc: 0.9059 - val_loss: 1.0945 - val_acc: 0.7683\n",
      "Epoch 238/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 157us/step - loss: 0.2337 - acc: 0.8978 - val_loss: 1.0792 - val_acc: 0.7317\n",
      "Epoch 239/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2417 - acc: 0.8855 - val_loss: 1.1097 - val_acc: 0.7561\n",
      "Epoch 240/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2443 - acc: 0.8957 - val_loss: 0.9586 - val_acc: 0.8232\n",
      "Epoch 241/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2212 - acc: 0.9162 - val_loss: 1.0288 - val_acc: 0.7988\n",
      "Epoch 242/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2417 - acc: 0.9059 - val_loss: 0.9830 - val_acc: 0.7500\n",
      "Epoch 243/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2269 - acc: 0.9059 - val_loss: 0.9625 - val_acc: 0.8415\n",
      "Epoch 244/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2202 - acc: 0.9162 - val_loss: 0.9365 - val_acc: 0.8293\n",
      "Epoch 245/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2036 - acc: 0.9223 - val_loss: 1.0669 - val_acc: 0.7927\n",
      "Epoch 246/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2189 - acc: 0.9100 - val_loss: 0.9778 - val_acc: 0.8232\n",
      "Epoch 247/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2251 - acc: 0.9100 - val_loss: 0.9819 - val_acc: 0.7744\n",
      "Epoch 248/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2238 - acc: 0.9080 - val_loss: 0.9854 - val_acc: 0.8293\n",
      "Epoch 249/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2322 - acc: 0.9059 - val_loss: 0.9919 - val_acc: 0.8049\n",
      "Epoch 250/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3043 - acc: 0.8814 - val_loss: 1.0431 - val_acc: 0.8110\n",
      "Epoch 251/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2469 - acc: 0.8998 - val_loss: 1.0366 - val_acc: 0.7988\n",
      "Epoch 252/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2279 - acc: 0.9018 - val_loss: 1.0685 - val_acc: 0.7622\n",
      "Epoch 253/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2235 - acc: 0.9100 - val_loss: 0.9824 - val_acc: 0.8232\n",
      "Epoch 254/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2104 - acc: 0.9162 - val_loss: 1.0915 - val_acc: 0.7927\n",
      "Epoch 255/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2203 - acc: 0.9141 - val_loss: 1.0394 - val_acc: 0.8110\n",
      "Epoch 256/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2018 - acc: 0.9305 - val_loss: 1.0366 - val_acc: 0.8110\n",
      "Epoch 257/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2136 - acc: 0.9162 - val_loss: 1.1016 - val_acc: 0.7927\n",
      "Epoch 258/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2856 - acc: 0.9059 - val_loss: 1.0099 - val_acc: 0.7073\n",
      "Epoch 259/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2505 - acc: 0.8957 - val_loss: 1.0737 - val_acc: 0.7683\n",
      "Epoch 260/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2269 - acc: 0.9121 - val_loss: 0.9230 - val_acc: 0.8110\n",
      "Epoch 261/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2281 - acc: 0.9202 - val_loss: 0.9545 - val_acc: 0.8049\n",
      "Epoch 262/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2261 - acc: 0.9202 - val_loss: 1.0097 - val_acc: 0.7988\n",
      "Epoch 263/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3823 - acc: 0.8875 - val_loss: 1.1365 - val_acc: 0.7073\n",
      "Epoch 264/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.4304 - acc: 0.8507 - val_loss: 1.2641 - val_acc: 0.7805\n",
      "Epoch 265/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.4022 - acc: 0.8732 - val_loss: 0.9675 - val_acc: 0.7988\n",
      "Epoch 266/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2762 - acc: 0.9141 - val_loss: 0.9962 - val_acc: 0.8049\n",
      "Epoch 267/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2428 - acc: 0.9100 - val_loss: 1.0885 - val_acc: 0.7317\n",
      "Epoch 268/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2943 - acc: 0.8466 - val_loss: 1.1556 - val_acc: 0.7866\n",
      "Epoch 269/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2250 - acc: 0.9100 - val_loss: 0.9904 - val_acc: 0.7927\n",
      "Epoch 270/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2012 - acc: 0.9182 - val_loss: 1.0128 - val_acc: 0.8171\n",
      "Epoch 271/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1927 - acc: 0.9284 - val_loss: 0.9845 - val_acc: 0.8049\n",
      "Epoch 272/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2061 - acc: 0.9264 - val_loss: 1.0397 - val_acc: 0.8110\n",
      "Epoch 273/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.4762 - acc: 0.8814 - val_loss: 1.2996 - val_acc: 0.7561\n",
      "Epoch 274/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.5636 - acc: 0.8712 - val_loss: 1.5385 - val_acc: 0.7073\n",
      "Epoch 275/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.5226 - acc: 0.8793 - val_loss: 1.7955 - val_acc: 0.7256\n",
      "Epoch 276/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.6681 - acc: 0.8650 - val_loss: 1.3849 - val_acc: 0.7744\n",
      "Epoch 277/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.4261 - acc: 0.8548 - val_loss: 1.4493 - val_acc: 0.7012\n",
      "Epoch 278/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3789 - acc: 0.8793 - val_loss: 1.2482 - val_acc: 0.7561\n",
      "Epoch 279/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3508 - acc: 0.8753 - val_loss: 1.1354 - val_acc: 0.8110\n",
      "Epoch 280/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3339 - acc: 0.8773 - val_loss: 1.0840 - val_acc: 0.8171\n",
      "Epoch 281/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2756 - acc: 0.8896 - val_loss: 1.1848 - val_acc: 0.7744\n",
      "Epoch 282/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.7759 - acc: 0.8712 - val_loss: 1.6277 - val_acc: 0.7500\n",
      "Epoch 283/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.7619 - acc: 0.8405 - val_loss: 1.3670 - val_acc: 0.7500\n",
      "Epoch 284/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.4748 - acc: 0.8650 - val_loss: 1.2621 - val_acc: 0.7317\n",
      "Epoch 285/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3970 - acc: 0.8487 - val_loss: 1.0122 - val_acc: 0.7988\n",
      "Epoch 286/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3294 - acc: 0.8609 - val_loss: 1.0215 - val_acc: 0.8232\n",
      "Epoch 287/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2625 - acc: 0.8875 - val_loss: 1.2603 - val_acc: 0.7012\n",
      "Epoch 288/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3923 - acc: 0.8712 - val_loss: 1.0683 - val_acc: 0.7927\n",
      "Epoch 289/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3504 - acc: 0.8773 - val_loss: 1.1007 - val_acc: 0.7561\n",
      "Epoch 290/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2327 - acc: 0.9141 - val_loss: 1.0021 - val_acc: 0.8110\n",
      "Epoch 291/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3144 - acc: 0.8957 - val_loss: 1.5734 - val_acc: 0.6585\n",
      "Epoch 292/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.4272 - acc: 0.8753 - val_loss: 1.0944 - val_acc: 0.7805\n",
      "Epoch 293/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3676 - acc: 0.8916 - val_loss: 1.2300 - val_acc: 0.7622\n",
      "Epoch 294/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.5715 - acc: 0.8712 - val_loss: 1.5898 - val_acc: 0.7256\n",
      "Epoch 295/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.5349 - acc: 0.8548 - val_loss: 1.0598 - val_acc: 0.7927\n",
      "Epoch 296/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.4630 - acc: 0.8732 - val_loss: 1.2067 - val_acc: 0.7744\n",
      "Epoch 297/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 145us/step - loss: 0.3216 - acc: 0.8916 - val_loss: 1.4131 - val_acc: 0.7195\n",
      "Epoch 298/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2930 - acc: 0.8773 - val_loss: 1.1691 - val_acc: 0.7561\n",
      "Epoch 299/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2747 - acc: 0.9100 - val_loss: 1.1819 - val_acc: 0.7805\n",
      "Epoch 300/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2494 - acc: 0.9182 - val_loss: 1.0043 - val_acc: 0.7805\n",
      "Epoch 301/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2528 - acc: 0.9059 - val_loss: 1.2827 - val_acc: 0.7317\n",
      "Epoch 302/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2389 - acc: 0.9100 - val_loss: 1.1160 - val_acc: 0.8049\n",
      "Epoch 303/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.6237 - acc: 0.8160 - val_loss: 1.2495 - val_acc: 0.7378\n",
      "Epoch 304/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3947 - acc: 0.8814 - val_loss: 1.0049 - val_acc: 0.7866\n",
      "Epoch 305/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2709 - acc: 0.8814 - val_loss: 1.1211 - val_acc: 0.7866\n",
      "Epoch 306/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2621 - acc: 0.9080 - val_loss: 0.9668 - val_acc: 0.7866\n",
      "Epoch 307/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2577 - acc: 0.9039 - val_loss: 1.1337 - val_acc: 0.7744\n",
      "Epoch 308/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2476 - acc: 0.9080 - val_loss: 1.1278 - val_acc: 0.7866\n",
      "Epoch 309/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2459 - acc: 0.9121 - val_loss: 0.8047 - val_acc: 0.8415\n",
      "Epoch 310/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2406 - acc: 0.8937 - val_loss: 1.2580 - val_acc: 0.7683\n",
      "Epoch 311/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.2517 - acc: 0.8978 - val_loss: 1.0478 - val_acc: 0.8110\n",
      "Epoch 312/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2710 - acc: 0.9141 - val_loss: 0.9910 - val_acc: 0.8049\n",
      "Epoch 313/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2475 - acc: 0.9141 - val_loss: 1.2559 - val_acc: 0.7439\n",
      "Epoch 314/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2801 - acc: 0.8957 - val_loss: 1.2466 - val_acc: 0.8171\n",
      "Epoch 315/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3313 - acc: 0.8998 - val_loss: 0.9221 - val_acc: 0.8232\n",
      "Epoch 316/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3113 - acc: 0.8978 - val_loss: 0.9649 - val_acc: 0.8354\n",
      "Epoch 317/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3825 - acc: 0.8896 - val_loss: 0.9665 - val_acc: 0.7988\n",
      "Epoch 318/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2926 - acc: 0.8998 - val_loss: 1.0036 - val_acc: 0.8415\n",
      "Epoch 319/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2816 - acc: 0.9100 - val_loss: 1.0748 - val_acc: 0.8354\n",
      "Epoch 320/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3266 - acc: 0.9018 - val_loss: 0.9745 - val_acc: 0.7927\n",
      "Epoch 321/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2680 - acc: 0.9243 - val_loss: 1.1553 - val_acc: 0.7988\n",
      "Epoch 322/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3312 - acc: 0.9059 - val_loss: 1.1260 - val_acc: 0.7561\n",
      "Epoch 323/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.4821 - acc: 0.8691 - val_loss: 1.0347 - val_acc: 0.8049\n",
      "Epoch 324/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3268 - acc: 0.8712 - val_loss: 1.0299 - val_acc: 0.7561\n",
      "Epoch 325/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3326 - acc: 0.8916 - val_loss: 0.8791 - val_acc: 0.8415\n",
      "Epoch 326/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3034 - acc: 0.8957 - val_loss: 1.0199 - val_acc: 0.8110\n",
      "Epoch 327/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2791 - acc: 0.9243 - val_loss: 0.8994 - val_acc: 0.8598\n",
      "Epoch 328/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2791 - acc: 0.9223 - val_loss: 1.0186 - val_acc: 0.8293\n",
      "Epoch 329/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2757 - acc: 0.9223 - val_loss: 0.8762 - val_acc: 0.8415\n",
      "Epoch 330/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2843 - acc: 0.9141 - val_loss: 0.9321 - val_acc: 0.8171\n",
      "Epoch 331/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2993 - acc: 0.9059 - val_loss: 1.0826 - val_acc: 0.8171\n",
      "Epoch 332/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2675 - acc: 0.9162 - val_loss: 0.9402 - val_acc: 0.8415\n",
      "Epoch 333/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2767 - acc: 0.9182 - val_loss: 0.9574 - val_acc: 0.8476\n",
      "Epoch 334/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2602 - acc: 0.9264 - val_loss: 0.9990 - val_acc: 0.8110\n",
      "Epoch 335/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2498 - acc: 0.9284 - val_loss: 1.0180 - val_acc: 0.8049\n",
      "Epoch 336/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.2563 - acc: 0.9243 - val_loss: 1.0796 - val_acc: 0.7866\n",
      "Epoch 337/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3214 - acc: 0.9141 - val_loss: 1.0689 - val_acc: 0.7927\n",
      "Epoch 338/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3028 - acc: 0.8937 - val_loss: 0.9848 - val_acc: 0.8110\n",
      "Epoch 339/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3850 - acc: 0.8793 - val_loss: 1.0394 - val_acc: 0.8293\n",
      "Epoch 340/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.6141 - acc: 0.8609 - val_loss: 1.3469 - val_acc: 0.7317\n",
      "Epoch 341/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.6568 - acc: 0.8609 - val_loss: 1.1257 - val_acc: 0.7439\n",
      "Epoch 342/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.5155 - acc: 0.8937 - val_loss: 1.3222 - val_acc: 0.7439\n",
      "Epoch 343/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.7631 - acc: 0.8405 - val_loss: 1.3317 - val_acc: 0.7378\n",
      "Epoch 344/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.4380 - acc: 0.8589 - val_loss: 1.0085 - val_acc: 0.7927\n",
      "Epoch 345/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.4773 - acc: 0.8671 - val_loss: 1.0544 - val_acc: 0.7744\n",
      "Epoch 346/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3886 - acc: 0.8896 - val_loss: 0.9330 - val_acc: 0.7805\n",
      "Epoch 347/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3027 - acc: 0.9182 - val_loss: 0.9970 - val_acc: 0.7805\n",
      "Epoch 348/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.5799 - acc: 0.8732 - val_loss: 1.4101 - val_acc: 0.7317\n",
      "Epoch 349/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.6410 - acc: 0.8793 - val_loss: 1.1338 - val_acc: 0.7500\n",
      "Epoch 350/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.4827 - acc: 0.8773 - val_loss: 0.9791 - val_acc: 0.8049\n",
      "Epoch 351/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3138 - acc: 0.8998 - val_loss: 0.9240 - val_acc: 0.8049\n",
      "Epoch 352/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2915 - acc: 0.9039 - val_loss: 1.0794 - val_acc: 0.7927\n",
      "Epoch 353/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2732 - acc: 0.9162 - val_loss: 0.9990 - val_acc: 0.7805\n",
      "Epoch 354/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2518 - acc: 0.9141 - val_loss: 1.0394 - val_acc: 0.7927\n",
      "Epoch 355/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.2564 - acc: 0.9202 - val_loss: 0.9192 - val_acc: 0.7988\n",
      "Epoch 356/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 133us/step - loss: 0.2309 - acc: 0.9243 - val_loss: 0.9243 - val_acc: 0.8293\n",
      "Epoch 357/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2249 - acc: 0.9448 - val_loss: 0.9033 - val_acc: 0.8049\n",
      "Epoch 358/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2156 - acc: 0.9366 - val_loss: 0.9094 - val_acc: 0.8354\n",
      "Epoch 359/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2218 - acc: 0.9202 - val_loss: 1.0145 - val_acc: 0.7805\n",
      "Epoch 360/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2328 - acc: 0.9141 - val_loss: 0.9182 - val_acc: 0.8537\n",
      "Epoch 361/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2278 - acc: 0.9284 - val_loss: 0.8824 - val_acc: 0.8049\n",
      "Epoch 362/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2186 - acc: 0.9223 - val_loss: 0.9155 - val_acc: 0.8354\n",
      "Epoch 363/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2263 - acc: 0.9243 - val_loss: 0.9080 - val_acc: 0.8293\n",
      "Epoch 364/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2072 - acc: 0.9162 - val_loss: 0.8261 - val_acc: 0.8171\n",
      "Epoch 365/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2743 - acc: 0.9080 - val_loss: 1.2226 - val_acc: 0.7866\n",
      "Epoch 366/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3923 - acc: 0.8998 - val_loss: 0.9045 - val_acc: 0.8232\n",
      "Epoch 367/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2067 - acc: 0.9427 - val_loss: 0.9420 - val_acc: 0.8171\n",
      "Epoch 368/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1971 - acc: 0.9427 - val_loss: 0.9481 - val_acc: 0.8293\n",
      "Epoch 369/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2705 - acc: 0.9018 - val_loss: 1.0439 - val_acc: 0.7927\n",
      "Epoch 370/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3150 - acc: 0.8589 - val_loss: 0.8747 - val_acc: 0.8354\n",
      "Epoch 371/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2348 - acc: 0.9202 - val_loss: 0.9593 - val_acc: 0.8110\n",
      "Epoch 372/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2638 - acc: 0.9182 - val_loss: 0.9612 - val_acc: 0.8171\n",
      "Epoch 373/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2409 - acc: 0.9080 - val_loss: 0.8546 - val_acc: 0.8476\n",
      "Epoch 374/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1956 - acc: 0.9325 - val_loss: 1.1008 - val_acc: 0.8049\n",
      "Epoch 375/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2629 - acc: 0.9243 - val_loss: 1.0227 - val_acc: 0.7805\n",
      "Epoch 376/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3201 - acc: 0.8998 - val_loss: 0.9419 - val_acc: 0.7988\n",
      "Epoch 377/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.2139 - acc: 0.9366 - val_loss: 0.9065 - val_acc: 0.8232\n",
      "Epoch 378/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2876 - acc: 0.8978 - val_loss: 0.8723 - val_acc: 0.8354\n",
      "Epoch 379/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2266 - acc: 0.9305 - val_loss: 0.8152 - val_acc: 0.8415\n",
      "Epoch 380/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2149 - acc: 0.9325 - val_loss: 1.1105 - val_acc: 0.8171\n",
      "Epoch 381/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2045 - acc: 0.9407 - val_loss: 0.9750 - val_acc: 0.7988\n",
      "Epoch 382/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2060 - acc: 0.9284 - val_loss: 0.9779 - val_acc: 0.8415\n",
      "Epoch 383/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2178 - acc: 0.9366 - val_loss: 0.9058 - val_acc: 0.8415\n",
      "Epoch 384/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2053 - acc: 0.9407 - val_loss: 0.9691 - val_acc: 0.8232\n",
      "Epoch 385/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.2053 - acc: 0.9325 - val_loss: 1.0297 - val_acc: 0.8171\n",
      "Epoch 386/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2049 - acc: 0.9325 - val_loss: 0.8580 - val_acc: 0.8598\n",
      "Epoch 387/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2016 - acc: 0.9427 - val_loss: 0.9672 - val_acc: 0.8354\n",
      "Epoch 388/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2685 - acc: 0.9141 - val_loss: 1.1758 - val_acc: 0.7622\n",
      "Epoch 389/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2212 - acc: 0.9284 - val_loss: 0.9794 - val_acc: 0.7988\n",
      "Epoch 390/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1903 - acc: 0.9325 - val_loss: 0.9611 - val_acc: 0.8110\n",
      "Epoch 391/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1923 - acc: 0.9366 - val_loss: 0.7891 - val_acc: 0.8293\n",
      "Epoch 392/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1880 - acc: 0.9346 - val_loss: 1.0239 - val_acc: 0.8293\n",
      "Epoch 393/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1830 - acc: 0.9407 - val_loss: 0.8252 - val_acc: 0.8232\n",
      "Epoch 394/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.1749 - acc: 0.9448 - val_loss: 0.9068 - val_acc: 0.8293\n",
      "Epoch 395/1000\n",
      "489/489 [==============================] - 0s 132us/step - loss: 0.1886 - acc: 0.9366 - val_loss: 0.8525 - val_acc: 0.8232\n",
      "Epoch 396/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.1646 - acc: 0.9509 - val_loss: 1.0319 - val_acc: 0.8171\n",
      "Epoch 397/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.1868 - acc: 0.9325 - val_loss: 0.9157 - val_acc: 0.8293\n",
      "Epoch 398/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.1895 - acc: 0.9407 - val_loss: 0.8851 - val_acc: 0.8232\n",
      "Epoch 399/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2545 - acc: 0.9080 - val_loss: 1.0447 - val_acc: 0.7927\n",
      "Epoch 400/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2081 - acc: 0.9387 - val_loss: 0.9642 - val_acc: 0.8171\n",
      "Epoch 401/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2303 - acc: 0.9182 - val_loss: 1.1594 - val_acc: 0.7927\n",
      "Epoch 402/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1807 - acc: 0.9509 - val_loss: 0.9481 - val_acc: 0.8110\n",
      "Epoch 403/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2012 - acc: 0.9305 - val_loss: 0.8240 - val_acc: 0.8354\n",
      "Epoch 404/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.2353 - acc: 0.9182 - val_loss: 1.1729 - val_acc: 0.7988\n",
      "Epoch 405/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2356 - acc: 0.9141 - val_loss: 0.8018 - val_acc: 0.8110\n",
      "Epoch 406/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2172 - acc: 0.9264 - val_loss: 1.1886 - val_acc: 0.7744\n",
      "Epoch 407/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2013 - acc: 0.9284 - val_loss: 1.1856 - val_acc: 0.7622\n",
      "Epoch 408/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3073 - acc: 0.8937 - val_loss: 0.9770 - val_acc: 0.8049\n",
      "Epoch 409/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.2520 - acc: 0.9100 - val_loss: 0.7151 - val_acc: 0.8598\n",
      "Epoch 410/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2450 - acc: 0.9243 - val_loss: 1.0013 - val_acc: 0.7927\n",
      "Epoch 411/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2139 - acc: 0.9305 - val_loss: 1.0286 - val_acc: 0.8171\n",
      "Epoch 412/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2072 - acc: 0.9325 - val_loss: 0.9244 - val_acc: 0.8415\n",
      "Epoch 413/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1820 - acc: 0.9366 - val_loss: 0.9289 - val_acc: 0.8232\n",
      "Epoch 414/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2272 - acc: 0.9243 - val_loss: 0.9851 - val_acc: 0.8232\n",
      "Epoch 415/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 145us/step - loss: 0.1927 - acc: 0.9346 - val_loss: 0.9413 - val_acc: 0.8232\n",
      "Epoch 416/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1878 - acc: 0.9387 - val_loss: 0.9261 - val_acc: 0.8293\n",
      "Epoch 417/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1917 - acc: 0.9407 - val_loss: 0.9276 - val_acc: 0.8171\n",
      "Epoch 418/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1850 - acc: 0.9387 - val_loss: 0.9927 - val_acc: 0.8232\n",
      "Epoch 419/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1722 - acc: 0.9530 - val_loss: 1.1922 - val_acc: 0.8171\n",
      "Epoch 420/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1870 - acc: 0.9305 - val_loss: 1.0050 - val_acc: 0.7744\n",
      "Epoch 421/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2096 - acc: 0.9305 - val_loss: 1.0418 - val_acc: 0.7866\n",
      "Epoch 422/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.2136 - acc: 0.9223 - val_loss: 0.9178 - val_acc: 0.8110\n",
      "Epoch 423/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1775 - acc: 0.9427 - val_loss: 1.0834 - val_acc: 0.8110\n",
      "Epoch 424/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.1575 - acc: 0.9550 - val_loss: 1.0133 - val_acc: 0.8232\n",
      "Epoch 425/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.1921 - acc: 0.9407 - val_loss: 0.8162 - val_acc: 0.8476\n",
      "Epoch 426/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1703 - acc: 0.9346 - val_loss: 0.9715 - val_acc: 0.8110\n",
      "Epoch 427/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1873 - acc: 0.9346 - val_loss: 0.9453 - val_acc: 0.8049\n",
      "Epoch 428/1000\n",
      "489/489 [==============================] - 0s 118us/step - loss: 0.1660 - acc: 0.9509 - val_loss: 0.8323 - val_acc: 0.8171\n",
      "Epoch 429/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1966 - acc: 0.9407 - val_loss: 0.9813 - val_acc: 0.8171\n",
      "Epoch 430/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2057 - acc: 0.9243 - val_loss: 1.0475 - val_acc: 0.7866\n",
      "Epoch 431/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.1906 - acc: 0.9346 - val_loss: 0.9929 - val_acc: 0.8171\n",
      "Epoch 432/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3080 - acc: 0.9039 - val_loss: 1.0643 - val_acc: 0.7927\n",
      "Epoch 433/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2839 - acc: 0.9162 - val_loss: 0.9469 - val_acc: 0.8293\n",
      "Epoch 434/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3403 - acc: 0.9141 - val_loss: 1.1138 - val_acc: 0.7683\n",
      "Epoch 435/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.5668 - acc: 0.8875 - val_loss: 1.1616 - val_acc: 0.7317\n",
      "Epoch 436/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.4192 - acc: 0.8303 - val_loss: 1.1503 - val_acc: 0.7622\n",
      "Epoch 437/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3497 - acc: 0.8589 - val_loss: 1.4776 - val_acc: 0.6646\n",
      "Epoch 438/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3931 - acc: 0.8160 - val_loss: 1.1462 - val_acc: 0.6585\n",
      "Epoch 439/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.8352 - acc: 0.8466 - val_loss: 1.1125 - val_acc: 0.7256\n",
      "Epoch 440/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.5288 - acc: 0.7587 - val_loss: 0.9320 - val_acc: 0.7866\n",
      "Epoch 441/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3084 - acc: 0.8405 - val_loss: 0.9078 - val_acc: 0.7256\n",
      "Epoch 442/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2420 - acc: 0.8814 - val_loss: 0.8594 - val_acc: 0.7683\n",
      "Epoch 443/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3138 - acc: 0.8241 - val_loss: 0.9540 - val_acc: 0.7012\n",
      "Epoch 444/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.3185 - acc: 0.8262 - val_loss: 0.9794 - val_acc: 0.7683\n",
      "Epoch 445/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3701 - acc: 0.8793 - val_loss: 1.0460 - val_acc: 0.7805\n",
      "Epoch 446/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2577 - acc: 0.9100 - val_loss: 0.9541 - val_acc: 0.7988\n",
      "Epoch 447/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2362 - acc: 0.9039 - val_loss: 1.0559 - val_acc: 0.7500\n",
      "Epoch 448/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.1871 - acc: 0.9182 - val_loss: 0.9834 - val_acc: 0.7500\n",
      "Epoch 449/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2203 - acc: 0.8814 - val_loss: 1.0894 - val_acc: 0.6768\n",
      "Epoch 450/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2520 - acc: 0.8487 - val_loss: 1.0309 - val_acc: 0.7561\n",
      "Epoch 451/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2389 - acc: 0.8691 - val_loss: 0.9727 - val_acc: 0.7012\n",
      "Epoch 452/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2291 - acc: 0.8630 - val_loss: 1.0366 - val_acc: 0.7134\n",
      "Epoch 453/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1714 - acc: 0.9223 - val_loss: 1.0316 - val_acc: 0.7683\n",
      "Epoch 454/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1721 - acc: 0.9407 - val_loss: 0.9966 - val_acc: 0.7927\n",
      "Epoch 455/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1925 - acc: 0.8978 - val_loss: 1.0382 - val_acc: 0.7561\n",
      "Epoch 456/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2358 - acc: 0.9018 - val_loss: 0.9079 - val_acc: 0.7256\n",
      "Epoch 457/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2837 - acc: 0.8528 - val_loss: 0.9645 - val_acc: 0.7439\n",
      "Epoch 458/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2421 - acc: 0.8691 - val_loss: 0.8559 - val_acc: 0.7805\n",
      "Epoch 459/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2260 - acc: 0.9141 - val_loss: 1.0094 - val_acc: 0.7744\n",
      "Epoch 460/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1712 - acc: 0.9223 - val_loss: 1.0473 - val_acc: 0.7500\n",
      "Epoch 461/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1875 - acc: 0.9346 - val_loss: 0.8608 - val_acc: 0.7622\n",
      "Epoch 462/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2453 - acc: 0.8405 - val_loss: 0.9988 - val_acc: 0.7195\n",
      "Epoch 463/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2179 - acc: 0.8630 - val_loss: 1.0374 - val_acc: 0.6951\n",
      "Epoch 464/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.1872 - acc: 0.9100 - val_loss: 1.1980 - val_acc: 0.7317\n",
      "Epoch 465/1000\n",
      "489/489 [==============================] - 0s 118us/step - loss: 0.1897 - acc: 0.9100 - val_loss: 0.9983 - val_acc: 0.7744\n",
      "Epoch 466/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.1603 - acc: 0.9407 - val_loss: 0.9140 - val_acc: 0.7805\n",
      "Epoch 467/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1568 - acc: 0.9284 - val_loss: 0.9167 - val_acc: 0.7622\n",
      "Epoch 468/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1571 - acc: 0.9202 - val_loss: 0.8776 - val_acc: 0.7866\n",
      "Epoch 469/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1697 - acc: 0.9202 - val_loss: 0.8761 - val_acc: 0.8049\n",
      "Epoch 470/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1382 - acc: 0.9407 - val_loss: 0.9292 - val_acc: 0.8049\n",
      "Epoch 471/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.1874 - acc: 0.9141 - val_loss: 0.9634 - val_acc: 0.8049\n",
      "Epoch 472/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2301 - acc: 0.9018 - val_loss: 0.9638 - val_acc: 0.7561\n",
      "Epoch 473/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1586 - acc: 0.9427 - val_loss: 1.0257 - val_acc: 0.8110\n",
      "Epoch 474/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 131us/step - loss: 0.1686 - acc: 0.9366 - val_loss: 0.9789 - val_acc: 0.8049\n",
      "Epoch 475/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.1300 - acc: 0.9448 - val_loss: 1.0076 - val_acc: 0.7744\n",
      "Epoch 476/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1346 - acc: 0.9346 - val_loss: 0.9733 - val_acc: 0.8049\n",
      "Epoch 477/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1829 - acc: 0.9100 - val_loss: 0.9948 - val_acc: 0.8110\n",
      "Epoch 478/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1841 - acc: 0.9366 - val_loss: 1.0103 - val_acc: 0.8110\n",
      "Epoch 479/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.1708 - acc: 0.9387 - val_loss: 0.9747 - val_acc: 0.7805\n",
      "Epoch 480/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.1503 - acc: 0.9366 - val_loss: 0.9534 - val_acc: 0.8110\n",
      "Epoch 481/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.2850 - acc: 0.9162 - val_loss: 0.9043 - val_acc: 0.8171\n",
      "Epoch 482/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2283 - acc: 0.9141 - val_loss: 0.8563 - val_acc: 0.8232\n",
      "Epoch 483/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1427 - acc: 0.9387 - val_loss: 1.0259 - val_acc: 0.7683\n",
      "Epoch 484/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1264 - acc: 0.9407 - val_loss: 0.9152 - val_acc: 0.8110\n",
      "Epoch 485/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.1265 - acc: 0.9489 - val_loss: 0.9685 - val_acc: 0.8171\n",
      "Epoch 486/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1403 - acc: 0.9468 - val_loss: 1.2049 - val_acc: 0.7256\n",
      "Epoch 487/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3997 - acc: 0.8978 - val_loss: 0.8726 - val_acc: 0.8110\n",
      "Epoch 488/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.1512 - acc: 0.9387 - val_loss: 0.9120 - val_acc: 0.8354\n",
      "Epoch 489/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.1389 - acc: 0.9387 - val_loss: 0.9919 - val_acc: 0.7866\n",
      "Epoch 490/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.1588 - acc: 0.9305 - val_loss: 0.8993 - val_acc: 0.8354\n",
      "Epoch 491/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.1250 - acc: 0.9427 - val_loss: 0.9408 - val_acc: 0.8232\n",
      "Epoch 492/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1470 - acc: 0.9407 - val_loss: 0.9382 - val_acc: 0.8232\n",
      "Epoch 493/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.1841 - acc: 0.9468 - val_loss: 1.0896 - val_acc: 0.7988\n",
      "Epoch 494/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.1520 - acc: 0.9489 - val_loss: 1.0291 - val_acc: 0.7805\n",
      "Epoch 495/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.1548 - acc: 0.9305 - val_loss: 0.8586 - val_acc: 0.7927\n",
      "Epoch 496/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1205 - acc: 0.9489 - val_loss: 1.0215 - val_acc: 0.8049\n",
      "Epoch 497/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1196 - acc: 0.9509 - val_loss: 0.8843 - val_acc: 0.8232\n",
      "Epoch 498/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1198 - acc: 0.9448 - val_loss: 0.9779 - val_acc: 0.7805\n",
      "Epoch 499/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1261 - acc: 0.9530 - val_loss: 0.9790 - val_acc: 0.8110\n",
      "Epoch 500/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1463 - acc: 0.9448 - val_loss: 1.0788 - val_acc: 0.7683\n",
      "Epoch 501/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1427 - acc: 0.9325 - val_loss: 1.0241 - val_acc: 0.8110\n",
      "Epoch 502/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1272 - acc: 0.9407 - val_loss: 0.9697 - val_acc: 0.8049\n",
      "Epoch 503/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.1249 - acc: 0.9427 - val_loss: 1.0860 - val_acc: 0.7744\n",
      "Epoch 504/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1454 - acc: 0.9387 - val_loss: 1.1268 - val_acc: 0.7622\n",
      "Epoch 505/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.1771 - acc: 0.9325 - val_loss: 1.0855 - val_acc: 0.7683\n",
      "Epoch 506/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.1799 - acc: 0.9243 - val_loss: 0.8992 - val_acc: 0.7988\n",
      "Epoch 507/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1476 - acc: 0.9407 - val_loss: 1.1338 - val_acc: 0.8110\n",
      "Epoch 508/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.1496 - acc: 0.9243 - val_loss: 0.8903 - val_acc: 0.7988\n",
      "Epoch 509/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.1422 - acc: 0.9346 - val_loss: 0.8097 - val_acc: 0.8232\n",
      "Epoch 510/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.1555 - acc: 0.9284 - val_loss: 0.8613 - val_acc: 0.8110\n",
      "Epoch 511/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1453 - acc: 0.9366 - val_loss: 1.0916 - val_acc: 0.7866\n",
      "Epoch 512/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.1612 - acc: 0.9489 - val_loss: 0.9800 - val_acc: 0.7744\n",
      "Epoch 513/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1555 - acc: 0.9366 - val_loss: 0.8702 - val_acc: 0.8110\n",
      "Epoch 514/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1249 - acc: 0.9387 - val_loss: 1.1730 - val_acc: 0.7744\n",
      "Epoch 515/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.1886 - acc: 0.9264 - val_loss: 1.0360 - val_acc: 0.8049\n",
      "Epoch 516/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.1656 - acc: 0.9468 - val_loss: 1.0423 - val_acc: 0.7927\n",
      "Epoch 517/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.1120 - acc: 0.9611 - val_loss: 1.0481 - val_acc: 0.7805\n",
      "Epoch 518/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.0960 - acc: 0.9632 - val_loss: 1.0632 - val_acc: 0.7744\n",
      "Epoch 519/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0892 - acc: 0.9652 - val_loss: 1.0518 - val_acc: 0.8171\n",
      "Epoch 520/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.0911 - acc: 0.9611 - val_loss: 1.1635 - val_acc: 0.7744\n",
      "Epoch 521/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1876 - acc: 0.9468 - val_loss: 1.3669 - val_acc: 0.7500\n",
      "Epoch 522/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3337 - acc: 0.9387 - val_loss: 1.3557 - val_acc: 0.7195\n",
      "Epoch 523/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2801 - acc: 0.9243 - val_loss: 1.1138 - val_acc: 0.7744\n",
      "Epoch 524/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1323 - acc: 0.9550 - val_loss: 1.2138 - val_acc: 0.7683\n",
      "Epoch 525/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1357 - acc: 0.9611 - val_loss: 1.0894 - val_acc: 0.7805\n",
      "Epoch 526/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.1166 - acc: 0.9571 - val_loss: 1.1990 - val_acc: 0.7500\n",
      "Epoch 527/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.1104 - acc: 0.9530 - val_loss: 1.2463 - val_acc: 0.7683\n",
      "Epoch 528/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1352 - acc: 0.9468 - val_loss: 1.1118 - val_acc: 0.7622\n",
      "Epoch 529/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1123 - acc: 0.9652 - val_loss: 0.9665 - val_acc: 0.7744\n",
      "Epoch 530/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.1113 - acc: 0.9571 - val_loss: 0.9695 - val_acc: 0.7988\n",
      "Epoch 531/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1057 - acc: 0.9611 - val_loss: 1.0249 - val_acc: 0.7805\n",
      "Epoch 532/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.1071 - acc: 0.9571 - val_loss: 0.9609 - val_acc: 0.7988\n",
      "Epoch 533/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 131us/step - loss: 0.1403 - acc: 0.9530 - val_loss: 1.0700 - val_acc: 0.7988\n",
      "Epoch 534/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0919 - acc: 0.9673 - val_loss: 1.0275 - val_acc: 0.7683\n",
      "Epoch 535/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0846 - acc: 0.9652 - val_loss: 1.0515 - val_acc: 0.8049\n",
      "Epoch 536/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1087 - acc: 0.9571 - val_loss: 1.0872 - val_acc: 0.7561\n",
      "Epoch 537/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1227 - acc: 0.9591 - val_loss: 0.9718 - val_acc: 0.7805\n",
      "Epoch 538/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1198 - acc: 0.9550 - val_loss: 0.9526 - val_acc: 0.7744\n",
      "Epoch 539/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0807 - acc: 0.9714 - val_loss: 1.0219 - val_acc: 0.7927\n",
      "Epoch 540/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0875 - acc: 0.9673 - val_loss: 1.0408 - val_acc: 0.7927\n",
      "Epoch 541/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1254 - acc: 0.9489 - val_loss: 1.0922 - val_acc: 0.7561\n",
      "Epoch 542/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.1203 - acc: 0.9550 - val_loss: 0.9759 - val_acc: 0.8049\n",
      "Epoch 543/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.1147 - acc: 0.9530 - val_loss: 0.9669 - val_acc: 0.7683\n",
      "Epoch 544/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.1271 - acc: 0.9427 - val_loss: 0.9267 - val_acc: 0.7683\n",
      "Epoch 545/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.1065 - acc: 0.9509 - val_loss: 0.9359 - val_acc: 0.7988\n",
      "Epoch 546/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0869 - acc: 0.9632 - val_loss: 1.0258 - val_acc: 0.7805\n",
      "Epoch 547/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1307 - acc: 0.9530 - val_loss: 1.0198 - val_acc: 0.7866\n",
      "Epoch 548/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1139 - acc: 0.9571 - val_loss: 0.9986 - val_acc: 0.7866\n",
      "Epoch 549/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0828 - acc: 0.9632 - val_loss: 0.9483 - val_acc: 0.8171\n",
      "Epoch 550/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1496 - acc: 0.9468 - val_loss: 1.0483 - val_acc: 0.7805\n",
      "Epoch 551/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0800 - acc: 0.9632 - val_loss: 1.0976 - val_acc: 0.7927\n",
      "Epoch 552/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0679 - acc: 0.9734 - val_loss: 1.0710 - val_acc: 0.8171\n",
      "Epoch 553/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.0710 - acc: 0.9796 - val_loss: 1.0247 - val_acc: 0.8110\n",
      "Epoch 554/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0745 - acc: 0.9857 - val_loss: 1.0528 - val_acc: 0.8171\n",
      "Epoch 555/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0668 - acc: 0.9775 - val_loss: 1.0055 - val_acc: 0.8110\n",
      "Epoch 556/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0731 - acc: 0.9755 - val_loss: 1.0068 - val_acc: 0.8110\n",
      "Epoch 557/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0628 - acc: 0.9734 - val_loss: 1.1124 - val_acc: 0.8110\n",
      "Epoch 558/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0821 - acc: 0.9734 - val_loss: 1.1212 - val_acc: 0.7927\n",
      "Epoch 559/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.0609 - acc: 0.9775 - val_loss: 1.1691 - val_acc: 0.7683\n",
      "Epoch 560/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.0968 - acc: 0.9632 - val_loss: 1.0952 - val_acc: 0.7866\n",
      "Epoch 561/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1103 - acc: 0.9509 - val_loss: 0.9870 - val_acc: 0.7805\n",
      "Epoch 562/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0908 - acc: 0.9673 - val_loss: 1.0478 - val_acc: 0.7927\n",
      "Epoch 563/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0878 - acc: 0.9693 - val_loss: 1.1981 - val_acc: 0.7622\n",
      "Epoch 564/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1302 - acc: 0.9509 - val_loss: 1.0166 - val_acc: 0.7683\n",
      "Epoch 565/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0943 - acc: 0.9611 - val_loss: 0.9703 - val_acc: 0.7866\n",
      "Epoch 566/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0844 - acc: 0.9673 - val_loss: 1.0111 - val_acc: 0.7927\n",
      "Epoch 567/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1286 - acc: 0.9611 - val_loss: 1.2981 - val_acc: 0.7683\n",
      "Epoch 568/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1001 - acc: 0.9632 - val_loss: 1.2469 - val_acc: 0.7683\n",
      "Epoch 569/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0826 - acc: 0.9632 - val_loss: 0.9362 - val_acc: 0.7927\n",
      "Epoch 570/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0938 - acc: 0.9734 - val_loss: 0.9834 - val_acc: 0.8049\n",
      "Epoch 571/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0654 - acc: 0.9836 - val_loss: 1.0425 - val_acc: 0.7866\n",
      "Epoch 572/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0796 - acc: 0.9673 - val_loss: 1.0687 - val_acc: 0.7744\n",
      "Epoch 573/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0906 - acc: 0.9652 - val_loss: 1.0080 - val_acc: 0.7988\n",
      "Epoch 574/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1088 - acc: 0.9591 - val_loss: 1.1063 - val_acc: 0.7561\n",
      "Epoch 575/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.0760 - acc: 0.9673 - val_loss: 1.0699 - val_acc: 0.7988\n",
      "Epoch 576/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0738 - acc: 0.9714 - val_loss: 1.2188 - val_acc: 0.7500\n",
      "Epoch 577/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0936 - acc: 0.9550 - val_loss: 1.1617 - val_acc: 0.7866\n",
      "Epoch 578/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0930 - acc: 0.9632 - val_loss: 1.0222 - val_acc: 0.7988\n",
      "Epoch 579/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0548 - acc: 0.9796 - val_loss: 1.1053 - val_acc: 0.7988\n",
      "Epoch 580/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0559 - acc: 0.9857 - val_loss: 1.1457 - val_acc: 0.7622\n",
      "Epoch 581/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0536 - acc: 0.9836 - val_loss: 1.0612 - val_acc: 0.8049\n",
      "Epoch 582/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0667 - acc: 0.9775 - val_loss: 1.1892 - val_acc: 0.7683\n",
      "Epoch 583/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0543 - acc: 0.9816 - val_loss: 1.0719 - val_acc: 0.7988\n",
      "Epoch 584/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1685 - acc: 0.9714 - val_loss: 1.1975 - val_acc: 0.7683\n",
      "Epoch 585/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0896 - acc: 0.9734 - val_loss: 1.0795 - val_acc: 0.7988\n",
      "Epoch 586/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0670 - acc: 0.9734 - val_loss: 1.0877 - val_acc: 0.7988\n",
      "Epoch 587/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0705 - acc: 0.9734 - val_loss: 1.0210 - val_acc: 0.7927\n",
      "Epoch 588/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0961 - acc: 0.9591 - val_loss: 0.9355 - val_acc: 0.8110\n",
      "Epoch 589/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0947 - acc: 0.9632 - val_loss: 0.9725 - val_acc: 0.7927\n",
      "Epoch 590/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.1822 - acc: 0.9387 - val_loss: 0.9234 - val_acc: 0.7927\n",
      "Epoch 591/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.1746 - acc: 0.9427 - val_loss: 1.0509 - val_acc: 0.7622\n",
      "Epoch 592/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 147us/step - loss: 0.1635 - acc: 0.9387 - val_loss: 0.9466 - val_acc: 0.8110\n",
      "Epoch 593/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1513 - acc: 0.9652 - val_loss: 1.2271 - val_acc: 0.7927\n",
      "Epoch 594/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1352 - acc: 0.9611 - val_loss: 1.1286 - val_acc: 0.7805\n",
      "Epoch 595/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2008 - acc: 0.9591 - val_loss: 1.3076 - val_acc: 0.7378\n",
      "Epoch 596/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1601 - acc: 0.9509 - val_loss: 1.2187 - val_acc: 0.7378\n",
      "Epoch 597/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.1281 - acc: 0.9366 - val_loss: 1.1551 - val_acc: 0.7500\n",
      "Epoch 598/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1011 - acc: 0.9489 - val_loss: 1.1428 - val_acc: 0.7927\n",
      "Epoch 599/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0818 - acc: 0.9714 - val_loss: 1.0187 - val_acc: 0.8476\n",
      "Epoch 600/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0896 - acc: 0.9693 - val_loss: 1.0096 - val_acc: 0.8110\n",
      "Epoch 601/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0888 - acc: 0.9714 - val_loss: 0.9494 - val_acc: 0.8049\n",
      "Epoch 602/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.1100 - acc: 0.9693 - val_loss: 1.2659 - val_acc: 0.7683\n",
      "Epoch 603/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0798 - acc: 0.9693 - val_loss: 1.1657 - val_acc: 0.7866\n",
      "Epoch 604/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0658 - acc: 0.9734 - val_loss: 1.0310 - val_acc: 0.8110\n",
      "Epoch 605/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.0802 - acc: 0.9714 - val_loss: 1.0307 - val_acc: 0.8110\n",
      "Epoch 606/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.0645 - acc: 0.9734 - val_loss: 1.0307 - val_acc: 0.8049\n",
      "Epoch 607/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0748 - acc: 0.9714 - val_loss: 0.9869 - val_acc: 0.8171\n",
      "Epoch 608/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.0794 - acc: 0.9796 - val_loss: 1.0371 - val_acc: 0.8293\n",
      "Epoch 609/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0565 - acc: 0.9775 - val_loss: 1.0422 - val_acc: 0.8171\n",
      "Epoch 610/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.5504 - acc: 0.9366 - val_loss: 1.6544 - val_acc: 0.6768\n",
      "Epoch 611/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.8034 - acc: 0.9121 - val_loss: 1.6562 - val_acc: 0.7805\n",
      "Epoch 612/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.5630 - acc: 0.9100 - val_loss: 1.4508 - val_acc: 0.7744\n",
      "Epoch 613/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.5340 - acc: 0.9182 - val_loss: 1.1321 - val_acc: 0.8171\n",
      "Epoch 614/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.7224 - acc: 0.8773 - val_loss: 2.3227 - val_acc: 0.6890\n",
      "Epoch 615/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.6289 - acc: 0.9121 - val_loss: 1.3402 - val_acc: 0.7988\n",
      "Epoch 616/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.4063 - acc: 0.9305 - val_loss: 1.0626 - val_acc: 0.8110\n",
      "Epoch 617/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.4363 - acc: 0.9305 - val_loss: 1.5026 - val_acc: 0.7500\n",
      "Epoch 618/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.5760 - acc: 0.9346 - val_loss: 1.5691 - val_acc: 0.7378\n",
      "Epoch 619/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.4181 - acc: 0.9427 - val_loss: 1.1692 - val_acc: 0.7683\n",
      "Epoch 620/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2992 - acc: 0.9509 - val_loss: 1.5581 - val_acc: 0.7073\n",
      "Epoch 621/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3900 - acc: 0.9652 - val_loss: 1.0061 - val_acc: 0.7927\n",
      "Epoch 622/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3835 - acc: 0.9427 - val_loss: 1.3695 - val_acc: 0.7378\n",
      "Epoch 623/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3247 - acc: 0.9509 - val_loss: 1.1459 - val_acc: 0.7622\n",
      "Epoch 624/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2883 - acc: 0.9591 - val_loss: 1.3752 - val_acc: 0.6951\n",
      "Epoch 625/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3583 - acc: 0.9427 - val_loss: 0.9232 - val_acc: 0.7927\n",
      "Epoch 626/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.1871 - acc: 0.9714 - val_loss: 0.9112 - val_acc: 0.8110\n",
      "Epoch 627/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.2286 - acc: 0.9693 - val_loss: 1.0860 - val_acc: 0.7866\n",
      "Epoch 628/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1477 - acc: 0.9755 - val_loss: 1.0647 - val_acc: 0.7805\n",
      "Epoch 629/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1544 - acc: 0.9775 - val_loss: 1.1993 - val_acc: 0.7683\n",
      "Epoch 630/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.3234 - acc: 0.9509 - val_loss: 1.3156 - val_acc: 0.7805\n",
      "Epoch 631/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.5713 - acc: 0.9305 - val_loss: 1.2088 - val_acc: 0.7622\n",
      "Epoch 632/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.4006 - acc: 0.9182 - val_loss: 1.6881 - val_acc: 0.7134\n",
      "Epoch 633/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.4678 - acc: 0.9346 - val_loss: 1.1389 - val_acc: 0.7988\n",
      "Epoch 634/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1909 - acc: 0.9530 - val_loss: 1.1441 - val_acc: 0.7988\n",
      "Epoch 635/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2241 - acc: 0.9571 - val_loss: 1.0365 - val_acc: 0.7988\n",
      "Epoch 636/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1823 - acc: 0.9775 - val_loss: 1.0895 - val_acc: 0.7866\n",
      "Epoch 637/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1754 - acc: 0.9734 - val_loss: 1.1316 - val_acc: 0.7622\n",
      "Epoch 638/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1397 - acc: 0.9796 - val_loss: 1.1070 - val_acc: 0.7744\n",
      "Epoch 639/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.1515 - acc: 0.9755 - val_loss: 1.0463 - val_acc: 0.7866\n",
      "Epoch 640/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1822 - acc: 0.9755 - val_loss: 1.2974 - val_acc: 0.7683\n",
      "Epoch 641/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2047 - acc: 0.9611 - val_loss: 1.0710 - val_acc: 0.7744\n",
      "Epoch 642/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1592 - acc: 0.9755 - val_loss: 1.0496 - val_acc: 0.7683\n",
      "Epoch 643/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1402 - acc: 0.9775 - val_loss: 0.9413 - val_acc: 0.8171\n",
      "Epoch 644/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1336 - acc: 0.9775 - val_loss: 1.2151 - val_acc: 0.7622\n",
      "Epoch 645/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1475 - acc: 0.9714 - val_loss: 1.1664 - val_acc: 0.7866\n",
      "Epoch 646/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1849 - acc: 0.9673 - val_loss: 0.9079 - val_acc: 0.8171\n",
      "Epoch 647/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2030 - acc: 0.9673 - val_loss: 1.0402 - val_acc: 0.7866\n",
      "Epoch 648/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2364 - acc: 0.9468 - val_loss: 1.6159 - val_acc: 0.7012\n",
      "Epoch 649/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3361 - acc: 0.9407 - val_loss: 1.1886 - val_acc: 0.7866\n",
      "Epoch 650/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2266 - acc: 0.9366 - val_loss: 1.1205 - val_acc: 0.7622\n",
      "Epoch 651/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 128us/step - loss: 0.2155 - acc: 0.9530 - val_loss: 0.8414 - val_acc: 0.8110\n",
      "Epoch 652/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1050 - acc: 0.9673 - val_loss: 0.8998 - val_acc: 0.7744\n",
      "Epoch 653/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.2422 - acc: 0.9509 - val_loss: 0.9676 - val_acc: 0.7866\n",
      "Epoch 654/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.1894 - acc: 0.9673 - val_loss: 1.1022 - val_acc: 0.7805\n",
      "Epoch 655/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.1439 - acc: 0.9775 - val_loss: 0.9470 - val_acc: 0.7927\n",
      "Epoch 656/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1356 - acc: 0.9816 - val_loss: 0.9990 - val_acc: 0.7866\n",
      "Epoch 657/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1367 - acc: 0.9836 - val_loss: 1.0270 - val_acc: 0.7927\n",
      "Epoch 658/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1323 - acc: 0.9836 - val_loss: 0.9892 - val_acc: 0.7927\n",
      "Epoch 659/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1277 - acc: 0.9836 - val_loss: 1.1180 - val_acc: 0.7866\n",
      "Epoch 660/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1836 - acc: 0.9796 - val_loss: 1.0510 - val_acc: 0.7988\n",
      "Epoch 661/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1527 - acc: 0.9775 - val_loss: 0.9570 - val_acc: 0.8171\n",
      "Epoch 662/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1413 - acc: 0.9796 - val_loss: 1.1970 - val_acc: 0.7683\n",
      "Epoch 663/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1189 - acc: 0.9796 - val_loss: 1.1717 - val_acc: 0.7683\n",
      "Epoch 664/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1448 - acc: 0.9755 - val_loss: 0.9810 - val_acc: 0.8110\n",
      "Epoch 665/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0879 - acc: 0.9714 - val_loss: 1.1849 - val_acc: 0.7561\n",
      "Epoch 666/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1949 - acc: 0.9673 - val_loss: 0.9231 - val_acc: 0.8293\n",
      "Epoch 667/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0904 - acc: 0.9693 - val_loss: 1.1109 - val_acc: 0.7988\n",
      "Epoch 668/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0716 - acc: 0.9857 - val_loss: 1.2486 - val_acc: 0.7561\n",
      "Epoch 669/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3466 - acc: 0.9325 - val_loss: 1.2054 - val_acc: 0.7256\n",
      "Epoch 670/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.1899 - acc: 0.9223 - val_loss: 1.0321 - val_acc: 0.7561\n",
      "Epoch 671/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.1660 - acc: 0.9305 - val_loss: 0.9914 - val_acc: 0.7622\n",
      "Epoch 672/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.1324 - acc: 0.9243 - val_loss: 1.3402 - val_acc: 0.7439\n",
      "Epoch 673/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.1318 - acc: 0.9243 - val_loss: 1.0788 - val_acc: 0.7683\n",
      "Epoch 674/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.1124 - acc: 0.9387 - val_loss: 1.2196 - val_acc: 0.7866\n",
      "Epoch 675/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.0929 - acc: 0.9571 - val_loss: 1.2588 - val_acc: 0.8049\n",
      "Epoch 676/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0987 - acc: 0.9836 - val_loss: 1.2459 - val_acc: 0.8110\n",
      "Epoch 677/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.1195 - acc: 0.9509 - val_loss: 1.3569 - val_acc: 0.7500\n",
      "Epoch 678/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.1573 - acc: 0.9530 - val_loss: 1.4133 - val_acc: 0.7256\n",
      "Epoch 679/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.1306 - acc: 0.9427 - val_loss: 1.0432 - val_acc: 0.7622\n",
      "Epoch 680/1000\n",
      "489/489 [==============================] - 0s 206us/step - loss: 0.1105 - acc: 0.9571 - val_loss: 1.0473 - val_acc: 0.7927\n",
      "Epoch 681/1000\n",
      "489/489 [==============================] - 0s 200us/step - loss: 0.0792 - acc: 0.9775 - val_loss: 1.2222 - val_acc: 0.7622\n",
      "Epoch 682/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.0879 - acc: 0.9714 - val_loss: 1.2885 - val_acc: 0.7866\n",
      "Epoch 683/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.0977 - acc: 0.9693 - val_loss: 0.9428 - val_acc: 0.8293\n",
      "Epoch 684/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.1341 - acc: 0.9468 - val_loss: 1.0399 - val_acc: 0.7561\n",
      "Epoch 685/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.1483 - acc: 0.9264 - val_loss: 1.1726 - val_acc: 0.7988\n",
      "Epoch 686/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.1035 - acc: 0.9755 - val_loss: 1.0602 - val_acc: 0.8110\n",
      "Epoch 687/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0783 - acc: 0.9836 - val_loss: 1.1340 - val_acc: 0.8354\n",
      "Epoch 688/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.1234 - acc: 0.9796 - val_loss: 2.0439 - val_acc: 0.7195\n",
      "Epoch 689/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3911 - acc: 0.9141 - val_loss: 1.2149 - val_acc: 0.7805\n",
      "Epoch 690/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2003 - acc: 0.9162 - val_loss: 1.1457 - val_acc: 0.7439\n",
      "Epoch 691/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.1615 - acc: 0.9325 - val_loss: 0.9775 - val_acc: 0.8049\n",
      "Epoch 692/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.1531 - acc: 0.9141 - val_loss: 1.3079 - val_acc: 0.7256\n",
      "Epoch 693/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.1754 - acc: 0.9427 - val_loss: 1.1146 - val_acc: 0.7927\n",
      "Epoch 694/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.1024 - acc: 0.9714 - val_loss: 1.0226 - val_acc: 0.7805\n",
      "Epoch 695/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.1387 - acc: 0.9427 - val_loss: 1.2287 - val_acc: 0.7683\n",
      "Epoch 696/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0825 - acc: 0.9734 - val_loss: 1.2203 - val_acc: 0.7866\n",
      "Epoch 697/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.0842 - acc: 0.9775 - val_loss: 1.1437 - val_acc: 0.7805\n",
      "Epoch 698/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.1480 - acc: 0.9080 - val_loss: 1.2981 - val_acc: 0.7134\n",
      "Epoch 699/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0829 - acc: 0.9693 - val_loss: 1.3273 - val_acc: 0.7683\n",
      "Epoch 700/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0730 - acc: 0.9816 - val_loss: 1.4689 - val_acc: 0.7561\n",
      "Epoch 701/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.1883 - acc: 0.9468 - val_loss: 1.4769 - val_acc: 0.7561\n",
      "Epoch 702/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.1281 - acc: 0.9611 - val_loss: 1.0446 - val_acc: 0.7927\n",
      "Epoch 703/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0920 - acc: 0.9632 - val_loss: 1.1830 - val_acc: 0.7927\n",
      "Epoch 704/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0854 - acc: 0.9734 - val_loss: 1.2442 - val_acc: 0.7988\n",
      "Epoch 705/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0626 - acc: 0.9816 - val_loss: 1.1656 - val_acc: 0.7927\n",
      "Epoch 706/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.0664 - acc: 0.9796 - val_loss: 1.2242 - val_acc: 0.7805\n",
      "Epoch 707/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0606 - acc: 0.9775 - val_loss: 1.3252 - val_acc: 0.7866\n",
      "Epoch 708/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.0729 - acc: 0.9816 - val_loss: 1.2352 - val_acc: 0.7866\n",
      "Epoch 709/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0543 - acc: 0.9877 - val_loss: 1.3281 - val_acc: 0.7805\n",
      "Epoch 710/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 188us/step - loss: 0.0494 - acc: 0.9898 - val_loss: 1.1277 - val_acc: 0.8049\n",
      "Epoch 711/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.0488 - acc: 0.9877 - val_loss: 1.3937 - val_acc: 0.7622\n",
      "Epoch 712/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.0698 - acc: 0.9775 - val_loss: 1.2040 - val_acc: 0.7988\n",
      "Epoch 713/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0504 - acc: 0.9877 - val_loss: 1.3028 - val_acc: 0.7988\n",
      "Epoch 714/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.0499 - acc: 0.9816 - val_loss: 1.3445 - val_acc: 0.7927\n",
      "Epoch 715/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0457 - acc: 0.9898 - val_loss: 1.2851 - val_acc: 0.8049\n",
      "Epoch 716/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0492 - acc: 0.9877 - val_loss: 1.2717 - val_acc: 0.7866\n",
      "Epoch 717/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.0537 - acc: 0.9836 - val_loss: 1.4078 - val_acc: 0.7866\n",
      "Epoch 718/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0628 - acc: 0.9755 - val_loss: 1.3884 - val_acc: 0.7988\n",
      "Epoch 719/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0555 - acc: 0.9877 - val_loss: 1.3784 - val_acc: 0.7866\n",
      "Epoch 720/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0596 - acc: 0.9775 - val_loss: 1.2691 - val_acc: 0.7927\n",
      "Epoch 721/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0603 - acc: 0.9816 - val_loss: 1.5069 - val_acc: 0.7744\n",
      "Epoch 722/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.0703 - acc: 0.9693 - val_loss: 1.4087 - val_acc: 0.7805\n",
      "Epoch 723/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.0440 - acc: 0.9877 - val_loss: 1.2632 - val_acc: 0.7988\n",
      "Epoch 724/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.0870 - acc: 0.9673 - val_loss: 1.4112 - val_acc: 0.7866\n",
      "Epoch 725/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0508 - acc: 0.9857 - val_loss: 1.3923 - val_acc: 0.7744\n",
      "Epoch 726/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.1288 - acc: 0.9571 - val_loss: 1.4049 - val_acc: 0.7805\n",
      "Epoch 727/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.1305 - acc: 0.9755 - val_loss: 1.3182 - val_acc: 0.8110\n",
      "Epoch 728/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.0963 - acc: 0.9530 - val_loss: 1.3657 - val_acc: 0.8110\n",
      "Epoch 729/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.1533 - acc: 0.9407 - val_loss: 1.4745 - val_acc: 0.7988\n",
      "Epoch 730/1000\n",
      "489/489 [==============================] - 0s 180us/step - loss: 0.0588 - acc: 0.9796 - val_loss: 1.5357 - val_acc: 0.7805\n",
      "Epoch 731/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.1150 - acc: 0.9509 - val_loss: 1.2839 - val_acc: 0.7866\n",
      "Epoch 732/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.1264 - acc: 0.9448 - val_loss: 1.5588 - val_acc: 0.7683\n",
      "Epoch 733/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0622 - acc: 0.9857 - val_loss: 1.5148 - val_acc: 0.7866\n",
      "Epoch 734/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.1440 - acc: 0.9530 - val_loss: 1.2076 - val_acc: 0.7378\n",
      "Epoch 735/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.1921 - acc: 0.8753 - val_loss: 1.3349 - val_acc: 0.7012\n",
      "Epoch 736/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.0991 - acc: 0.9509 - val_loss: 1.5014 - val_acc: 0.7744\n",
      "Epoch 737/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.1484 - acc: 0.9816 - val_loss: 1.4824 - val_acc: 0.7927\n",
      "Epoch 738/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.1370 - acc: 0.9857 - val_loss: 1.4258 - val_acc: 0.7988\n",
      "Epoch 739/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.1321 - acc: 0.9857 - val_loss: 1.4940 - val_acc: 0.7927\n",
      "Epoch 740/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.1299 - acc: 0.9836 - val_loss: 1.5166 - val_acc: 0.7866\n",
      "Epoch 741/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.1258 - acc: 0.9877 - val_loss: 1.4698 - val_acc: 0.7988\n",
      "Epoch 742/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.1283 - acc: 0.9836 - val_loss: 1.3965 - val_acc: 0.7927\n",
      "Epoch 743/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0784 - acc: 0.9796 - val_loss: 1.4306 - val_acc: 0.7866\n",
      "Epoch 744/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0957 - acc: 0.9530 - val_loss: 1.4014 - val_acc: 0.7683\n",
      "Epoch 745/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0664 - acc: 0.9652 - val_loss: 1.4574 - val_acc: 0.7866\n",
      "Epoch 746/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.0406 - acc: 0.9857 - val_loss: 1.3234 - val_acc: 0.7744\n",
      "Epoch 747/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0882 - acc: 0.9550 - val_loss: 1.5063 - val_acc: 0.7988\n",
      "Epoch 748/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0614 - acc: 0.9796 - val_loss: 1.4486 - val_acc: 0.7683\n",
      "Epoch 749/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0504 - acc: 0.9734 - val_loss: 1.4914 - val_acc: 0.7988\n",
      "Epoch 750/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0743 - acc: 0.9591 - val_loss: 1.4726 - val_acc: 0.7622\n",
      "Epoch 751/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.0537 - acc: 0.9673 - val_loss: 1.5600 - val_acc: 0.7927\n",
      "Epoch 752/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0724 - acc: 0.9673 - val_loss: 1.4907 - val_acc: 0.7134\n",
      "Epoch 753/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.0853 - acc: 0.9346 - val_loss: 1.5866 - val_acc: 0.7683\n",
      "Epoch 754/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0746 - acc: 0.9836 - val_loss: 1.5715 - val_acc: 0.7805\n",
      "Epoch 755/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.0623 - acc: 0.9509 - val_loss: 1.5500 - val_acc: 0.7805\n",
      "Epoch 756/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.0628 - acc: 0.9652 - val_loss: 1.5515 - val_acc: 0.8049\n",
      "Epoch 757/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.0764 - acc: 0.9387 - val_loss: 1.5333 - val_acc: 0.7744\n",
      "Epoch 758/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0745 - acc: 0.9693 - val_loss: 1.4012 - val_acc: 0.7683\n",
      "Epoch 759/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.0850 - acc: 0.9305 - val_loss: 1.6155 - val_acc: 0.7561\n",
      "Epoch 760/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.0719 - acc: 0.9693 - val_loss: 1.4894 - val_acc: 0.8049\n",
      "Epoch 761/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.0614 - acc: 0.9755 - val_loss: 1.5391 - val_acc: 0.7988\n",
      "Epoch 762/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.0586 - acc: 0.9714 - val_loss: 1.4076 - val_acc: 0.8049\n",
      "Epoch 763/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.0683 - acc: 0.9734 - val_loss: 1.7164 - val_acc: 0.7866\n",
      "Epoch 764/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.1037 - acc: 0.9673 - val_loss: 1.5070 - val_acc: 0.8049\n",
      "Epoch 765/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.1258 - acc: 0.9734 - val_loss: 1.3312 - val_acc: 0.8110\n",
      "Epoch 766/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.4927 - acc: 0.9468 - val_loss: 1.9210 - val_acc: 0.6951\n",
      "Epoch 767/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.4216 - acc: 0.9346 - val_loss: 1.4834 - val_acc: 0.7500\n",
      "Epoch 768/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.1892 - acc: 0.9305 - val_loss: 1.1357 - val_acc: 0.8049\n",
      "Epoch 769/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 141us/step - loss: 0.0988 - acc: 0.9673 - val_loss: 1.1946 - val_acc: 0.7988\n",
      "Epoch 770/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0728 - acc: 0.9755 - val_loss: 1.4097 - val_acc: 0.7988\n",
      "Epoch 771/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.0519 - acc: 0.9734 - val_loss: 1.4806 - val_acc: 0.8049\n",
      "Epoch 772/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0476 - acc: 0.9857 - val_loss: 1.4578 - val_acc: 0.8049\n",
      "Epoch 773/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0459 - acc: 0.9836 - val_loss: 1.5186 - val_acc: 0.8110\n",
      "Epoch 774/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.0425 - acc: 0.9836 - val_loss: 1.4551 - val_acc: 0.8110\n",
      "Epoch 775/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0493 - acc: 0.9857 - val_loss: 1.4643 - val_acc: 0.8110\n",
      "Epoch 776/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.0439 - acc: 0.9877 - val_loss: 1.4684 - val_acc: 0.8049\n",
      "Epoch 777/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0537 - acc: 0.9796 - val_loss: 1.4821 - val_acc: 0.7988\n",
      "Epoch 778/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.0539 - acc: 0.9816 - val_loss: 1.6124 - val_acc: 0.8049\n",
      "Epoch 779/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.0399 - acc: 0.9836 - val_loss: 1.4349 - val_acc: 0.8171\n",
      "Epoch 780/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0867 - acc: 0.9673 - val_loss: 1.5519 - val_acc: 0.8110\n",
      "Epoch 781/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.0971 - acc: 0.9632 - val_loss: 1.2396 - val_acc: 0.8293\n",
      "Epoch 782/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.1070 - acc: 0.9571 - val_loss: 1.4099 - val_acc: 0.7622\n",
      "Epoch 783/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.0766 - acc: 0.9673 - val_loss: 1.6578 - val_acc: 0.7866\n",
      "Epoch 784/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0896 - acc: 0.9652 - val_loss: 1.5073 - val_acc: 0.7805\n",
      "Epoch 785/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.1029 - acc: 0.9673 - val_loss: 1.1758 - val_acc: 0.8049\n",
      "Epoch 786/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1041 - acc: 0.9673 - val_loss: 1.2754 - val_acc: 0.7927\n",
      "Epoch 787/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0488 - acc: 0.9734 - val_loss: 1.3310 - val_acc: 0.7744\n",
      "Epoch 788/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0388 - acc: 0.9877 - val_loss: 1.4630 - val_acc: 0.7927\n",
      "Epoch 789/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0383 - acc: 0.9816 - val_loss: 1.2256 - val_acc: 0.7988\n",
      "Epoch 790/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.0335 - acc: 0.9898 - val_loss: 1.3116 - val_acc: 0.8049\n",
      "Epoch 791/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0381 - acc: 0.9877 - val_loss: 1.3681 - val_acc: 0.8110\n",
      "Epoch 792/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0869 - acc: 0.9836 - val_loss: 1.3457 - val_acc: 0.8110\n",
      "Epoch 793/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1408 - acc: 0.9796 - val_loss: 1.3368 - val_acc: 0.8171\n",
      "Epoch 794/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.1343 - acc: 0.9918 - val_loss: 1.5916 - val_acc: 0.7988\n",
      "Epoch 795/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1285 - acc: 0.9877 - val_loss: 1.4026 - val_acc: 0.8293\n",
      "Epoch 796/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1176 - acc: 0.9877 - val_loss: 1.5781 - val_acc: 0.7988\n",
      "Epoch 797/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1188 - acc: 0.9898 - val_loss: 1.4958 - val_acc: 0.8049\n",
      "Epoch 798/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1209 - acc: 0.9877 - val_loss: 1.7425 - val_acc: 0.7744\n",
      "Epoch 799/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1824 - acc: 0.9693 - val_loss: 1.2595 - val_acc: 0.8049\n",
      "Epoch 800/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0791 - acc: 0.9877 - val_loss: 1.3828 - val_acc: 0.8049\n",
      "Epoch 801/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0607 - acc: 0.9857 - val_loss: 1.4041 - val_acc: 0.8232\n",
      "Epoch 802/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.1501 - acc: 0.9836 - val_loss: 1.6054 - val_acc: 0.7866\n",
      "Epoch 803/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.1452 - acc: 0.9755 - val_loss: 1.4142 - val_acc: 0.8110\n",
      "Epoch 804/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1282 - acc: 0.9857 - val_loss: 1.5922 - val_acc: 0.8049\n",
      "Epoch 805/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1244 - acc: 0.9836 - val_loss: 1.4610 - val_acc: 0.8049\n",
      "Epoch 806/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.1934 - acc: 0.9693 - val_loss: 1.7585 - val_acc: 0.7744\n",
      "Epoch 807/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.1081 - acc: 0.9652 - val_loss: 1.3459 - val_acc: 0.7744\n",
      "Epoch 808/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0887 - acc: 0.9652 - val_loss: 1.4136 - val_acc: 0.7805\n",
      "Epoch 809/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1227 - acc: 0.9714 - val_loss: 1.3163 - val_acc: 0.7927\n",
      "Epoch 810/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0564 - acc: 0.9775 - val_loss: 1.3574 - val_acc: 0.7866\n",
      "Epoch 811/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0748 - acc: 0.9714 - val_loss: 1.1699 - val_acc: 0.8171\n",
      "Epoch 812/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0822 - acc: 0.9673 - val_loss: 1.3148 - val_acc: 0.7988\n",
      "Epoch 813/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0588 - acc: 0.9755 - val_loss: 1.4402 - val_acc: 0.8049\n",
      "Epoch 814/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0806 - acc: 0.9632 - val_loss: 1.3899 - val_acc: 0.8171\n",
      "Epoch 815/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.4791 - acc: 0.9305 - val_loss: 1.8927 - val_acc: 0.7439\n",
      "Epoch 816/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.4808 - acc: 0.9305 - val_loss: 1.8257 - val_acc: 0.7561\n",
      "Epoch 817/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1950 - acc: 0.9407 - val_loss: 1.4142 - val_acc: 0.7866\n",
      "Epoch 818/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1586 - acc: 0.9448 - val_loss: 1.2631 - val_acc: 0.7988\n",
      "Epoch 819/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1708 - acc: 0.9387 - val_loss: 1.0867 - val_acc: 0.7927\n",
      "Epoch 820/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1169 - acc: 0.9673 - val_loss: 1.1708 - val_acc: 0.7927\n",
      "Epoch 821/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0884 - acc: 0.9714 - val_loss: 1.3452 - val_acc: 0.7744\n",
      "Epoch 822/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0791 - acc: 0.9755 - val_loss: 1.1820 - val_acc: 0.8293\n",
      "Epoch 823/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0724 - acc: 0.9734 - val_loss: 1.3880 - val_acc: 0.7927\n",
      "Epoch 824/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1198 - acc: 0.9734 - val_loss: 1.2168 - val_acc: 0.8232\n",
      "Epoch 825/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.0813 - acc: 0.9714 - val_loss: 1.2371 - val_acc: 0.8293\n",
      "Epoch 826/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.1120 - acc: 0.9611 - val_loss: 1.4980 - val_acc: 0.7683\n",
      "Epoch 827/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0868 - acc: 0.9673 - val_loss: 1.4129 - val_acc: 0.8049\n",
      "Epoch 828/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 128us/step - loss: 0.0613 - acc: 0.9775 - val_loss: 1.2039 - val_acc: 0.8354\n",
      "Epoch 829/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0869 - acc: 0.9714 - val_loss: 1.2727 - val_acc: 0.7988\n",
      "Epoch 830/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0688 - acc: 0.9734 - val_loss: 1.2709 - val_acc: 0.8049\n",
      "Epoch 831/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0635 - acc: 0.9755 - val_loss: 1.3480 - val_acc: 0.7988\n",
      "Epoch 832/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0690 - acc: 0.9755 - val_loss: 1.2828 - val_acc: 0.8171\n",
      "Epoch 833/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.1003 - acc: 0.9693 - val_loss: 1.3226 - val_acc: 0.7988\n",
      "Epoch 834/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0884 - acc: 0.9755 - val_loss: 1.2907 - val_acc: 0.8110\n",
      "Epoch 835/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0727 - acc: 0.9816 - val_loss: 1.2842 - val_acc: 0.8171\n",
      "Epoch 836/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0729 - acc: 0.9775 - val_loss: 1.3580 - val_acc: 0.8110\n",
      "Epoch 837/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0667 - acc: 0.9796 - val_loss: 1.3304 - val_acc: 0.8110\n",
      "Epoch 838/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0514 - acc: 0.9857 - val_loss: 1.3049 - val_acc: 0.8171\n",
      "Epoch 839/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.0511 - acc: 0.9877 - val_loss: 1.3431 - val_acc: 0.8049\n",
      "Epoch 840/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.0647 - acc: 0.9775 - val_loss: 1.5273 - val_acc: 0.7378\n",
      "Epoch 841/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.0929 - acc: 0.9652 - val_loss: 1.4128 - val_acc: 0.7744\n",
      "Epoch 842/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0634 - acc: 0.9734 - val_loss: 1.4441 - val_acc: 0.7927\n",
      "Epoch 843/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.0694 - acc: 0.9714 - val_loss: 1.5342 - val_acc: 0.7683\n",
      "Epoch 844/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0745 - acc: 0.9673 - val_loss: 1.3284 - val_acc: 0.7927\n",
      "Epoch 845/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.1151 - acc: 0.9632 - val_loss: 1.5277 - val_acc: 0.7378\n",
      "Epoch 846/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0714 - acc: 0.9755 - val_loss: 1.4603 - val_acc: 0.7805\n",
      "Epoch 847/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0634 - acc: 0.9734 - val_loss: 1.0240 - val_acc: 0.8354\n",
      "Epoch 848/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1644 - acc: 0.9468 - val_loss: 1.3902 - val_acc: 0.7744\n",
      "Epoch 849/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0889 - acc: 0.9693 - val_loss: 1.3014 - val_acc: 0.7988\n",
      "Epoch 850/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0739 - acc: 0.9755 - val_loss: 1.2925 - val_acc: 0.8110\n",
      "Epoch 851/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0648 - acc: 0.9734 - val_loss: 1.3227 - val_acc: 0.7866\n",
      "Epoch 852/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.0572 - acc: 0.9734 - val_loss: 1.2749 - val_acc: 0.8049\n",
      "Epoch 853/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.0532 - acc: 0.9755 - val_loss: 1.2742 - val_acc: 0.8110\n",
      "Epoch 854/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.0515 - acc: 0.9734 - val_loss: 1.3196 - val_acc: 0.8110\n",
      "Epoch 855/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0484 - acc: 0.9755 - val_loss: 1.2755 - val_acc: 0.8110\n",
      "Epoch 856/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.0507 - acc: 0.9755 - val_loss: 1.3310 - val_acc: 0.7927\n",
      "Epoch 857/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0668 - acc: 0.9734 - val_loss: 1.2252 - val_acc: 0.8110\n",
      "Epoch 858/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0570 - acc: 0.9755 - val_loss: 1.3393 - val_acc: 0.7927\n",
      "Epoch 859/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0500 - acc: 0.9734 - val_loss: 1.2834 - val_acc: 0.8049\n",
      "Epoch 860/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0681 - acc: 0.9755 - val_loss: 1.2792 - val_acc: 0.7988\n",
      "Epoch 861/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1030 - acc: 0.9734 - val_loss: 1.2593 - val_acc: 0.8171\n",
      "Epoch 862/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0800 - acc: 0.9755 - val_loss: 1.3022 - val_acc: 0.8110\n",
      "Epoch 863/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0762 - acc: 0.9734 - val_loss: 1.2997 - val_acc: 0.8171\n",
      "Epoch 864/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0726 - acc: 0.9775 - val_loss: 1.3172 - val_acc: 0.8171\n",
      "Epoch 865/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0693 - acc: 0.9755 - val_loss: 1.2752 - val_acc: 0.8171\n",
      "Epoch 866/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.1444 - acc: 0.9632 - val_loss: 1.5690 - val_acc: 0.7805\n",
      "Epoch 867/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0553 - acc: 0.9836 - val_loss: 1.5115 - val_acc: 0.7805\n",
      "Epoch 868/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0482 - acc: 0.9857 - val_loss: 1.4053 - val_acc: 0.8110\n",
      "Epoch 869/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0437 - acc: 0.9857 - val_loss: 1.5063 - val_acc: 0.7866\n",
      "Epoch 870/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0434 - acc: 0.9796 - val_loss: 1.3653 - val_acc: 0.8049\n",
      "Epoch 871/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.0281 - acc: 0.9898 - val_loss: 1.3639 - val_acc: 0.8049\n",
      "Epoch 872/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0387 - acc: 0.9857 - val_loss: 1.4114 - val_acc: 0.7927\n",
      "Epoch 873/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0647 - acc: 0.9816 - val_loss: 1.4424 - val_acc: 0.8049\n",
      "Epoch 874/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.0651 - acc: 0.9775 - val_loss: 1.5229 - val_acc: 0.7927\n",
      "Epoch 875/1000\n",
      "489/489 [==============================] - 0s 130us/step - loss: 0.0613 - acc: 0.9775 - val_loss: 1.5317 - val_acc: 0.7866\n",
      "Epoch 876/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0488 - acc: 0.9796 - val_loss: 1.4066 - val_acc: 0.7988\n",
      "Epoch 877/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0329 - acc: 0.9857 - val_loss: 1.4397 - val_acc: 0.7988\n",
      "Epoch 878/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0406 - acc: 0.9836 - val_loss: 1.3644 - val_acc: 0.8110\n",
      "Epoch 879/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0567 - acc: 0.9816 - val_loss: 1.3298 - val_acc: 0.8171\n",
      "Epoch 880/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0528 - acc: 0.9857 - val_loss: 1.3911 - val_acc: 0.7988\n",
      "Epoch 881/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.0952 - acc: 0.9796 - val_loss: 2.1542 - val_acc: 0.7012\n",
      "Epoch 882/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2336 - acc: 0.9162 - val_loss: 1.6932 - val_acc: 0.6768\n",
      "Epoch 883/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3256 - acc: 0.9121 - val_loss: 2.0019 - val_acc: 0.7012\n",
      "Epoch 884/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.5796 - acc: 0.8916 - val_loss: 1.8791 - val_acc: 0.7195\n",
      "Epoch 885/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3850 - acc: 0.9243 - val_loss: 2.7927 - val_acc: 0.6768\n",
      "Epoch 886/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 2.2971 - acc: 0.7914 - val_loss: 1.8573 - val_acc: 0.7256\n",
      "Epoch 887/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 163us/step - loss: 0.3869 - acc: 0.9407 - val_loss: 1.7412 - val_acc: 0.7683\n",
      "Epoch 888/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3427 - acc: 0.9571 - val_loss: 1.8061 - val_acc: 0.7683\n",
      "Epoch 889/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.3328 - acc: 0.9550 - val_loss: 1.4842 - val_acc: 0.7744\n",
      "Epoch 890/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.1561 - acc: 0.9366 - val_loss: 1.4434 - val_acc: 0.7744\n",
      "Epoch 891/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2910 - acc: 0.9264 - val_loss: 1.1919 - val_acc: 0.8049\n",
      "Epoch 892/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.1393 - acc: 0.9571 - val_loss: 1.2426 - val_acc: 0.8171\n",
      "Epoch 893/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.1050 - acc: 0.9550 - val_loss: 1.3037 - val_acc: 0.8232\n",
      "Epoch 894/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.1117 - acc: 0.9652 - val_loss: 1.2125 - val_acc: 0.8171\n",
      "Epoch 895/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0899 - acc: 0.9632 - val_loss: 1.3503 - val_acc: 0.7927\n",
      "Epoch 896/1000\n",
      "489/489 [==============================] - 0s 120us/step - loss: 0.0755 - acc: 0.9693 - val_loss: 1.3265 - val_acc: 0.8049\n",
      "Epoch 897/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0726 - acc: 0.9755 - val_loss: 1.3624 - val_acc: 0.7988\n",
      "Epoch 898/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0754 - acc: 0.9755 - val_loss: 1.3888 - val_acc: 0.7988\n",
      "Epoch 899/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0683 - acc: 0.9734 - val_loss: 1.4068 - val_acc: 0.7988\n",
      "Epoch 900/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0879 - acc: 0.9734 - val_loss: 1.3849 - val_acc: 0.7988\n",
      "Epoch 901/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0986 - acc: 0.9611 - val_loss: 1.5565 - val_acc: 0.7866\n",
      "Epoch 902/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.0854 - acc: 0.9693 - val_loss: 1.4784 - val_acc: 0.7805\n",
      "Epoch 903/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0900 - acc: 0.9693 - val_loss: 1.4438 - val_acc: 0.7927\n",
      "Epoch 904/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0731 - acc: 0.9755 - val_loss: 1.4395 - val_acc: 0.7927\n",
      "Epoch 905/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0660 - acc: 0.9775 - val_loss: 1.4330 - val_acc: 0.7927\n",
      "Epoch 906/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0832 - acc: 0.9714 - val_loss: 1.4424 - val_acc: 0.7927\n",
      "Epoch 907/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0816 - acc: 0.9734 - val_loss: 1.4305 - val_acc: 0.7988\n",
      "Epoch 908/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0794 - acc: 0.9734 - val_loss: 1.3842 - val_acc: 0.8049\n",
      "Epoch 909/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.0743 - acc: 0.9755 - val_loss: 1.4379 - val_acc: 0.7988\n",
      "Epoch 910/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.0733 - acc: 0.9775 - val_loss: 1.3928 - val_acc: 0.8110\n",
      "Epoch 911/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0998 - acc: 0.9755 - val_loss: 1.4591 - val_acc: 0.8049\n",
      "Epoch 912/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0808 - acc: 0.9755 - val_loss: 1.5256 - val_acc: 0.7805\n",
      "Epoch 913/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0730 - acc: 0.9775 - val_loss: 1.5166 - val_acc: 0.7927\n",
      "Epoch 914/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0675 - acc: 0.9775 - val_loss: 1.4943 - val_acc: 0.7927\n",
      "Epoch 915/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0659 - acc: 0.9734 - val_loss: 1.5908 - val_acc: 0.7805\n",
      "Epoch 916/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0597 - acc: 0.9755 - val_loss: 1.5772 - val_acc: 0.7866\n",
      "Epoch 917/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0644 - acc: 0.9775 - val_loss: 1.4522 - val_acc: 0.7988\n",
      "Epoch 918/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.0806 - acc: 0.9714 - val_loss: 1.4405 - val_acc: 0.7988\n",
      "Epoch 919/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0734 - acc: 0.9755 - val_loss: 1.4306 - val_acc: 0.8049\n",
      "Epoch 920/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0716 - acc: 0.9775 - val_loss: 1.4357 - val_acc: 0.7988\n",
      "Epoch 921/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0711 - acc: 0.9775 - val_loss: 1.3969 - val_acc: 0.8049\n",
      "Epoch 922/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0772 - acc: 0.9734 - val_loss: 1.4886 - val_acc: 0.7866\n",
      "Epoch 923/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0761 - acc: 0.9755 - val_loss: 1.4313 - val_acc: 0.7988\n",
      "Epoch 924/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0690 - acc: 0.9755 - val_loss: 1.4347 - val_acc: 0.7988\n",
      "Epoch 925/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0679 - acc: 0.9775 - val_loss: 1.4079 - val_acc: 0.8049\n",
      "Epoch 926/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0676 - acc: 0.9775 - val_loss: 1.4190 - val_acc: 0.8049\n",
      "Epoch 927/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0676 - acc: 0.9755 - val_loss: 1.4343 - val_acc: 0.8049\n",
      "Epoch 928/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0669 - acc: 0.9755 - val_loss: 1.4510 - val_acc: 0.8049\n",
      "Epoch 929/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0664 - acc: 0.9755 - val_loss: 1.4529 - val_acc: 0.8049\n",
      "Epoch 930/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.0659 - acc: 0.9755 - val_loss: 1.4672 - val_acc: 0.8049\n",
      "Epoch 931/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.0641 - acc: 0.9775 - val_loss: 1.4788 - val_acc: 0.8049\n",
      "Epoch 932/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.0639 - acc: 0.9755 - val_loss: 1.4685 - val_acc: 0.8049\n",
      "Epoch 933/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.0621 - acc: 0.9775 - val_loss: 1.4557 - val_acc: 0.8110\n",
      "Epoch 934/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.0600 - acc: 0.9755 - val_loss: 1.4692 - val_acc: 0.7927\n",
      "Epoch 935/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0571 - acc: 0.9775 - val_loss: 1.4319 - val_acc: 0.7988\n",
      "Epoch 936/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0567 - acc: 0.9775 - val_loss: 1.4530 - val_acc: 0.7988\n",
      "Epoch 937/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0559 - acc: 0.9775 - val_loss: 1.4475 - val_acc: 0.7988\n",
      "Epoch 938/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0577 - acc: 0.9775 - val_loss: 1.4540 - val_acc: 0.8110\n",
      "Epoch 939/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0546 - acc: 0.9775 - val_loss: 1.5164 - val_acc: 0.7927\n",
      "Epoch 940/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0524 - acc: 0.9755 - val_loss: 1.4450 - val_acc: 0.8049\n",
      "Epoch 941/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.0519 - acc: 0.9755 - val_loss: 1.4427 - val_acc: 0.7988\n",
      "Epoch 942/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.0512 - acc: 0.9775 - val_loss: 1.4268 - val_acc: 0.7988\n",
      "Epoch 943/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.0500 - acc: 0.9755 - val_loss: 1.5087 - val_acc: 0.7927\n",
      "Epoch 944/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0467 - acc: 0.9775 - val_loss: 1.4095 - val_acc: 0.8110\n",
      "Epoch 945/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0499 - acc: 0.9775 - val_loss: 1.4473 - val_acc: 0.7988\n",
      "Epoch 946/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 131us/step - loss: 0.0470 - acc: 0.9775 - val_loss: 1.4587 - val_acc: 0.8049\n",
      "Epoch 947/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0478 - acc: 0.9775 - val_loss: 1.4943 - val_acc: 0.8049\n",
      "Epoch 948/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0460 - acc: 0.9775 - val_loss: 1.4967 - val_acc: 0.7988\n",
      "Epoch 949/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.0490 - acc: 0.9755 - val_loss: 1.4523 - val_acc: 0.8110\n",
      "Epoch 950/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0493 - acc: 0.9775 - val_loss: 1.4670 - val_acc: 0.7988\n",
      "Epoch 951/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0414 - acc: 0.9775 - val_loss: 1.4793 - val_acc: 0.7988\n",
      "Epoch 952/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0450 - acc: 0.9755 - val_loss: 1.4912 - val_acc: 0.7988\n",
      "Epoch 953/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.0450 - acc: 0.9775 - val_loss: 1.4796 - val_acc: 0.7988\n",
      "Epoch 954/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0425 - acc: 0.9775 - val_loss: 1.4689 - val_acc: 0.8049\n",
      "Epoch 955/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0603 - acc: 0.9755 - val_loss: 1.5092 - val_acc: 0.8049\n",
      "Epoch 956/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.0517 - acc: 0.9755 - val_loss: 1.5611 - val_acc: 0.7927\n",
      "Epoch 957/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0408 - acc: 0.9775 - val_loss: 1.4873 - val_acc: 0.7988\n",
      "Epoch 958/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.0432 - acc: 0.9755 - val_loss: 1.4677 - val_acc: 0.8110\n",
      "Epoch 959/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0750 - acc: 0.9734 - val_loss: 1.5603 - val_acc: 0.7988\n",
      "Epoch 960/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.0588 - acc: 0.9734 - val_loss: 1.6348 - val_acc: 0.7866\n",
      "Epoch 961/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0478 - acc: 0.9734 - val_loss: 1.5649 - val_acc: 0.8049\n",
      "Epoch 962/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0422 - acc: 0.9755 - val_loss: 1.5424 - val_acc: 0.8049\n",
      "Epoch 963/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0444 - acc: 0.9755 - val_loss: 1.5672 - val_acc: 0.7988\n",
      "Epoch 964/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0403 - acc: 0.9775 - val_loss: 1.5963 - val_acc: 0.8049\n",
      "Epoch 965/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.0593 - acc: 0.9734 - val_loss: 1.4771 - val_acc: 0.8232\n",
      "Epoch 966/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0492 - acc: 0.9775 - val_loss: 1.6077 - val_acc: 0.7988\n",
      "Epoch 967/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0723 - acc: 0.9693 - val_loss: 1.4580 - val_acc: 0.8110\n",
      "Epoch 968/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0535 - acc: 0.9734 - val_loss: 1.6187 - val_acc: 0.7927\n",
      "Epoch 969/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0877 - acc: 0.9611 - val_loss: 1.5601 - val_acc: 0.8110\n",
      "Epoch 970/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0665 - acc: 0.9673 - val_loss: 1.3776 - val_acc: 0.8110\n",
      "Epoch 971/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0491 - acc: 0.9714 - val_loss: 1.4540 - val_acc: 0.8110\n",
      "Epoch 972/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.1323 - acc: 0.9550 - val_loss: 1.3016 - val_acc: 0.7927\n",
      "Epoch 973/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.2111 - acc: 0.9264 - val_loss: 1.1072 - val_acc: 0.7988\n",
      "Epoch 974/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1567 - acc: 0.9325 - val_loss: 1.2463 - val_acc: 0.7805\n",
      "Epoch 975/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0905 - acc: 0.9652 - val_loss: 1.1170 - val_acc: 0.7927\n",
      "Epoch 976/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0688 - acc: 0.9775 - val_loss: 1.1404 - val_acc: 0.8049\n",
      "Epoch 977/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0899 - acc: 0.9652 - val_loss: 1.3102 - val_acc: 0.7988\n",
      "Epoch 978/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0866 - acc: 0.9796 - val_loss: 1.2297 - val_acc: 0.8171\n",
      "Epoch 979/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0558 - acc: 0.9857 - val_loss: 1.3797 - val_acc: 0.7988\n",
      "Epoch 980/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0426 - acc: 0.9836 - val_loss: 1.5579 - val_acc: 0.7805\n",
      "Epoch 981/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0575 - acc: 0.9836 - val_loss: 1.4541 - val_acc: 0.7988\n",
      "Epoch 982/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.0433 - acc: 0.9816 - val_loss: 1.4931 - val_acc: 0.7805\n",
      "Epoch 983/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0905 - acc: 0.9611 - val_loss: 1.3216 - val_acc: 0.8110\n",
      "Epoch 984/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.1173 - acc: 0.9571 - val_loss: 1.4010 - val_acc: 0.7805\n",
      "Epoch 985/1000\n",
      "489/489 [==============================] - 0s 118us/step - loss: 0.1022 - acc: 0.9509 - val_loss: 1.2510 - val_acc: 0.7744\n",
      "Epoch 986/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.1063 - acc: 0.9571 - val_loss: 1.4387 - val_acc: 0.7744\n",
      "Epoch 987/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.0891 - acc: 0.9591 - val_loss: 1.3220 - val_acc: 0.7805\n",
      "Epoch 988/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.0685 - acc: 0.9632 - val_loss: 1.1585 - val_acc: 0.8049\n",
      "Epoch 989/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0543 - acc: 0.9652 - val_loss: 1.4407 - val_acc: 0.7866\n",
      "Epoch 990/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0628 - acc: 0.9714 - val_loss: 1.5343 - val_acc: 0.7805\n",
      "Epoch 991/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.0555 - acc: 0.9775 - val_loss: 1.4652 - val_acc: 0.8049\n",
      "Epoch 992/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.0510 - acc: 0.9775 - val_loss: 1.5262 - val_acc: 0.7805\n",
      "Epoch 993/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.0836 - acc: 0.9775 - val_loss: 1.3648 - val_acc: 0.8171\n",
      "Epoch 994/1000\n",
      "489/489 [==============================] - 0s 124us/step - loss: 0.0660 - acc: 0.9775 - val_loss: 1.3627 - val_acc: 0.8049\n",
      "Epoch 995/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.0438 - acc: 0.9775 - val_loss: 1.3680 - val_acc: 0.8049\n",
      "Epoch 996/1000\n",
      "489/489 [==============================] - 0s 122us/step - loss: 0.0675 - acc: 0.9775 - val_loss: 1.2984 - val_acc: 0.8110\n",
      "Epoch 997/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.0673 - acc: 0.9775 - val_loss: 1.1823 - val_acc: 0.8171\n",
      "Epoch 998/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.0523 - acc: 0.9755 - val_loss: 1.2797 - val_acc: 0.8171\n",
      "Epoch 999/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.0478 - acc: 0.9714 - val_loss: 1.2941 - val_acc: 0.8171\n",
      "Epoch 1000/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.0378 - acc: 0.9775 - val_loss: 1.2785 - val_acc: 0.8171\n"
     ]
    }
   ],
   "source": [
    "history = model.fit(x_train, y_train, epochs=1000, validation_data=(x_test, y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 76,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 63us/step\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[0.039307406743519874, 0.97750511247443761]"
      ]
     },
     "execution_count": 76,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate(x_train, y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 77,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "164/164 [==============================] - 0s 103us/step\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[1.2785010948413755, 0.81707317073170727]"
      ]
     },
     "execution_count": 77,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate(x_test, y_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x22e95ca9710>"
      ]
     },
     "execution_count": 78,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXeYFMXWxt/aDLhkRJKASBAEA2tOF8yocFUUA+asGNDr\nB+i9ZsV8zXIVFUGEi5gAEVHBnADhShJEkosSRTJsqu+PM0VX11SHCbuzM3t+zzPPdFf3dFf3dL99\n+tSpU0JKCYZhGCazyEp1BRiGYZjkw+LOMAyTgbC4MwzDZCAs7gzDMBkIizvDMEwGwuLOMAyTgbC4\nMwzDZCAs7gzDMBkIizvDMEwGkpOqHTdu3Fi2adMmVbtnGIZJS2bNmrVeStkkaL2UiXubNm0wc+bM\nVO2eYRgmLRFCrAizHrtlGIZhMhAWd4ZhmAwkUNyFEK8KIdYKIeZ5LBdCiGeEEEuEED8JIQ5OfjUZ\nhmGYWAhjuY8AcIrP8lMBtI98rgbwYuLVYhiGYRIhUNyllF8A+NNnlT4ARkriOwD1hRDNklVBhmEY\nJnaS4XNvAeA3bb44UsYwDMOkiCptUBVCXC2EmCmEmLlu3bqq3DXDMEyNIhnivgpAK22+ZaQsCinl\nS1LKIillUZMmgTH4DMNkKjNnAt99l+paZDTJEPcJAC6ORM0cDmCTlPKPJGyXYZiqZv584KSTgCuu\nAB5/HNixA9i2LXq97duB0tJw29y82ZkuLwf69wcOOQQ44ghg6FCgrCx8/bZsASoq/Nf54w+gVy9g\n7drw281EpJS+HwBjAPwBoBTkT78CwLUAro0sFwCeB/ArgLkAioK2KaVE9+7dJcNkJF9/LeWiRd7L\np06VctmyqqnL229LuWVL+PXPPltKIPrz/vtS7tol5dixUn75JZUdf7yUb73lva3166W88kpa98sv\nqaxfv+ht16tH5yyINWto/cce816nrEzKvfem9S6+WMpnn5VyypTwx58sNm+WcswYKcvLk75pADNl\nCI0NXKGyPizuTMaiREvnjTekfOQRutnV8oqK5O/700+lPPxwKUeOlHLmTNrP5ZdL+frrUj78MK2z\nbJmUffuSAClKS+lz6KF2cQekvPNOe/k330h5++1STpjgrstNN7nX27TJe9uAlNu2+R/byy/Tej16\neK8zZox92zt3xnU64+aee2i/V1yR9E2HFXfuocowyWTrVnt5//7AoEHAypVO2QMP2NfdsgWYNIlk\nyYvycuCdd6JdFPffT77siy8G3n6byn7/HbjkEmDwYJp/5BFg/Hhg1Ciaf+UVIDcXqF8f+OEH730+\n/7y9/IcfgMceA3r3dpcL4Z6fNct724D73JjMmAFcdRVN77mn93rzrH0tgV9/9d93slm/nr5HjQJ2\n7arafUdgcWeYZPLLL870xReTz1pnhZbz6aGH7Nt45BHgjDOAKVPc5RUVwIABwLffAl26AGefTd8X\nXkh+65ISWqYYOpS+a9d2yi691BGeDRuANWuAK6+keZtvXeevv5zpc891pm+5xb2eeiht2QLstZdT\nvnMncNxxznynTu7fDRniPJAAeoAtXQpcfjnw/vvuelRU0KesjB6opaXAnDnAgw/a6/7bb+55dayD\nBwOPPgq0bAl06AA0aAAceihwwgnABRfQtBBAURFQpw5Nd+lC30LQ8c2cCfTpQ9Pbt9P5fe452n5J\nCbBpEz1AjzuOPkIA//mPvZ7JJIx5XxkfdsswGcnYsW53wCefULmanzDBvXzxYvfvf/vNWfbss+5l\nn34qd/u6TbfD3LlSLl1K0w895F7Wv7/dVXHvvVL27h1d/tRTUg4b5u9CueEGe/kBB9D3Z585ZfPn\n0/eYMbS8d29yST37rH0bAwbQefHbf6tW/ssXLZKyVy8pn3mG5idNcs7jxx9T2VFH+W8jns+TT0o5\ncKC7bOlSKbt3l7JxY6dMucjiAOyWYZ
gqYNEiYOBAJ+Jj6VL38tJSskAV55zjXn7rrc56nToBrbSo\nYjOK5Pzz6buZpQP4Tz+RFQ4ABxxA8wrdctfZuhWYMMFd9tprwM03A9dcQ64QLxo0sJf/73/0/be/\nOWX169P35s0UwdKokdtlk2NkHn/uOeCjj+zb79mTvk1L3KRDB+CDD4CjjqJ5/VyqEMyvv3bKOnd2\n/z4nB3jmGbLs77qLrPEXXgCefhpYvZos8ZdeIote59Zbgbfecpdt3UrXyXnnAVdfTWV16/rXPwmk\nLJ87w6Q9s2cD119PYtGnDwna6tV0406fDnTvDpx6qnNDA47/9aGHgDvucFwkixbRR6ekxD2vQvtq\n1QKystz+9jffpPBCAGja1C1WderY6z9yZHRZ8+bOtClcOkqwg/jkE0fIhg+nMMX27Z16AsANN5Bo\n6ng9WEzXUf/+5OJQ4l2vnnu5enDoD9jFi93rXH451a28HLjtNuCUU4ATT4x+6Fx3nfs3ALm01q4F\n8vKAhg2prLjY/bslS0jgO3YkV9qffwL9+tmPL4mw5c4wYfj+e+Dee2l64UKaPvhgxwpcsoS+16wh\n0WqhZeB46aXo7Q0cSH7rjRvJ5/vVV1TeurUj8krclywB7r7b+a3yOet88AFwzz003aQJkJ0NHH00\nzXsJsa2hTxd3APjxR/tvw4p78+bOw0UJdvfu9N23L/Df/1JjrMncuUDbts78N9+QKJt13ndfejOp\nW9duDSuBVuK/aZPTkKx45RV6k8jJoYfMqadGC7sXQtD/3aCB99uMakju0IHeut56y3kQVCJsuTNM\nEFIChx9O0717U+cbU2SmTXOiOTp39r7RFfn5wLp1JOSDBjnlr77qWLZK3E87zW1tmp1zunYlMVQo\n4f30U9qPVych3ZpVmC6fgw6ixkbTGvU7vn33dR52tWqRANapQ1Z3ly7USQqgctUwe//91DBZUkKR\nLdu20XlYtoyWH3EEfY8YARx4oLOv3FzvegD0kAOcczBpknu52m4y6NYN+PxzZ/7hh8mtM3o0/Q8H\nV202dLbcGSYIXdgOPthu8Y4Z40z/9Re9puvk57vnhSDXjUnt2rQsL8/pAfqnkZRV5WVSD5xjj3Uv\n32MP+s7LI/GLRdwbNYouUwKp4yfueqiiOg+qzCtK5J//BN57j6zg7dspskY9aC66yFnvgAOo3sql\nY4Zbfvut4/cHot0yyq3zxhv0rdoxksHppzvTrVs7D67ly+k/atw4efsKAYs7k37s2uVYhlWBKa5B\n1KoVXbZ5M1mnOsOHR6+nXBh5eY7lbvreV6+m75tvpropN4ciS7utc3K80wSE7fbfsmV0mZ+4635v\n9VDr1i16mY38fPp/d+6k87hpE73N6GRlUZgpEC3uhx/u7AtwxH3LFnoTuOYamu/bl0JBBwzwr08s\n3Hor9SdQx9GmjbNMva1UISzuTHoxfDjFCrdv791hKBFmznRb4QAJQ6Lk5ZF1CgD77EPfuqWnUJEt\nubmOqJvirB42nTqRyHo1mAIkbl7x66a4m28bCj32XKHeDmzoAq62OWIE8PLL5Jbxo6CAxH3HDpqu\nW9fu/1ZtDlkBEqZ+u3Kl2yDIzye/t/lwSISsLLLYATpufdu9eiVvP2GrU+V7ZJggOnakRrbCQnen\nnLIy8mt//z3NFxZGR5jEyh9/OOI9ZQpFnFxwgbN8yxbv3o0HHAD8/HNs+9u40fGP20IUVZmf5a7Y\nf3/69hPa3FyyUG2YjbK2Nw7AcYHoFBQAPXrQ/3THHe5lNsu9fn2KLAkSU91yLyjwXk9GOkoFibty\nKW3c6JTpHbCSjTqH6jjVuVAP9CqExZ2pXuzYQY2H//d/ZJk/8oizbNOm6PVffjmx/TVvTm8B06ZR\nlIRCCX7XrtSr08acOfQgMlHC89VX1Fj6zTfOsvr1HQG3ianullEWu803XqeOY5XqlrvqGanIyfEW\ndx
OveHgbubl0zv7xj+j2BF3cgxo8TZS4l5Z6P2wA58EU9LBQ50h3rel9CZKNeiCpa+D77yk6x+9B\nVUmwuDOVz4UXUvpYgITGjLwASEy3bKEIAx3dJaF3f1eY3fuDKCmhji0qrhmg8EUzb4p6Y9DTBeio\nzkfq94rmzYGnnqLpo46i4/GKyLC5GwoL6Vu33G3oYX/62AhmJyk/y93khRfCrQe4G1lN61kP84vV\n7WE+KLy46ip6qPXt67+eTdwrU2jVttXDp2NHisVPASzuTOXz5pvA7bfTdFGR23JasoRixlWc8n33\nuX+r+4W/+CJ62zt32ve5YwcwbJhjQf3vf+RLz88nS/qqq5yGSYCScOmcfLLjI1eoRsThw4EnnnDK\nVSRIp07AqlWUFyZelBgqn7vp71dCrh4CgBM6Cdg78YQV97//PXw9/cTdL7FXELq4m719dTp3pjc7\n5eP2QtVTF/ewD5B4MC33FMLizlQty5fTt5QkwO3bO51vbCjLfds2p1egjpd1O2QI9ShUcc0HHuj2\npQPRUSYmZhIq1anGZvmtXOm0BSQDZbmrbvb/+hf56lUsvS7uuuvDFK6cHEfYPvww/jo+/rj7oewn\n7omE/OnnNhnZFJXlrg8YUhWWO4s7k/F4XeSlpeF8vMpy92o0fOMN+z6UKO7cae9mD7jdKWFQnYNs\nll+rVvHlC3n6aeCYY6LLTXE/8URqQFX+dbMOevZFndxcJ1qmfftgS9eL225zp+TVXUpmHLwtVj4s\n+nE9+WT821GoeuruvaBG2ETwayeoYljcawplZanJK637xHURHjYs3O/Lyuy+dp1PP40uU8c6caIT\ne5wo6saNtZHQj5tuInfTqlX00fe1YweFZgJOtIV6IJr++gUL7LH/+np16/qHTcaCLuimGyqR86PE\nvVcve4K0WFH11MU9GaGtXpg+9xTC4l5T6NEj+nX0nXfcIWKJsHw5JYky0WOsp01zpm++2b6dAQPc\nHUvmz6c8LAqV+0TvWWiLJlHi/kccw/l65VlX4u7X0BkvzZu787rUrk3iftddJMoqV40Sd9P6bNAA\naNcueru637pu3dgiYvzQxb1TJ3pwX3stzSeSN0W9/ZhZGuMlK4vaMfT/LMhYSAT1YKuCrI9BsLjX\nFFRiKsWaNZShTh88IRYqKuiGLimh73btyHVgvh3oHY1OOMF7e8qtcNBB7oa9zZupA4xC9TD8/Xen\nzOZDVQ2tYR9ePXvSQBurVpG/3oYS91gjdOKhVi069ooKylKoUI26YXuXbt/uTOfnJ88lYUtJ8NRT\n5NNv144eqvH0QejVi9IQ/OtfiddRkZPjvi69GuGTgXpw7Ltv5e0jJCzumcwXXzgpZRXKylXhiHrC\nqVjIySFfcX4+5c1Qr6F6HnHA3ZDlxZQpwH770XTjxv7RDKobty7aNneTKjOTbHlx1FF0QyrrWc/X\nMnYs+e27dnXqWNnUru3kkDnsMKdc+bO9UgqEwQw3jQdbiGN+Po1cBNDDukOH2Lebl0fpk5Np+WZl\nOW+Q/fpFR2Qlk169yAAxUxinABb3TGX7drLKzfC2U06hLHXKXRGvf1RKZ7AD/a1A9xsDlB43iPx8\nx5qqX98u7kVFlF5XPQSuucYZes0m7kr89HBHP0yxmjiRklwVF5MgXHQRNSpOneru7FRZ1KrlPJj1\nyBjVsBzWNaTCOfWBKfQslDUB3W334ouJhWoGUVBA7UmVuY+QsLhnIlI6DZbz57sbdz75hAZHUG4N\nvQOMyaxZJHoqZ3kYVASKEBS6GMaC0cW9Th27uLdpQxZsvXp0fNdf7zQyzp7trLdyJe17zhyaj9fC\nrVuXBtnQ87JnZZHrKZn5SLzQfeN6pJCy3M3xR724/346X0cemby6pRu6uFdmpEw1o+YcaU3igw/I\nygTIEv7gA/fyTZucHpZm49dff5Fwbt9Ovk/AGah52jRqyPRztejh
ha+9Fu7BoIt7rVr2BFa2wSGU\nr/3uux0f9A8/eO/Hr32hut30ejuCbrm3a0f/h20AEMaOHjFUFQ/makI1u6KZuPnHP+jC3bjR8dUC\nFMVi6+6vfJBmjPhDD9Gr68svO41xK1dSh57jjyf/s9k4q6hblzrMmNs88UTgllu845/z82lEnksv\nJT+tmXRJbdtEF0D1UPFL5GW+paiRcYDqd9PrDaFmjP/JJ/snCwvDyy9T24HeWJ2pqLYSoPo9xCuR\nUEcqhDhFCLFICLFECDHYsryBEOJdIcRPQogfhBD7J7+qjC+qO/yxx0bHMquBCWyYbgslcjt3Oste\ne83dFb9PH/u2Cgsphtjc5pAhwL//TTlmbOTn0w342mtuK2vPPZ3eq7YQPt19oyJY/KIszBzk48Y5\nvV6rm7jrOWIqo2PMlVeSEZCsPgDVGb0BvLr9z5VIoLgLIbIBPA/gVACdAZwvhDCDUO8AMEdK2Q3A\nxQBS31RcXfnpp8QiHUx+/tkdSz5vnnvw3Vq1KJeKLW0rEF0X1cCqQhxteIXh1a1LLhszVFBZ7F51\nMEMZ99mHog5GjXLcNbZwR70sTHjiFVc409u2Ucre6oruQqrMXCgAXS8TJ1buPlKJbjCw5e7iUABL\npJRLpZQlAMYCME23zgCmAYCU8mcAbYQQHndyDWbJEhIUM/91vJSWUvSI11BhL77ohKZ5CZkp7srf\nXVISvkfrvffSQMrKcjeFVom6l7ib4pWbS+0Ehx3mbMsm7rpFG0bcdV++ehNQD7DqbNFVtriPHWsf\nOCRT0CPCWNxdtADwmzZfHCnT+R+AswBACHEogNYALGNz1XCU7zvW5E1//un4lJcto9fMPfd0Uona\nsiUClChLNX56JckyQ+qUAJaWhu/s0bMndT6yWe4tWzqiro9mb9unDb2h1SQnxxklSO3T1ktTkZtL\nETD//rdTFnZEn1RS2eKe6egdrqrzQzzJJOuKfhhAfSHEHAA3ApgNIKpPuBDiaiHETCHEzHV6o19N\nQVnCsd6sLVo4PTgHDaIUruvWkc8YcEdT6NSp46R7VYMpm/i5ZcJa7sqqVpa73hi4997OdI8eThd1\nwHld9svS5yfugHNe1Hp+Lq/cXIpdv+UWp0ydl6Ii79+lGr+HHxMMu2U8WQVAH7qkZaRsN1LKzVLK\ny6SUB4J87k0ARCVjllK+JKUsklIWNfGLr85UVK/KWG9W3YK2ZTL08oFnZzsxvoWFFFFj4iWGS5bQ\niEhhUP9l3brk899fa0/XG0KFoBh7xfbtdDx+DzsVFeI1ILOZEsDvbcPWYatPH+p4lYIBjEPDlnti\n6P87W+4uZgBoL4RoK4TIA3AegAn6CkKI+pFlAHAlgC+klCH6nWcYb73lL4jqbSXefNI7dtjF3U/Q\ndHF/7DHKQqijxH3ECEovq9w0Eyc6HYEAEvBHH3WyFOoo69wWrmhGueg3Wm5ucE++oUMpEqh3b/vy\nRMUdcCfsqo6wuCeGnkqCLXcHKWUZgAEAPgKwEMA4KeV8IcS1Qgj1jr0fgHlCiEWgqBqPlH8Zzrnn\nOtnsliyhXoT6cG6qa74tFjwMP/xgT8akBM0chQdwrHrlujGFoqSEOjVddhkNDu3V2LvHHjSa0v77\nU29R1QBXWOhYQzb3kBmWGetbS2EhdbjyuilNcfdzJSUzVW9VwuKeGGqAE4AtdxMp5WQpZQcpZTsp\n5YORsmFSymGR6W8jyztKKc+SUiYpj2waocRaCfm8eSTEV11Fgn/llcCvv9Kyzz4D3n039n2onqIm\nStz1ARuU6KkGQy9xl9LJYeI3eIV628jPp0ZdFYetC2aslnsy0BuApcxMcU/XelcXapCg69Scd5TK\nYssWEnQ96uTgg4Ezz3TmFy8GXnkFGD/eKZs/n77Ly8NlTgTITWIbVFmholJeeMEZwUcJrhJZm+Ws\n6uKH6UpSDwl9e5VhuQehzkdZ
GfXc9CNdRbIGuRKY5MFXTSJISeJ55ZXu8D89kZWJElllWQ8ZQu4U\nU+A3bIi27qdMIf/wxIn2BkAl5M2bOx2Hpk4lX7nfEHFePU51TOFWIq0Lpk3cv/3W/rtkoYv7xx/7\nr5uu4s4wccDingh6Y2TYARzUgBW3304PBxXOaMa+9+sHnHVWtPA2bUr+7rPOcpfn5TkWnu7Pb9+e\n9qWI13972mnR+wOCxf3SS93zyRZYJe5hYvLTTdxvvz15w+IxNQ4W90TQBT2suOs+6HvuAVassP9e\nuVUmTHCXK9eLKVQFBeEaaWMVd5V7xXQHhXXLXH+9e76yLHc9tt4L2+hB1ZlHH3WPZFUdmDgRePPN\nVNeCCQGLuxdqGDkpKTa7Y0eypt95x1lHF+SwvTlnzAD+9jea1nOdP/use96r047KYmiKe61aTplf\nA1IYcdXzmqiHkSmMqqFWb8Q1xd0clQnwbzOIB3XMYR6uNbRhLamcfrp3ugumWsHi7sW++1JX9u3b\nqYFy8WJyoZx9trNOPJZ7eTlw4IEkhHoHok8+cfec9BrIWA1dZopkQQE9IK67jpJueWFa7uedF72O\n6tKvYwrjEUeQVT9mjFNmirueatVrO4miHjphLHeGqUEk2YzKIJYto2/Vfd9ESreghBX3XbvIN67e\nDEzKy0mwvMRd5U6xuWWaN6cHkR9m5IXtDUEv80qsVVhIEUA6ieYYjwch6HyxuDOMC7bcg5g3zz2v\nusH37OnuZv/LL+G217GjkxbAFheukovpy2bNoo5DgOOWsVnuYSg3Uv7YHiL6tgKyJj72mDZMqm20\npJAMHerduffhhwMyIZSXA19+Gfe+GSYTYXEHgOnTgeeec+Z1AdSjRHr2pPww339PHZF0VMOjV2PT\nE09QfvK333bE3WbpqjeG1q2dsvx8p2HNq0E17IAOZh4am7jr1r0Sd0usdUkJ8H//pw3PWVgI3Bzp\nnOyVC0ahDdz911/UMbZHj+jVNm+maFHVTOHJkiXRZe3bB/yIYTIXFneARPvGG515fag2Pauh8nd7\nZVgEvMfp7NmTUvQ2bOiIu2lFA4646yKcm+u4Y1QuFlPcww5ibYq7VxpehY/lrpoMXCH6555L30YI\n37BhWvtqWZnLr6/aZjdtit69OkVhvV4uuPMPU4Phq1+ntJRS6uoDSuvRJWHipL185fp2srNJ0WyK\npUIgdRHOy6OQyMmTHdGMN+rEfKBccw0Nw2cO5vHjj9QjNoS4K3EG4Jwjw0103XXaLrKzXcKrtmNL\nbqmqG1c7bE0YQo5hPKi54r5hAwmpHkc8ahTFFg8aRPO1apElPWgQCXGYMEIv94gp7gDlcrnmGqds\nzz0dcdcjaVT2xFNPdcrMHCpjx2LjRhrH2iW2JuecAxx1lDOflWUf2/Sgg0iNfcRdF+P77otY3urY\nAtoAhg1z/Ogqc4NN3FWZvvuKCjrOjUEZjAYPrn5x4h5UVNDY5H/+meqaMJlCzRX39u3J5aKHDa5d\n615nxw4yHY84gsTq+OOpXHfV6Klo583zfgDYxB2gTkwffkhK16IF8PvvVG5a7ibmkHX16+O222gc\n60mT7FUAQG6hr76iaT1eWT0RRoxwrx/CcgeAu++mxtXd9fYR9/JysuSPOMK9HZuXSi3Tdz9lCh3n\nzV65R0eMoM42QqRND8/PPgPuvNM9lgnDJELNFXdl9n35JcbgPMzF/li+HBiOKzAK/fEzOjrrqsbB\nU0+lKBalSoC7UbRLF2//gZe4//orcMopNCh0gwaO49m03EH9pIYOjVi6Bx9MDyOVnbFRo90/2biR\nPCoqs4Hn8Y8c6cwrcT/4YPd6kfKl6+vujnz85BP6/PCDe9U99oDjatLeYMyIT/XS8eef9FBQbdE2\n1DFt2OAkr1RRj/q44C7atUu7MUHV28tff6W2HkzmwHHuAC4AdcRpNfpP/IaGu8slIkKtu1qaNa
NM\nkIratSnaJshlo/vrdXHX3QZ16zppgS2W+7//TVEltWtHrNYmTci/ceKJQPfuu43UbdvIqwI47ZtR\nmGGLZ5xB2SHNwTMiynzUo72xehOlijnxRPsmmzQBvX0ALnE1LXK9M+9993nUL4L+jLvkEmoOUdvz\nzCZga4/o1Ml/RylGNUH4utQYJgZqpuX+1FO7J3fBEeV1O9yhiatAI/Ss3l4Xd9wRGWsjL88t7gMH\nUpze7nhAD/TIDV2V9BDMyADTM6esx4fLNDHSLHfA8Ms2bEg544VwibvC5uqw8sAD5BIy3T0RcV+9\nqXb0vg3KykA9e4uLXcnK9IzCFRXuTq1+TJrkzqemvGaB4m42fG/YQH0FPHjpJQqWimf8lGRhy/lW\nWWzdSrdAOjxINm6kW8Q8LwsW2DtSMw4103IfOHD35AY02j0tKtxK2BsTMAtFGD21MYYOpXDzu3Nz\nndi/4cOdkZf8ePZZd9y3rkpnnOFMFxQAK1bgkFMbA7gXEhGzNmKJqswBeup4HeUh0sX9r7+c7L++\nZGc7HaR0jLvKz4Wyu17Keo9w4IHO9HvvAQMGhKgP3KcGcERdvdSEttwbNrSvF0G1aZ96qn/mhsqk\nKi33gQPp0u3QIXXHG5ZrrqHRKw8+2G0/delC36l8IFd3aqblrqHEPR87o66UH9Edb6EvdpaTdT9h\nAjC3tJMj7gUFGDfOSey4m9NO2z0Q9Ou4GGv7GpkRdVXShUg1dEYYhf40EfHjK8/Pww8Df/wRfSy6\n5a5c/48/7gzdGhfGObH1FVJs2EBeIv0n5s03dar/7s49l8Tnk0+il82eTW8BSuRzcsjv/wWOca8Y\nQ2pf/c3GbyCqykb9X1LS29Hw4YkL13ffRXfc/fVX2jYQw1tdEpg5k7yXinXrotvudSoqgCefJGEH\n/AfYYjyQUqbk0717d5kynHyP8iscKQEpG2K9rIVt+qLdn/v/ucM1Lxs1oom335aAlE2b2nezAq0k\nIOUxxxgLnn1W25jGpElSAq59rX7ijd2Ln37aKe/RI3p/zzxDy264Qcq6dZ11jzsugXN18cWuOjVt\nGn1+zM9XXzk/f//94PVj/fznP/R9+eXaadRXWLQo9OFt2eL87KmnEjhPCfLxx1SHY4+Vsk8fmv7p\np8S2abvE8vKc8qlTE9t+InU57jiaX7bMvv6bb7r/0unT/bcXcwXSGAAzZQiNrVmW+8yZTjhjhFKQ\nlScgIWA3lWS2YQlGfO4yn8L9vCy+kog/X42LvRsvk8mMVAFQfp4Tg64bpF9+GT1Qk1q+dKm71+jn\nn7v7ZXn6YNzwAAAgAElEQVRRXGwZ2tUwH8NEc+g/8YxoSQDVBu3plmncONR2vvrKbdlOnUr+d+Ve\nWro0IKw0iShXU0WF09WhMnKh6S69VI67rVIozZsHfPRR9PLK6p6wZEn0EAnJYMEC+9tmKqlZPvdr\nrqGelxpK3P0orTBUJHKHVOQ7UTSzZzsRKooskAM1yo9q660DWGOy9eeAHjlSVkaDMdle3T/8MLrs\n9NODX/OPPJKEpaJCi+g0fhTm9VjvpFsZGQD+8x/6tnpfbr890MeuOMbw5kyeTJ8NGyifzYEH0nO8\nKvy6tt6+lR0WmUpxV6h2FfMcV1bmiAMPJIOjrCy5Y7dUxzaAmiXuEad1GbLxBvrjYowMZbnroqr4\nBkegVrEjIjbRU+Ie9Yd7We6W3q0lJWRprFjh+M7bt3eSUE6aBLRqBSxfHltj3JYt9Nvzz6fQ9HHj\nHItRSm9xD4MuurHepPXq2XPM6KierdbsxlddFWo/Xs9XwHnrUUFR27eT0M6Z498AuXUrZUFeupTs\niDBt7Qpd3NUpf+cdSv2jUhrZmD2b3j70JKXl5dTZOoiqGrvEdm+oiF+dlSupwf2AA6Kvm1dfpYCn\nevWAK65wytW1unMn8I9/UEzAtdc6QQRffknP+oj27n6TXL
GCupZUFvPn0zWjdwYfO5b+38WLgaOP\ndkbcrDTC+G4q45MSn/uxx0oJyMdwmwSkfAWXyffQWwJSNsEaWQdbrD7eW291z5t+cUDK77+P3t1S\ntJGAlK1aGQseesjbB2hs+6efoutz/PF2X/TDD/v7qktLnd1cdhmVffedlFdd5b2efOAB6/H6febM\ncX4+fnz43wFSHn6497LrrvNetnvi119DXQq//OK9rbvv3v1XSEDKlSulbN3a/nfp9O9v1CkGxo6l\n3xx2mJRdu4bfjm29p57y/r1e/u23sdUxXoYOddfljz8s/5+UsmFDZ/71173/n08+cabLyui3997r\nlP3nP9HHqybU/GefJfcYzXNtzi9e7D6GQYMS2Rf73KOJWO6rQUPD/YmG+KppXwDAOuyJbbAPNmGz\n3E127KD4bd16Lou8GEnplM2YASz4PXzec5vP2iuRY1DmxDffJMsIcKJtPv0U+Ppr93qjR2t1HjyY\nnPY+jBrlHm1PPwdhIzJUBuC6de1uJYCyJnvlZdtNwLv2qlV0DvQkoCb33eduRxk2zImI8jue5cvd\n8yrBpxdr11JbyJtvOr7wn38G5s6NrjNAKQqiIrM0xoyh/029geksXBg9BnvYNz0pKcLGHNrAi5Ur\ngWnTnN+ZUTG2dozVq919KPze+PQ3gSefJItd7zsxYgSdT7PNQrsNXcu2bAHGj7fva9s2itj55ht6\nW/7gAzoPU6bQ8tmz3aNJDh4M/O9/zvy4cXRf6uf63/+miLdKJ8wTAMApABYBWAJgsGV5PQATAfwP\nwHwAlwVtMyWW+2mnSQnIW/G4BKR8/KSPQlmT114bbSWa6/TuTd/Dhjm7W4iOEpCyRQunzGVJ3HVX\ndB2NbasoCvXJy5Pyv/+11/P224OPBZBy3jwpzzrLmW/cOHqd0aOjquX5WbpUyg4dnPkZM5zf+Vlg\n+uf88+m7b18pp02zr1NR4f373RO//eZ7CXTrFq4+Xp8dO7y3fdRR7nVvvtm3Ki4L/ZhjvPfZtq3z\nH+Tl+f8vb70l5YABxrnx+P++/NK/foolS2j9ffcNt75+nUX9T1LK00+PLm/ePPr68zofZiSN7TNk\niJQLF7qvjy2os3t+/HinvldeGX3dKs45x3sfJSX28h493PPXXy/lggXO/OTJ4c6j9/kNZ7kH+tyF\nENkAngdwIoBiADOEEBOklAu01W4AsEBKeYYQogmARUKI0VJKj+42qeFndMIq7MCTuI0KQjod1VNa\nIS3rKD/w2LH0ffjhwOvN7gH+IMtr1y5LA5atm/yPPwJa0IwZF56VRbHgixYBd93lXhY2wmDq1Kh2\n5SiUFbV8eXQ+NZ2jjqI3ifbtnXOgrJT33w80+nejzk2jRt45x0L9XT6W+48/2sfsjoWSEqd+y5ZR\nvpvWrcnHqltnTZu6I5amTycffNOmzjnVLWG//gPLlpEsqP37sW5d9BuctF2woLacgw5yt+OXlpJl\n3aYNvSV17Oi8PfrVsbSUcrXpx6ySqypycoCPP7ZvR+XLU/hZ7l4jX+oMHRrdeU/vsPjFF85wyKp9\nYsUK6mv45pt0XhYtcuLsbbz+ur3cNb5BZF/6ve/XhpJUgtQfwBEAPtLmhwAYYqwzBMALAASAtiAL\nP8tvu6mw3M0n7BOnTI3LetuOgqiyZs38f3PDDe46SIB87yHqabN+dD+m+lx0UezHAki5557RZS+8\nEK4uqj66tfjNN+QLjaUOV1xB33fcIeWPP3rvR023aGEsUxNr1nj+//vvH9/50T/r10f/T5070/dh\nhzllHTpIee65tF55OZV17Oj+nRDOdO3a3vusW5feGPTz4HWtPPeclBde6C7budN72/37u7c3eHD0\nOf/+e/u+de6+O/Fzq3/GjPFeFmkGiukjATkH3aKObetWZ17vflKZH1ebVhwgiT73FgB0L15xpEzn\nOQD7AfgdwFwAN0spq0
3mCimBie9FO0sXrfful++XKmYLCqPKgiI8dD/cbjQrc9Mm4LXXop/6Jnr6\nd5N4YsoPOcRuEccT5aKoqPCPRrGh3joaNQoXoqen5NFZuCTXNZCWzurVsdXJhrKcVS9PgGKcAbdP\nu04d5/9QIY2LFjlhnABdlwrdB2yOm1Kvnjud0ezZwMsv29tYKircidkAf0tXt6InTXKORV9u9n2Y\nO5es25Ur6e3svfcoQsiGmUIiLObbsk6s15ZiJ9yvhKNHu4c+Nu+9QYPsMfiJMHly/OPsxEqydnMy\ngDkAegJoB+BjIcSXUkrX6RJCXA3gagDYW8+JXsm88QZw8cXRavjSzOhOQwo/gdmO6Fa9oA4n1phl\n7V8eNizc2BIqzNAmvvGIe2mpXdxjjQEu1J535eWx34BKvBo0SGzAq85HUQ4fXTgVyejCXlpK4XVB\nEZe6uOviGiZfu+mWqlPHLe7nn08PCtswtRUV0deiX/oJ9d9//bVdiG3D0HbrFl120kn27ffpQ42Q\nsebMsbk8TjiBAgCSJe79+7vDaU0hP+wwoHnz+PblRVUOLxDGPlsFoJU23zJSpnMZgHcibw1LACwD\nEJVjVUr5kpSySEpZ1KRJk3jrHDO26IEg/ARmF2Lv/bFxo+UC1/LCq+iVoAvXLyNiMsV96VJvH+tF\nF0WX6Wnt5871tuRMRo0iIVbHVVBgF2aToPHAy8qi2yuC/NVhKCkJ5/OtU4f88Z995j1muhemYbF1\nqzuCSP3Ptp7R77wTHW3kV1+Vk//jj8PVzSsiS7W36HTuTDHp5eWuJKEAnNz8YZGS6pid7R3Zcsst\n9nJ1r5jiDuxOAQWAfOM6f/+7fQz7adPoP+nZ0ym77DKfymtUZcexMOI+A0B7IURbIUQegPMAmB14\nVwI4HgCEEE0BdAQQ8vauJP75TzInly5FxWybT8QfP8t1GGIfLmfzZuPG2LzZJe7qhg1KC6+Ez2a5\nx3rDACSCtm0NHWq32gCgZcvoMl1sBwywW3c2lMWvxD0vLzrrMBB9UwSFRD70EHDyye4u4bGKu+31\nubQ03FtNnTrUYNqjB3DPPbHt17wGiovdmTRVKn6baJsC5bWeQr1lBSV0U3g98M0wUIDCWhXmOQuV\nqdRCWRk83W59+tjLlVGlG2WtW9O3lNFuMIUQ9nq2bUv/r96ZatcupxOZH2FG6kwWgeIupSwDMADA\nRwAWAhgnpZwvhLhWCKFU7n4ARwoh5gL4FMAgKWUcUpNEHnyQHq/t2qF8/Dsx/9zvBp6Ng7wXerBl\ni/G6XOj22ytxD7JalQjaBHnRopir5Wm5+2ET91geLMuWkegBjkirN5a8PPIxS+kWJdOtpYt7djaw\nDo0xF87dtXAhfeuRPmHeCHQKo5tWUFISrj0ikdfvIAFQ/1fYXCZBbxqffx6uLwcAPPNMuPUA99tv\nZfqZJ0+m++Jvf7MvlxKQs37EznsfBUAPXd36N+P/dQoLadsVFfQpL6dIIgC44AInvr6khCKxgt6e\nq1LcQ51yKeVkAJONsmHa9O8APLxuVcCGDXTFq5wihk+gIo6+Wn7ivg7xuZSeeMJ7mboognyTfrnM\nYxUvtb1kdEPXRx70orCQHnINGzrHoQRAPbR0C12fNt1ktWvTA2L6dPptV8zFGjg9qdR5VEIcVrx0\nbCGZpaXBvvvatRMTsyABUMdmpvP1IkjcTzgh/JuW3pAchH4O4nmrDEvnztEP3A4dyFVUuzYZVRUH\nHISdkdDTggL3W4X5VtiunTtAQt+2ea8UFdH3WWfRsiB3YVWlfAAyJZ9748bO+9Nvv9G/A2Au9qex\nUdEm5k36ifta7Om90IdHHvFeFlbc/Sz3eIjHcrdxzDF2l4DOkCFk4dStG912oLtlFPq0ebyNGpHl\nqmL9dWHXtzdzJu3TlstE5+WXo8ts53jFiuCemoWFiQ26ESTu+shWNsyGQb/RsxRmu5QQ
dO68uP9+\n72WHHELf+j3k5xL79FNnWnd1hGHQIMfFojNrFj2Ehwyh+YoK56FcUBA1noyLRYvs4yXY2Hdf2q4a\naz7oXqrKHPqZlThs+3aX1d4Nc31W9sdP3P/MagwkcPPaLkblsgmyvk87jb6TldGurCz2Rh6vOnpZ\nLSedRD7do492LPB+/ahLt0reZBN33fIzb5o6dajMq8OT2t4TT1C0iD4WuA1bxxLbjdq3r/92AOC6\n64IfJn4k2uhmDherxoL3w1xHCO80F4DjmrChGiH1h6OfuOsZmvfay3s9v30pDjmEUnzUrk37140H\nFSJaUODU7fDDne1s3UpGSqz3lu1hfOih0QPIA7EfXyJkhuWuOPhgb8dbwM9M/P7giorETN0VKxyB\nXL6cxEdZekEWnxrwOlmW+4YN9saweLDV6ZVXKGZ52zZ3it0bb6SbSVlQppsG8LeC1DIv94d+Ht/x\naHLR99WmDdWnf3+nLFY3lwp/vOuuyrXcg2jVyj0fxnIHaPBz1R9DCHubg8IvRFDVP6zlrveRiLWt\nwnTXff45Ha+6FlUdKiqcGH718Nyxw3nb3LCBXIb6W0S87NhBoaVbtwL77Udljz1G81UYJJhh4q61\nKMZyX9qsv2TmerbxKLXtoG1big5RQhJWFJIl7vGIkJfo2epUrx4JhRndIoT7Rg4c9DqCmUbXS9z1\n119b34GTT3YLQ3Y21Ue3misqYusqvscedJxCpE7cL700+qEYxnIH6M1LeTeFcJ+f+kauO1tEk0Kd\nQy/LXbltLrmEvpXlfvTRsd93ekpdgI5Bj//Xx6ZVHaPUG2ZBgXOMeXn0/8UwQqMnBQV0Xdap4/Qd\nOProqo1xBzJN3DXK4X2V1K4NPPCA/+8rW9wffNCZljK85a4IW7+gnrNh6djRmfYSd1udwjYshhX3\n2bPdEQlhLHcbEye6j8P2YNq+nWL2V68mF5KOTYB1UY2ncdtv22GxNXiGFfe8PGff6lg2b6aHo5n7\nxdaBSuEn7v/9rzNU8CuvkLVcpw75uD/6yPn/+/Rxd9yyccYZJJp+qDps305vqBddVPn3ts5DD9F+\nlfunKkl/cfe4i/zEvX5953XJi8oaCUaxZYs7triyLHc9KsAklovcHLXIhq1OYS2hsOKel+d+C4hX\n3HNz3S4YtV99FMYdO5y4e/N12uZmOPlkZ9rssenn4jCJw7O4G9v5C+uWyc93/i8l7oWFJL61arkf\nOl5tHWo7Zl1UY2b9+m63jfKZ77UX/a/qNwUFzjLlijTR3TleqO2NHk3fhx0W/Jtkkp1tb2OrCtJf\n3M1EGhH8xD0rCzjzTOqo40VVPN11/56Ky0625e5HLK+g3bs7EQGptNxNvKzcMA8/veu52u/55zuZ\nLHVXhE3MzjrLPf/SS870pZc6FmpOjr2B1XQXdexI7S8qt70NcyQor+PUc8JHjeHrQV6e/zXx3XfO\ndHY2vRGMGxe9np/lHtRYrP4Hdd1s3EjpQ+JF1UHljQnTIJ4ppL+4e/gdynwCgYSgj/LfJiO3Sjwo\nvzvgRMuEDZVKxptFLOKelRU87rStTmHFvXdv+jYtZK9esgovcQ/z/+l109dX18Xllztltkggsx1B\nr4sQTiTQkCHRx3XccWQR6z7j3Nzgc2xGqdx6q329eKIycnOdY1APch397SM7mx5+Bx4YvZ7ahn49\nqLcavygbwPlP1Hf9+t7XaZgQ3t2pByI2YHUYN7aqSH9xX7MGf6IBNoLMrDJkYwX2xi/wVgUz0sJ2\nkVRV5jaTsEmRYnn4bNxI+TBMYvHtqgciEFuDatjzOHQo+bZNcZszx9+t4HWz2jJA/vOf3tvRz2fj\nxuRj1ttFTHEfMya4Y1SzZmQ133uvu7yoyGnc++QTCp0Ewp0rVY8TTqCOQQ8/DNx5J5VdeKGzXjx9\nF3btIiFdu9Ye9282QAP2h6/ZgQygB9wffwS7KNRv
whgeYY5RbU+5hZLRYJouZIS4N8KfaAhqNbod\nj6ENVqAIszx/oi4KZVnZBqrVb/bAod2SSDKSW5nUr29PgBSLuGdlucVdWWJ6Q5HtgRP2ZsrOtkdg\n1K7t33jndQy2DjhKWGzWsflgatbMXWa6ZRo2dDfmebVtNG8eLUJNmjjbKyhwrOwwD2z1MMvNpcgW\n/bzp5ymea1Ztp0kT+//m1QdBnSd1Hyl3ln48WVnh3ibUA5PFPXHSX9yNfKYTEZxAWl0UnTpRzPng\nwdHr6Bdm2N5qfoS1tMN2kzd98127+q9vs6oTsdzfe49iovU3gkQs93iJ5TU7L4+sXVvGyqD/xzy2\n3FzKKLh8OcVIh8k8qkb8MUUpFsFRx6v//+pNSn+jqlXLnqnxkEO80xZcf73/vm2Wu77fhx+m+0nF\n2cfz9qC8rGEaS8NgumVY3NMJIwepmdaz3h7Rfg79Rt17b/sfrl+8NqtMDw0Mg190gU5Yy930zZuD\nKgNAly7OdCJWNeAWd4COp1s3t7sikQbVeInlAZWXR9auLXIl1jaWVq3ofLRuTVa8X1SSQvmnjz02\nul5hsYm7Fzb/dlaWd8/ToP9Kv170e0iJe14e3U9qWTztQipBnBlXHy+65Z6dXbW5XVJN2qcfqChx\ni7cp7rXyK7DJiEowLzrbRRh0s590ErDnnuGTNxUUhMu3Hq+4mzRvHh3dYBKruAdhO482d1AyiVXc\nvYhF3GfPppwisbL//jTyj3IHKtT/EOZc2cRd/Tfmf2QTaynjb4wPOtdqf+pcxhOUoCz3MOIea4Nq\nTbLagQyw3Ddvcf/DprgX5EarYJiLIihLXl5ecCSHqx4hLfewbpkgcW/Z0i0WthtNv1mD4rCzsmjQ\nYCA6d4m+jkks8d3xEKtbxotYLLpY39p09t03+jypeoU5V+o6CmO5246pvDx+cQ8SR3OUsHj2o3Ki\nh81SGYSqA4t7GmKmX90Bd1hDrbzYxb1bt+gYZpPcXO/t2LIjhr2wdHH3CxsLiqoJ83aii11QA5wQ\n1Oln/nzg1FPt69geIJUt7rEIiN9/EIu4J1sk1PbCuHbC+txN3n7b+V284h5kiZtvCvHsZ8AAusbC\npJCO5W1y586qzaVeHUh7cS/Z6TZhKozOS/FY7p07B/sf/S6UZs2iy+IRdz+CLHfzxgqy3IPyXuyz\nj7tvQJh9ApUfaRRLN/9k3dzJ7gOhzr3XG5GOOoZYU8eqSJhELPeg+0bdM4mkpc7K8r/GdA4KMWaO\n+q9UmGdNIu197qUl/nd3veZ1AKN3YNBFKkTwDex3odgu6rAXuj5ykB/mzT19ujO6ERBd/0TF3UzQ\nZMN2jJXdgJUKcU/2MZ19NqWiCHOO1XUXxueuox6yiYh7rHWrjI6Aq1fTMaxZY+9AZaI3qNY0cc84\ny92kYZPo51e8jYM6sYp70AgtiunTnWk/4TLF3WykO/RQ97ztRtN9x3vv7b2vsG0LynI788yqy1sd\ny1ic1fW1PDcXOPHEcG85NnEPgzr2RNwyQaj/39aJKVk0bUrBAgcdxA2qQaSluO/aRRfQjh3Azl3O\nP7wT0a1rtg4wQT1Sw1w0ulibwmhe1D/9FM6fahKLuOvCNWMGxRz71WnSJKfLP0BCf9NN0ftp3tw+\n6ICNwkLqUTp6NI1WZIuzTjbt2vnnFtepruIeC+o61a+NMG8v+qAVlS3uyR4tLBHYck8jSkooYqBH\nD7J0Dhp+w+5ltRCdRMzWGSKos00Yn58e/RIk7l27Jv9CN7tx6/UvKopuMzAt9yOOiH6g2XyYnTvH\nFnN8wAFOXvBYookSQY1jGYRN3INynVQ3zDFnw6Lug0MOSW/LPVY4WiaNUH2WgsbrVNjcEUFx7bYe\nqya1anlb+PHEzcdKnz7uGPugC9fmgw8z6lFV5r6OF686tmzpzsZoE/eZM4PHRK1O+PncbSxZAhQX\nUyP/Dz9E54yx
9dZNVt2qw7Wj6rB9e80T97RrUE1kEARFkFvG76KsW5fSh+puGXN7iTSomuTnR4d7\nKvTcJkHRPeYx5eREW+62c5useOPKxOvctmrlbouwiXujRuH99uq/TyWmdRxEZKx4AM4ISKorfo8e\nyc01Xp0t9/XrPbODZyxpJ+6xvo7aBCuRFL/6COpeJMtylxJYtoxyl+Tn+/uMc3LIQvMaeckWGqnX\nySbu06eHi+BINWFFJFGf++LF4aOZKotYxd1GQQH1Xt5vv+QKsNpWdfK569e4bbjFTCbtxD1sSlyF\nTdxtSbbC5kBR6QFidcvEe6E3a2aPmzfJzaXBptWA0yY2t4ye6sAm7omMCFSVhH1wJiruTZv6jx1a\nFTRsSN963qB4MEckChtbbqNTJ+Dnn5356uSWqQ4PmFQR6tCFEKcIIRYJIZYIIaI80kKI24UQcyKf\neUKIciFEw+RXN3HL/fXXgWHDotcLEveFC8l/qbaXbMtdvTLHS6wXcXa2O+eal1smHfA6djNXSyb4\nXNu1o0yctnzr8fLtt+HbsGx8+aU7j1F1cstUhwdMqgi0V4UQ2QCeB3AigGIAM4QQE6SUC9Q6UsrH\nADwWWf8MAAOllCFHboyNWC13M976pJPs8cRB4m72HvSLW49H3I85hkIYdZIptrau4XpqgHTOlud1\nbs1IqUwIhczKcndWSwaJDt7cuLE7R351cstUhzqkijCHfiiAJVLKpVLKEgBjAfTxWf98AGOSUTkb\nsYj7ww9HDxfmJeKxPuELCuKLlnnxReDjj8P9JpnUqUNjti5e7IwRetRRTmenTLTcTXFPV8t9wQJn\nOh3EqjpZ7nodUjW6WqoIc/pbANCHIiiOlEUhhKgN4BQAbydeNTuxiPuZZ0ZfYF5/cKx/fK1azqAE\n5viYtota5dDed1+gZ8/o5S1bRpclkn3QRs+eFHuu5xO/6ir6bt48+jjSBa8Hs/m2la5vJ/vt50zb\nrq0996Rvr/aWqqY6+dyrQx1SRbKfZWcA+NrLJSOEuBrA1QCwt19/dx+8xP01XIrLMMJVpg8Np4jF\ncp8xw7tnaUEBxcN36kQNk++8496vyYMP0uvv8cdH1+nGGykb3i23OGWTJ4fLjJcol19Ox9i3L9Vr\n+HDgyisrf7/JRJ3PCy4ABg502i8GDkxdnaqSc8+la+7MMyt/XwsWeIfmKqqT5R421XYmEub0rwLQ\nSptvGSmzcR58XDJSypeklEVSyqImcZqJNnHvh7E4AxOjyrOzw2VHBOyiX1QEdOhgX79WLfqNEkUd\nr/S6Z59ttx4vuCC6XqeemrzRaPzIynLEQQjgnHMqf5/JRrmT9tvP3VtVndN0dcfo+Fnl6n+rCrfD\nfvsFJ+xSt3aqI4uA8DmdMpEwl8MMAO2FEG1Bon4egAvMlYQQ9QAcB6B/UmtoYBP3HJQhB/bh9GK1\n3EeODFePWBtU/ahOvsDqYG3FihJ3r/P4yy/unqrpyA8/pE9P2gEDqGPYBVEqUfVU5eD21Y3AW1lK\nWQZgAICPACwEME5KOV8Ica0Q4lpt1TMBTJVShhhMLn5soZA5jeojZ/PGqPKsrPCWu/KJd+8erh56\n5EUYy91E97tXJ8syHf3SSty9/tvWre3tHOlE8+YU6ZUOZGfTwC7VwVDQxT1dAwbiJZTNKKWcDGCy\nUTbMmB8BGE7vSsBquWeVIycv+kqyDYjrdcGNGEF+7rCdOfxEUN9HmDFW2XJPjCDLPZ2ZMiXVNUhv\narJbJu1uZZu454pyq/Vrs9y9qFcvOmwyXnTh1/O/6Oi9TquTKKWj5a4a8KrTeUwWJ5+c6hqkN7q4\n1zTLPSPEPUeUe3b5rwqxMvcRZp8vvuhMVydRSmfLvSaHvTF2avI1kXa3spe424jFck+EeB4ghYVO\nBAT73BMjk90yTPKw9SXJZNLudrC6ZbLswe82n3uizJ5N4zgmAyVK6gH06aeUib
Ffv/Db+P775Ga7\nS0fL3XTLfPRR+nbIUkybVrMjPZLJu+8CP/6Yfv03EiUjxD1b2J1plWG522J8470JTXHv2TP2qA5z\nrNRESUdxN90y6RJV4key88fUZP7+d/rUNNLuVraFQmYLMt3GjnV3Yqkqn3uvXsDjj8f+O2VxVidX\nSHWqS1jYLcMw0aSduFvTD0QEqV8/t8hWlc89Kwu49dbYf6dGAKrJjT7JoDp1d2eY6kLa2TptmpcA\ncOdu1Y1N3fI0fe5vvVV59YrH4p0yBZg40Un8xMSHSjeb6lGSGKY6kXa2TlG3EpyCD11lurDq1ptp\nufftW8mVi5FWrYDrr091LexUpwieIFTHs43RnZQZpsaSdpY7hEAWKowiqS/eDb+mx8ewYe60wNWd\nq68G1q2LzzXGMJlK+om7lNHirk2bljsTO9dck+oaxEZeHnDvvamuBcNUL9JP/mzivnO7M21Y7ukY\n/cEwDJMomSHuWrC3sta7dYssY3FnGKYGkhFuGT24vbqI+bBhwP77p7oWDMPUVNJP3CsqIODukaoL\nutw2MTMAAA/7SURBVJpOtcinm9+aYZjMIjPcMpYImVSLO8MwTCrJOHFnGIZh0lHcKyqqrbg/9BDw\n9deprgXDMEw6+tyrseU+ZEiqa8AwDEOkn+UupW+Dql8ZwzBMTSEtxT2BxQzDMDWC9BP3igpIuM1y\n3UpX4s6WO8MwNZn0E3eLac5uGYZhGDcZK+4MwzA1mfQT94qKqCIWd4ZhGDehxF0IcYoQYpEQYokQ\nYrDHOn8TQswRQswXQnye3GpqBFju3KDKMAwTIs5dCJEN4HkAJwIoBjBDCDFBSrlAW6c+gBcAnCKl\nXCmEqLyB46T0bVD1K2MYhqkphLHcDwWwREq5VEpZAmAsgD7GOhcAeEdKuRIApJSVN5olh0IyDMME\nEkbcWwD4TZsvjpTpdADQQAjxmRBilhDiYtuGhBBXCyFmCiFmrlu3Lr4acygkwzBMIMlqUM0B0B3A\naQBOBvAvIUQHcyUp5UtSyiIpZVGTJk3i2xNHyzAMwwQSJrfMKgCttPmWkTKdYgAbpJTbAGwTQnwB\n4AAAi5NSSx0Wd4ZhmEDCWO4zALQXQrQVQuQBOA/ABGOd9wEcLYTIEULUBnAYgIXJrWqEAHHff3+g\nTRvg0UcrZe8MwzBpQaDlLqUsE0IMAPARgGwAr0op5wshro0sHyalXCiEmALgJwAVAIZLKedVSo0D\nfO516gDLllXKnhmGYdKGUCl/pZSTAUw2yoYZ848BeCx5VfOsDCqMFw52yzAMw7hJvx6qFnFnGIZh\n3KSfSlZUoBzZriK23BmGYdykn7hLyeLOMAwTAIs7wzBMBsLizjAMk4Gk3wDZcfjcu3QBiooqsU4M\nwzDVjPQT9zgs93mVE3HPMAxTbckItwzDMAzjJv3EnUMhGYZhAkk/cecGVYZhmEBY3BmGYTIQFneG\nYZgMJP3EnX3uDMMwgaSfuLPlzjAME0hGiDvDMAzjJi3F/XK8CgBo1IiK2HJnGIZxk37iXlGBW/Ek\nyqZOQ4MGVMTizjAM4yb9xF3SIHvZOY6is7gzDMO4SUtxBwBkOVVncWcYhnGTfuJeUUHfgi13hmEY\nL9JP3JXlzuLOMAzjSVqLO4s6wzCMnfQVd/a5MwzDeJJ+4s4+d4ZhmEBCibsQ4hQhxCIhxBIhxGDL\n8r8JITYJIeZEPnclv6oR2OfOMAwTSOAwe0KIbADPAzgRQDGAGUKICVLKBcaqX0opT6+EOrphtwzD\nMEwgYSz3QwEskVIulVKWABgLoE/lVssHdsswDMMEEkbcWwD4TZsvjpSZHCmE+EkI8aEQoktSameD\n3TIMwzCBBLplQvIjgL2llFuFEL0AvAegvbmSEOJqAFcDwN577x3fnjgUkmEYJpAwlvsqAK20+ZaR\nst1IKTdLKbdGpicDyBVCNDY3JKV8SUpZJK
UsatKkSXw1Zp87wzBMIGHEfQaA9kKItkKIPADnAZig\nryCE2EsIklghxKGR7W5IdmUBsM+dYRgmBIFuGSllmRBiAICPAGQDeFVKOV8IcW1k+TAAfQFcJ4Qo\nA7ADwHlSKhM7ybDPnWEYJpBQPveIq2WyUTZMm34OwHPJrZpnZeibxZ1h0pLS0lIUFxdj586dqa5K\ntaagoAAtW7ZEbm5uXL9PVoNq1cE+d4ZJa4qLi1FYWIg2bdpA8M1rRUqJDRs2oLi4GG3bto1rG5x+\ngGGYKmXnzp1o1KgRC7sPQgg0atQoobeb9BP3Qw4BXnkFaN6cRZ1h0hQW9mASPUfp55Zp25Y+Gnyd\nMAzDuEk/y90CizvDMJXFHnvs4bls+fLl2H///auwNuFhcWcYhslA0s8tY4HFnWHSlFtuAebMSe42\nDzwQeOopz8WDBw9Gq1atcMMNNwAA7rnnHuTk5GD69OnYuHEjSktL8cADD6BPn9jyI+7cuRPXXXcd\nZs6ciZycHDz55JPo0aMH5s+fj8suuwwlJSWoqKjA22+/jebNm+Pcc89FcXExysvL8a9//Qv9+vVL\n6LBNWNwZhqlR9OvXD7fccstucR83bhw++ugj3HTTTahbty7Wr1+Pww8/HL17946pUfP555+HEAJz\n587Fzz//jJNOOgmLFy/GsGHDcPPNN+PCCy9ESUkJysvLMXnyZDRv3hwffPABAGDTpk1JP04Wd4Zh\nUoePhV1ZHHTQQVi7di1+//13rFu3Dg0aNMBee+2FgQMH4osvvkBWVhZWrVqFNWvWYK+99gq93a++\n+go33ngjAKBTp05o3bo1Fi9ejCOOOAIPPvggiouLcdZZZ6F9+/bo2rUrbrvtNgwaNAinn346jjnm\nmKQfZ1r73FnUGYaJh3POOQfjx4/Hf//7X/Tr1w+jR4/GunXrMGvWLMyZMwdNmzZNWg/aCy64ABMm\nTECtWrXQq1cvTJs2DR06dMCPP/6Irl274p///Cfuu+++pOxLhy13hmFqHP369cNVV12F9evX4/PP\nP8e4ceOw5557Ijc3F9OnT8eKFSti3uYxxxyD0aNHo2fPnli8eDFWrlyJjh07YunSpdhnn31w0003\nYeXKlfjpp5/QqVMnNGzYEP3790f9+vUxfPjwpB8jizvDMDWOLl26YMuWLWjRogWaNWuGCy+8EGec\ncQa6du2KoqIidOrUKeZtXn/99bjuuuvQtWtX5OTkYMSIEcjPz8e4ceMwatQo5ObmYq+99sIdd9yB\nGTNm4Pbbb0dWVhZyc3Px4osvJv0YRWUlbwyiqKhIzpw5M6Ft7Lcf8PPPwNSpwIknJqliDMNUKgsX\nLsR+++2X6mqkBbZzJYSYJaUsCvptWvvcFWy5MwzDuGG3DMMwTABz587FRRdd5CrLz8/H999/n6Ia\nBcPizjAME0DXrl0xJ9mdrSqZtHbLsKgzDMPYSWtxz86mbxZ5hmEYN2kt7nl59M3izjAM4yatxV0N\nLcjizjAM4yatxV1Z7gzDMJWFXz736kxai7uy3EtLU1sPhmGY6kZah0Iqy72kJLX1YBgmPlKQzj2p\n+dy3bt2KPn36WH83cuRIPP744xBCoFu3bhg1ahTWrFmDa6+9FkuXLgUAvPjiizjyyCMTP2gLLO4M\nw9QokpnPvaCgAO+++27U7xYsWIAHHngA33zzDRo3bow///wTAHDTTTfhuOOOw7vvvovy8nJs3bq1\n0o4zlLgLIU4B8DSAbADDpZQPe6x3CIBvAZwnpRyftFp6wG4ZhklvUpDOPan53KWUuOOOO6J+N23a\nNJxzzjlo3LgxAKBhw4YAgGnTpmHkyJEAgOzsbNSrV6/SjjNQ3IUQ2QCeB3AigGIAM4QQE6SUCyzr\nPQJgamVU1Iay3FncGYaJBZXPffXq1VH53HNzc9GmTZtQ+dzj/V1VEKZB9VAAS6SUS6WUJQDGArA5\no24E8D
aAtUmsny/Kcme3DMMwsdCvXz+MHTsW48ePxznnnINNmzbFlc/d63c9e/bEW2+9hQ0bNgDA\nbrfM8ccfvzu9b3l5eaUMr6cII+4tAPymzRdHynYjhGgB4EwAyU9K7ANb7gzDxIMtn/vMmTPRtWtX\njBw5MnQ+d6/fdenSBXfeeSeOO+44HHDAAbj11lsBAE8//TSmT5+Orl27onv37liwYIHf5hMiWQ2q\nTwEYJKWs8GuAEEJcDeBqANh7770T3qkKP01RSnqGYdKYuXPn7p5u3Lgxvv32W+t6fo2efr+75JJL\ncMkll7jKmjZtivfffz+O2sZOGHFfBaCVNt8yUqZTBGBsRNgbA+glhCiTUr6nrySlfAnASwAN1hFv\npRX33UeumUsvTXRLDMMwmUUYcZ8BoL0Qoi1I1M8DcIG+gpSyrZoWQowAMMkU9sqgsBB49NHK3gvD\nMDWdjMznLqUsE0IMAPARKBTyVSnlfCHEtZHlwyq5jgzDMCklHfO5h/K5SyknA5hslFlFXUp5aeLV\nYhgmk5FSBnYQqukkOr51WueWYRgm/SgoKMCGDRsSFq9MRkqJDRs2oKCgIO5tpHX6AYZh0o+WLVui\nuLgY69atS3VVqjUFBQVo2bJl3L9ncWcYpkrJzc1F27Ztg1dkEoLdMgzDMBkIizvDMEwGwuLOMAyT\ngYhUtVgLIdYBCJedJ5rGANYnsTrpAB9zzYCPuWaQyDG3llI2CVopZeKeCEKImVLKolTXoyrhY64Z\n8DHXDKrimNktwzAMk4GwuDMMw2Qg6SruL6W6AimAj7lmwMdcM6j0Y05LnzvDMAzjT7pa7gzDMIwP\naSfuQohThBCLhBBLhBCDU12fZCGEaCWEmC6EWCCEmC+EuDlS3lAI8bEQ4pfIdwPtN0Mi52GREOLk\n1NU+foQQ2UKI2UKISZH5TD/e+kKI8UKIn4UQC4UQR9SAYx4YuabnCSHGCCEKMu2YhRCvCiHWCiHm\naWUxH6MQorsQYm5k2TMikdSZUsq0+YDyyf8KYB8AeQD+B6BzquuVpGNrBuDgyHQhgMUAOgN4FMDg\nSPlgAI9EpjtHjj8fQNvIeclO9XHEcdy3AngTNMALasDxvg7gysh0HoD6mXzMoPGWlwGoFZkfB+DS\nTDtmAMcCOBjAPK0s5mME8AOAwwEIAB8CODXeOqWb5X4ogCVSyqVSyhIAYwH0SXGdkoKU8g8p5Y+R\n6S0AFoJujD4gQUDk+++R6T4Axkopd0kplwFYAjo/aYMQoiWA0wAM14oz+XjrgUTgFQCQUpZIKf9C\nBh9zhBwAtYQQOQBqA/gdGXbMUsovAPxpFMd0jEKIZgDqSim/k6T0I7XfxEy6iXsLAL9p88WRsoxC\nCNEGwEEAvgfQVEr5R2TRagBNI9OZcC6eAvB/ACq0skw+3rYA1gF4LeKKGi6EqIMMPmYp5SoAjwNY\nCeAPAJuklFORwcesEesxtohMm+VxkW7invEIIfYA8DaAW6SUm/Vlkad5RoQ3CSFOB7BWSjnLa51M\nOt4IOaBX9xellAcB2AZ6Xd9Nph1zxM/cB/Rgaw6gjhCiv75Oph2zjVQcY7qJ+yoArbT5lpGyjEAI\nkQsS9tFSyncixWsir2uIfK+NlKf7uTgKQG8hxHKQe62nEOINZO7xAmSJFUsp1ajK40Fin8nHfAKA\nZVLKdVLKUgDvADgSmX3MiliPcVVk2iyPi3QT9xkA2gsh2goh8gCcB2BCiuuUFCKt4q8AWCilfFJb\nNAHAJZHpSwC8r5WfJ4TIF0K0BdAe1BiTFkgph0gpW0op24D+x2lSyv7I0OMFACnlagC/CSE6RoqO\nB7AAGXzMIHfM4UKI2pFr/HhQe1ImH7MipmOMuHA2CyEOj5yri7XfxE6qW5njaJXuBYok+RXAnamu\nTxKP62jQa9tPAOZEPr0ANALwKYBfAHwCoKH2mzsj52EREmhVT/UHwN/g
RMtk9PECOBDAzMj//B6A\nBjXgmO8F8DOAeQBGgaJEMuqYAYwBtSmUgt7QrojnGAEURc7TrwCeQ6SjaTwf7qHKMAyTgaSbW4Zh\nGIYJAYs7wzBMBsLizjAMk4GwuDMMw2QgLO4MwzAZCIs7wzBMBsLizjAMk4GwuDMMw2Qg/w8xRYZZ\ng0Y6NwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x22e95cb1a90>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(history.epoch,history.history.get('acc'),c='r',label=\"acc\")\n",
    "plt.plot(history.epoch,history.history.get('val_acc'),c='b',label=\"val_acc\")\n",
    "plt.legend()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "添加dropout层进行过拟合抑制"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 86,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "model = keras.Sequential()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 87,
   "metadata": {},
   "outputs": [],
   "source": [
    "model.add(keras.layers.Dense(128,input_shape=(x.shape[1],),activation='relu'))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(128,activation='relu'))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(128,activation='relu'))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(1,activation='sigmoid'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 89,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "dense_40 (Dense)             (None, 128)               2048      \n",
      "_________________________________________________________________\n",
      "dropout_6 (Dropout)          (None, 128)               0         \n",
      "_________________________________________________________________\n",
      "dense_41 (Dense)             (None, 128)               16512     \n",
      "_________________________________________________________________\n",
      "dropout_7 (Dropout)          (None, 128)               0         \n",
      "_________________________________________________________________\n",
      "dense_42 (Dense)             (None, 128)               16512     \n",
      "_________________________________________________________________\n",
      "dropout_8 (Dropout)          (None, 128)               0         \n",
      "_________________________________________________________________\n",
      "dense_43 (Dense)             (None, 1)                 129       \n",
      "=================================================================\n",
      "Total params: 35,201\n",
      "Trainable params: 35,201\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 90,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 92,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 489 samples, validate on 164 samples\n",
      "Epoch 1/1000\n",
      "489/489 [==============================] - 0s 210us/step - loss: 0.3080 - acc: 0.8773 - val_loss: 0.7482 - val_acc: 0.7744\n",
      "Epoch 2/1000\n",
      "489/489 [==============================] - 0s 172us/step - loss: 0.3110 - acc: 0.8691 - val_loss: 0.7415 - val_acc: 0.7805\n",
      "Epoch 3/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2984 - acc: 0.8773 - val_loss: 0.7312 - val_acc: 0.8171\n",
      "Epoch 4/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3423 - acc: 0.8446 - val_loss: 0.7634 - val_acc: 0.7927\n",
      "Epoch 5/1000\n",
      "489/489 [==============================] - 0s 168us/step - loss: 0.3286 - acc: 0.8712 - val_loss: 0.8047 - val_acc: 0.7683\n",
      "Epoch 6/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3000 - acc: 0.8834 - val_loss: 0.7686 - val_acc: 0.8171\n",
      "Epoch 7/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3043 - acc: 0.8814 - val_loss: 0.7707 - val_acc: 0.7927\n",
      "Epoch 8/1000\n",
      "489/489 [==============================] - 0s 197us/step - loss: 0.3990 - acc: 0.8425 - val_loss: 0.8078 - val_acc: 0.7439\n",
      "Epoch 9/1000\n",
      "489/489 [==============================] - 0s 203us/step - loss: 0.3611 - acc: 0.8712 - val_loss: 0.7399 - val_acc: 0.8293\n",
      "Epoch 10/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3256 - acc: 0.8937 - val_loss: 0.7276 - val_acc: 0.8049\n",
      "Epoch 11/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.3545 - acc: 0.8569 - val_loss: 0.7157 - val_acc: 0.8293\n",
      "Epoch 12/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3610 - acc: 0.8712 - val_loss: 0.7503 - val_acc: 0.7439\n",
      "Epoch 13/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3416 - acc: 0.8609 - val_loss: 0.7335 - val_acc: 0.7378\n",
      "Epoch 14/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3218 - acc: 0.8487 - val_loss: 0.7642 - val_acc: 0.7195\n",
      "Epoch 15/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3180 - acc: 0.8691 - val_loss: 0.7148 - val_acc: 0.7683\n",
      "Epoch 16/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3508 - acc: 0.8569 - val_loss: 0.7276 - val_acc: 0.7988\n",
      "Epoch 17/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3295 - acc: 0.8609 - val_loss: 0.7392 - val_acc: 0.7988\n",
      "Epoch 18/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3047 - acc: 0.8753 - val_loss: 0.7162 - val_acc: 0.8232\n",
      "Epoch 19/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3066 - acc: 0.8916 - val_loss: 0.7561 - val_acc: 0.7744\n",
      "Epoch 20/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3167 - acc: 0.8712 - val_loss: 0.7317 - val_acc: 0.8049\n",
      "Epoch 21/1000\n",
      "489/489 [==============================] - 0s 209us/step - loss: 0.3455 - acc: 0.8691 - val_loss: 0.7270 - val_acc: 0.7561\n",
      "Epoch 22/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3436 - acc: 0.8650 - val_loss: 0.6692 - val_acc: 0.8110\n",
      "Epoch 23/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3383 - acc: 0.8609 - val_loss: 0.6788 - val_acc: 0.7622\n",
      "Epoch 24/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3698 - acc: 0.8691 - val_loss: 0.6709 - val_acc: 0.7927\n",
      "Epoch 25/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3838 - acc: 0.8405 - val_loss: 0.6814 - val_acc: 0.8049\n",
      "Epoch 26/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3413 - acc: 0.8589 - val_loss: 0.7105 - val_acc: 0.7439\n",
      "Epoch 27/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3192 - acc: 0.8589 - val_loss: 0.6806 - val_acc: 0.7927\n",
      "Epoch 28/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3336 - acc: 0.8609 - val_loss: 0.6536 - val_acc: 0.8293\n",
      "Epoch 29/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3511 - acc: 0.8630 - val_loss: 0.6938 - val_acc: 0.7927\n",
      "Epoch 30/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3755 - acc: 0.8466 - val_loss: 0.6695 - val_acc: 0.8110\n",
      "Epoch 31/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3518 - acc: 0.8569 - val_loss: 0.6624 - val_acc: 0.8232\n",
      "Epoch 32/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3024 - acc: 0.8916 - val_loss: 0.7076 - val_acc: 0.7866\n",
      "Epoch 33/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3549 - acc: 0.8650 - val_loss: 0.6824 - val_acc: 0.7988\n",
      "Epoch 34/1000\n",
      "489/489 [==============================] - 0s 206us/step - loss: 0.3268 - acc: 0.8671 - val_loss: 0.6793 - val_acc: 0.8110\n",
      "Epoch 35/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3286 - acc: 0.8569 - val_loss: 0.7517 - val_acc: 0.7866\n",
      "Epoch 36/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.3688 - acc: 0.8712 - val_loss: 0.7182 - val_acc: 0.8171\n",
      "Epoch 37/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3381 - acc: 0.8630 - val_loss: 0.7416 - val_acc: 0.7744\n",
      "Epoch 38/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3021 - acc: 0.8875 - val_loss: 0.7400 - val_acc: 0.7927\n",
      "Epoch 39/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2888 - acc: 0.8855 - val_loss: 0.7320 - val_acc: 0.8110\n",
      "Epoch 40/1000\n",
      "489/489 [==============================] - 0s 193us/step - loss: 0.3033 - acc: 0.8814 - val_loss: 0.7032 - val_acc: 0.8171\n",
      "Epoch 41/1000\n",
      "489/489 [==============================] - 0s 168us/step - loss: 0.3593 - acc: 0.8425 - val_loss: 0.7425 - val_acc: 0.7866\n",
      "Epoch 42/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3657 - acc: 0.8671 - val_loss: 0.7705 - val_acc: 0.7622\n",
      "Epoch 43/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3149 - acc: 0.8875 - val_loss: 0.7484 - val_acc: 0.7866\n",
      "Epoch 44/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3317 - acc: 0.8569 - val_loss: 0.7507 - val_acc: 0.7927\n",
      "Epoch 45/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3025 - acc: 0.8650 - val_loss: 0.7652 - val_acc: 0.8232\n",
      "Epoch 46/1000\n",
      "489/489 [==============================] - 0s 198us/step - loss: 0.3609 - acc: 0.8671 - val_loss: 0.7349 - val_acc: 0.7744\n",
      "Epoch 47/1000\n",
      "489/489 [==============================] - 0s 210us/step - loss: 0.3735 - acc: 0.8569 - val_loss: 0.7364 - val_acc: 0.7683\n",
      "Epoch 48/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3275 - acc: 0.8793 - val_loss: 0.7493 - val_acc: 0.7805\n",
      "Epoch 49/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3118 - acc: 0.8528 - val_loss: 0.7333 - val_acc: 0.8293\n",
      "Epoch 50/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3424 - acc: 0.8671 - val_loss: 0.6933 - val_acc: 0.8354\n",
      "Epoch 51/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3501 - acc: 0.8650 - val_loss: 0.7581 - val_acc: 0.7988\n",
      "Epoch 52/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3420 - acc: 0.8589 - val_loss: 0.7464 - val_acc: 0.8171\n",
      "Epoch 53/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3101 - acc: 0.8834 - val_loss: 0.7245 - val_acc: 0.8354\n",
      "Epoch 54/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3171 - acc: 0.8630 - val_loss: 0.7163 - val_acc: 0.8232\n",
      "Epoch 55/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3128 - acc: 0.8732 - val_loss: 0.7197 - val_acc: 0.8232\n",
      "Epoch 56/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3347 - acc: 0.8569 - val_loss: 0.6975 - val_acc: 0.8110\n",
      "Epoch 57/1000\n",
      "489/489 [==============================] - 0s 160us/step - loss: 0.3232 - acc: 0.8773 - val_loss: 0.6915 - val_acc: 0.8110\n",
      "Epoch 58/1000\n",
      "489/489 [==============================] - 0s 183us/step - loss: 0.3347 - acc: 0.8364 - val_loss: 0.6879 - val_acc: 0.7988\n",
      "Epoch 59/1000\n",
      "489/489 [==============================] - 0s 191us/step - loss: 0.3198 - acc: 0.8650 - val_loss: 0.7127 - val_acc: 0.7805\n",
      "Epoch 60/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 184us/step - loss: 0.2893 - acc: 0.8712 - val_loss: 0.7093 - val_acc: 0.8110\n",
      "Epoch 61/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3029 - acc: 0.8732 - val_loss: 0.7275 - val_acc: 0.8049\n",
      "Epoch 62/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3114 - acc: 0.8712 - val_loss: 0.7362 - val_acc: 0.7866\n",
      "Epoch 63/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3088 - acc: 0.8712 - val_loss: 0.7252 - val_acc: 0.7866\n",
      "Epoch 64/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3007 - acc: 0.8814 - val_loss: 0.7215 - val_acc: 0.8049\n",
      "Epoch 65/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3170 - acc: 0.8814 - val_loss: 0.7276 - val_acc: 0.8049\n",
      "Epoch 66/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3113 - acc: 0.8855 - val_loss: 0.7164 - val_acc: 0.8171\n",
      "Epoch 67/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3401 - acc: 0.8650 - val_loss: 0.7022 - val_acc: 0.8171\n",
      "Epoch 68/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.3538 - acc: 0.8609 - val_loss: 0.7096 - val_acc: 0.8049\n",
      "Epoch 69/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3374 - acc: 0.8773 - val_loss: 0.7123 - val_acc: 0.7927\n",
      "Epoch 70/1000\n",
      "489/489 [==============================] - 0s 187us/step - loss: 0.3470 - acc: 0.8671 - val_loss: 0.6861 - val_acc: 0.8232\n",
      "Epoch 71/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3322 - acc: 0.8691 - val_loss: 0.7306 - val_acc: 0.7927\n",
      "Epoch 72/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3118 - acc: 0.8691 - val_loss: 0.7445 - val_acc: 0.7866\n",
      "Epoch 73/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.3131 - acc: 0.8712 - val_loss: 0.6926 - val_acc: 0.8293\n",
      "Epoch 74/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.3218 - acc: 0.8753 - val_loss: 0.7464 - val_acc: 0.7866\n",
      "Epoch 75/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3069 - acc: 0.8712 - val_loss: 0.7424 - val_acc: 0.8476\n",
      "Epoch 76/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2827 - acc: 0.8896 - val_loss: 0.7417 - val_acc: 0.8232\n",
      "Epoch 77/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.3091 - acc: 0.8671 - val_loss: 0.7341 - val_acc: 0.8171\n",
      "Epoch 78/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3307 - acc: 0.8671 - val_loss: 0.7736 - val_acc: 0.7744\n",
      "Epoch 79/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3066 - acc: 0.8793 - val_loss: 0.7300 - val_acc: 0.8354\n",
      "Epoch 80/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3518 - acc: 0.8712 - val_loss: 0.7713 - val_acc: 0.7927\n",
      "Epoch 81/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.3943 - acc: 0.8609 - val_loss: 0.7795 - val_acc: 0.7256\n",
      "Epoch 82/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.3317 - acc: 0.8650 - val_loss: 0.7772 - val_acc: 0.7744\n",
      "Epoch 83/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3282 - acc: 0.8732 - val_loss: 0.6871 - val_acc: 0.8293\n",
      "Epoch 84/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3345 - acc: 0.8753 - val_loss: 0.7098 - val_acc: 0.8110\n",
      "Epoch 85/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2996 - acc: 0.8896 - val_loss: 0.7087 - val_acc: 0.8171\n",
      "Epoch 86/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3109 - acc: 0.8712 - val_loss: 0.6887 - val_acc: 0.8476\n",
      "Epoch 87/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2963 - acc: 0.8896 - val_loss: 0.6658 - val_acc: 0.8537\n",
      "Epoch 88/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3599 - acc: 0.8487 - val_loss: 0.7334 - val_acc: 0.7988\n",
      "Epoch 89/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3119 - acc: 0.8753 - val_loss: 0.7480 - val_acc: 0.7927\n",
      "Epoch 90/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3008 - acc: 0.8814 - val_loss: 0.7159 - val_acc: 0.8476\n",
      "Epoch 91/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3335 - acc: 0.8528 - val_loss: 0.7207 - val_acc: 0.8415\n",
      "Epoch 92/1000\n",
      "489/489 [==============================] - 0s 172us/step - loss: 0.3007 - acc: 0.8630 - val_loss: 0.7871 - val_acc: 0.7866\n",
      "Epoch 93/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3164 - acc: 0.8650 - val_loss: 0.7526 - val_acc: 0.8110\n",
      "Epoch 94/1000\n",
      "489/489 [==============================] - 0s 206us/step - loss: 0.3418 - acc: 0.8589 - val_loss: 0.6729 - val_acc: 0.8415\n",
      "Epoch 95/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3154 - acc: 0.8528 - val_loss: 0.7088 - val_acc: 0.8354\n",
      "Epoch 96/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3031 - acc: 0.8753 - val_loss: 0.7653 - val_acc: 0.7927\n",
      "Epoch 97/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3282 - acc: 0.8712 - val_loss: 0.7561 - val_acc: 0.8171\n",
      "Epoch 98/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3497 - acc: 0.8630 - val_loss: 0.7034 - val_acc: 0.8293\n",
      "Epoch 99/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.3166 - acc: 0.8896 - val_loss: 0.6949 - val_acc: 0.8354\n",
      "Epoch 100/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3339 - acc: 0.8507 - val_loss: 0.7116 - val_acc: 0.8476\n",
      "Epoch 101/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3138 - acc: 0.8896 - val_loss: 0.7351 - val_acc: 0.8232\n",
      "Epoch 102/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3057 - acc: 0.8712 - val_loss: 0.7454 - val_acc: 0.8232\n",
      "Epoch 103/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3105 - acc: 0.8793 - val_loss: 0.7253 - val_acc: 0.8476\n",
      "Epoch 104/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3254 - acc: 0.8630 - val_loss: 0.7545 - val_acc: 0.7988\n",
      "Epoch 105/1000\n",
      "489/489 [==============================] - 0s 156us/step - loss: 0.3268 - acc: 0.8773 - val_loss: 0.7217 - val_acc: 0.8171\n",
      "Epoch 106/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2762 - acc: 0.8753 - val_loss: 0.6742 - val_acc: 0.8354\n",
      "Epoch 107/1000\n",
      "489/489 [==============================] - 0s 160us/step - loss: 0.2817 - acc: 0.8814 - val_loss: 0.6846 - val_acc: 0.7988\n",
      "Epoch 108/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2843 - acc: 0.8732 - val_loss: 0.6375 - val_acc: 0.8415\n",
      "Epoch 109/1000\n",
      "489/489 [==============================] - 0s 144us/step - loss: 0.3253 - acc: 0.8548 - val_loss: 0.6273 - val_acc: 0.8537\n",
      "Epoch 110/1000\n",
      "489/489 [==============================] - 0s 146us/step - loss: 0.3330 - acc: 0.8609 - val_loss: 0.6826 - val_acc: 0.8171\n",
      "Epoch 111/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3777 - acc: 0.8712 - val_loss: 0.6814 - val_acc: 0.8110\n",
      "Epoch 112/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.4072 - acc: 0.8691 - val_loss: 0.6510 - val_acc: 0.8476\n",
      "Epoch 113/1000\n",
      "489/489 [==============================] - 0s 174us/step - loss: 0.4317 - acc: 0.8425 - val_loss: 0.7017 - val_acc: 0.7500\n",
      "Epoch 114/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3587 - acc: 0.8487 - val_loss: 0.6327 - val_acc: 0.8293\n",
      "Epoch 115/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3969 - acc: 0.8569 - val_loss: 0.6432 - val_acc: 0.8049\n",
      "Epoch 116/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3694 - acc: 0.8364 - val_loss: 0.6723 - val_acc: 0.7744\n",
      "Epoch 117/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3730 - acc: 0.8282 - val_loss: 0.6598 - val_acc: 0.7927\n",
      "Epoch 118/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3373 - acc: 0.8753 - val_loss: 0.6938 - val_acc: 0.7317\n",
      "Epoch 119/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3063 - acc: 0.8691 - val_loss: 0.6873 - val_acc: 0.8049\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 120/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3350 - acc: 0.8691 - val_loss: 0.6865 - val_acc: 0.7866\n",
      "Epoch 121/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3515 - acc: 0.8671 - val_loss: 0.6723 - val_acc: 0.8232\n",
      "Epoch 122/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3269 - acc: 0.8753 - val_loss: 0.6753 - val_acc: 0.8293\n",
      "Epoch 123/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3063 - acc: 0.8732 - val_loss: 0.6824 - val_acc: 0.8232\n",
      "Epoch 124/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3082 - acc: 0.8753 - val_loss: 0.6876 - val_acc: 0.7988\n",
      "Epoch 125/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3324 - acc: 0.8569 - val_loss: 0.7082 - val_acc: 0.7988\n",
      "Epoch 126/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3321 - acc: 0.8671 - val_loss: 0.6660 - val_acc: 0.8354\n",
      "Epoch 127/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3246 - acc: 0.8712 - val_loss: 0.6565 - val_acc: 0.8537\n",
      "Epoch 128/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3281 - acc: 0.8773 - val_loss: 0.6870 - val_acc: 0.8415\n",
      "Epoch 129/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3091 - acc: 0.8814 - val_loss: 0.7382 - val_acc: 0.8171\n",
      "Epoch 130/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3116 - acc: 0.8712 - val_loss: 0.7260 - val_acc: 0.8354\n",
      "Epoch 131/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3446 - acc: 0.8753 - val_loss: 0.7344 - val_acc: 0.8354\n",
      "Epoch 132/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3925 - acc: 0.8548 - val_loss: 0.6768 - val_acc: 0.8537\n",
      "Epoch 133/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3516 - acc: 0.8528 - val_loss: 0.6709 - val_acc: 0.8293\n",
      "Epoch 134/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3401 - acc: 0.8834 - val_loss: 0.6885 - val_acc: 0.8232\n",
      "Epoch 135/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2801 - acc: 0.8712 - val_loss: 0.6948 - val_acc: 0.8171\n",
      "Epoch 136/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3119 - acc: 0.8814 - val_loss: 0.6837 - val_acc: 0.8171\n",
      "Epoch 137/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3148 - acc: 0.8814 - val_loss: 0.6900 - val_acc: 0.8049\n",
      "Epoch 138/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3264 - acc: 0.8712 - val_loss: 0.6915 - val_acc: 0.8110\n",
      "Epoch 139/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3223 - acc: 0.8855 - val_loss: 0.6689 - val_acc: 0.8232\n",
      "Epoch 140/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3627 - acc: 0.8609 - val_loss: 0.6710 - val_acc: 0.8415\n",
      "Epoch 141/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3224 - acc: 0.8773 - val_loss: 0.6992 - val_acc: 0.7744\n",
      "Epoch 142/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3245 - acc: 0.9018 - val_loss: 0.6627 - val_acc: 0.8415\n",
      "Epoch 143/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3294 - acc: 0.8569 - val_loss: 0.6705 - val_acc: 0.8232\n",
      "Epoch 144/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3036 - acc: 0.8855 - val_loss: 0.6626 - val_acc: 0.8171\n",
      "Epoch 145/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3151 - acc: 0.8753 - val_loss: 0.6746 - val_acc: 0.8110\n",
      "Epoch 146/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3368 - acc: 0.8732 - val_loss: 0.6337 - val_acc: 0.8415\n",
      "Epoch 147/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3198 - acc: 0.8671 - val_loss: 0.6943 - val_acc: 0.8171\n",
      "Epoch 148/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3106 - acc: 0.8691 - val_loss: 0.7039 - val_acc: 0.8293\n",
      "Epoch 149/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3646 - acc: 0.8671 - val_loss: 0.7229 - val_acc: 0.8049\n",
      "Epoch 150/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3459 - acc: 0.8650 - val_loss: 0.7407 - val_acc: 0.7927\n",
      "Epoch 151/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3074 - acc: 0.8773 - val_loss: 0.7108 - val_acc: 0.8171\n",
      "Epoch 152/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3157 - acc: 0.8712 - val_loss: 0.7178 - val_acc: 0.8171\n",
      "Epoch 153/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3305 - acc: 0.8834 - val_loss: 0.6983 - val_acc: 0.8232\n",
      "Epoch 154/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3028 - acc: 0.8855 - val_loss: 0.7121 - val_acc: 0.8110\n",
      "Epoch 155/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3151 - acc: 0.8671 - val_loss: 0.6766 - val_acc: 0.8171\n",
      "Epoch 156/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3112 - acc: 0.8834 - val_loss: 0.6716 - val_acc: 0.8293\n",
      "Epoch 157/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3351 - acc: 0.8650 - val_loss: 0.6353 - val_acc: 0.8171\n",
      "Epoch 158/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3857 - acc: 0.8405 - val_loss: 0.6653 - val_acc: 0.7073\n",
      "Epoch 159/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3243 - acc: 0.8609 - val_loss: 0.6222 - val_acc: 0.8476\n",
      "Epoch 160/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3821 - acc: 0.8528 - val_loss: 0.6663 - val_acc: 0.7317\n",
      "Epoch 161/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3323 - acc: 0.8732 - val_loss: 0.6635 - val_acc: 0.7866\n",
      "Epoch 162/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3496 - acc: 0.8753 - val_loss: 0.6677 - val_acc: 0.8049\n",
      "Epoch 163/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3542 - acc: 0.8773 - val_loss: 0.6797 - val_acc: 0.7988\n",
      "Epoch 164/1000\n",
      "489/489 [==============================] - 0s 140us/step - loss: 0.2958 - acc: 0.8957 - val_loss: 0.6820 - val_acc: 0.7805\n",
      "Epoch 165/1000\n",
      "489/489 [==============================] - 0s 146us/step - loss: 0.3309 - acc: 0.8630 - val_loss: 0.6933 - val_acc: 0.7988\n",
      "Epoch 166/1000\n",
      "489/489 [==============================] - 0s 146us/step - loss: 0.3474 - acc: 0.8691 - val_loss: 0.7062 - val_acc: 0.7805\n",
      "Epoch 167/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3051 - acc: 0.8589 - val_loss: 0.7150 - val_acc: 0.7927\n",
      "Epoch 168/1000\n",
      "489/489 [==============================] - 0s 142us/step - loss: 0.3442 - acc: 0.8753 - val_loss: 0.7204 - val_acc: 0.7927\n",
      "Epoch 169/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3184 - acc: 0.8896 - val_loss: 0.6708 - val_acc: 0.8354\n",
      "Epoch 170/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3434 - acc: 0.8589 - val_loss: 0.6877 - val_acc: 0.8232\n",
      "Epoch 171/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3603 - acc: 0.8609 - val_loss: 0.7151 - val_acc: 0.7500\n",
      "Epoch 172/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3512 - acc: 0.8650 - val_loss: 0.7078 - val_acc: 0.7500\n",
      "Epoch 173/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3421 - acc: 0.8609 - val_loss: 0.7070 - val_acc: 0.7683\n",
      "Epoch 174/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.3548 - acc: 0.8487 - val_loss: 0.6861 - val_acc: 0.7744\n",
      "Epoch 175/1000\n",
      "489/489 [==============================] - 0s 194us/step - loss: 0.3293 - acc: 0.8569 - val_loss: 0.6687 - val_acc: 0.7866\n",
      "Epoch 176/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.3597 - acc: 0.8875 - val_loss: 0.6585 - val_acc: 0.8049\n",
      "Epoch 177/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3632 - acc: 0.8732 - val_loss: 0.7052 - val_acc: 0.7500\n",
      "Epoch 178/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.3702 - acc: 0.8446 - val_loss: 0.7660 - val_acc: 0.7134\n",
      "Epoch 179/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 179us/step - loss: 0.3358 - acc: 0.8712 - val_loss: 0.7554 - val_acc: 0.7805\n",
      "Epoch 180/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3939 - acc: 0.8589 - val_loss: 0.7668 - val_acc: 0.7561\n",
      "Epoch 181/1000\n",
      "489/489 [==============================] - 0s 164us/step - loss: 0.4452 - acc: 0.8650 - val_loss: 0.7179 - val_acc: 0.7988\n",
      "Epoch 182/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.3245 - acc: 0.8814 - val_loss: 0.6838 - val_acc: 0.8110\n",
      "Epoch 183/1000\n",
      "489/489 [==============================] - 0s 194us/step - loss: 0.3279 - acc: 0.8773 - val_loss: 0.7233 - val_acc: 0.7866\n",
      "Epoch 184/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3077 - acc: 0.8834 - val_loss: 0.7061 - val_acc: 0.8171\n",
      "Epoch 185/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3094 - acc: 0.8691 - val_loss: 0.7359 - val_acc: 0.7988\n",
      "Epoch 186/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3188 - acc: 0.8712 - val_loss: 0.7909 - val_acc: 0.7622\n",
      "Epoch 187/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3501 - acc: 0.8650 - val_loss: 0.7605 - val_acc: 0.7622\n",
      "Epoch 188/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3171 - acc: 0.8650 - val_loss: 0.7184 - val_acc: 0.7927\n",
      "Epoch 189/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3233 - acc: 0.8793 - val_loss: 0.7068 - val_acc: 0.7927\n",
      "Epoch 190/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2939 - acc: 0.8814 - val_loss: 0.7275 - val_acc: 0.8049\n",
      "Epoch 191/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3142 - acc: 0.8814 - val_loss: 0.7010 - val_acc: 0.7866\n",
      "Epoch 192/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3776 - acc: 0.8569 - val_loss: 0.6810 - val_acc: 0.7561\n",
      "Epoch 193/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3114 - acc: 0.8671 - val_loss: 0.6670 - val_acc: 0.8354\n",
      "Epoch 194/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3369 - acc: 0.8609 - val_loss: 0.6478 - val_acc: 0.8354\n",
      "Epoch 195/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3026 - acc: 0.8569 - val_loss: 0.6753 - val_acc: 0.7927\n",
      "Epoch 196/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3362 - acc: 0.8569 - val_loss: 0.6702 - val_acc: 0.7866\n",
      "Epoch 197/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3180 - acc: 0.8834 - val_loss: 0.6861 - val_acc: 0.8049\n",
      "Epoch 198/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2984 - acc: 0.8732 - val_loss: 0.7472 - val_acc: 0.7500\n",
      "Epoch 199/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2917 - acc: 0.8773 - val_loss: 0.7714 - val_acc: 0.7683\n",
      "Epoch 200/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3685 - acc: 0.8589 - val_loss: 0.7564 - val_acc: 0.7744\n",
      "Epoch 201/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2999 - acc: 0.8753 - val_loss: 0.7121 - val_acc: 0.8049\n",
      "Epoch 202/1000\n",
      "489/489 [==============================] - 0s 140us/step - loss: 0.3146 - acc: 0.8732 - val_loss: 0.7381 - val_acc: 0.7927\n",
      "Epoch 203/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3291 - acc: 0.8609 - val_loss: 0.7331 - val_acc: 0.7622\n",
      "Epoch 204/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3424 - acc: 0.8609 - val_loss: 0.7349 - val_acc: 0.7927\n",
      "Epoch 205/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3398 - acc: 0.8650 - val_loss: 0.7123 - val_acc: 0.7988\n",
      "Epoch 206/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3181 - acc: 0.8671 - val_loss: 0.7286 - val_acc: 0.7744\n",
      "Epoch 207/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3787 - acc: 0.8691 - val_loss: 0.7317 - val_acc: 0.7683\n",
      "Epoch 208/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3246 - acc: 0.8773 - val_loss: 0.7076 - val_acc: 0.8049\n",
      "Epoch 209/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3198 - acc: 0.8773 - val_loss: 0.7343 - val_acc: 0.7683\n",
      "Epoch 210/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3263 - acc: 0.8589 - val_loss: 0.7183 - val_acc: 0.8049\n",
      "Epoch 211/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3352 - acc: 0.8609 - val_loss: 0.7159 - val_acc: 0.7500\n",
      "Epoch 212/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.3006 - acc: 0.8875 - val_loss: 0.7063 - val_acc: 0.7805\n",
      "Epoch 213/1000\n",
      "489/489 [==============================] - 0s 172us/step - loss: 0.3378 - acc: 0.8528 - val_loss: 0.6925 - val_acc: 0.7927\n",
      "Epoch 214/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2907 - acc: 0.8753 - val_loss: 0.7165 - val_acc: 0.8110\n",
      "Epoch 215/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3080 - acc: 0.8732 - val_loss: 0.7132 - val_acc: 0.8049\n",
      "Epoch 216/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3321 - acc: 0.8712 - val_loss: 0.6691 - val_acc: 0.8354\n",
      "Epoch 217/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3525 - acc: 0.8630 - val_loss: 0.6344 - val_acc: 0.8293\n",
      "Epoch 218/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3387 - acc: 0.8732 - val_loss: 0.6756 - val_acc: 0.7927\n",
      "Epoch 219/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3050 - acc: 0.8753 - val_loss: 0.7581 - val_acc: 0.7622\n",
      "Epoch 220/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3093 - acc: 0.8732 - val_loss: 0.7550 - val_acc: 0.7866\n",
      "Epoch 221/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3612 - acc: 0.8691 - val_loss: 0.7354 - val_acc: 0.7927\n",
      "Epoch 222/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3226 - acc: 0.8732 - val_loss: 0.6771 - val_acc: 0.8354\n",
      "Epoch 223/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3036 - acc: 0.8691 - val_loss: 0.6908 - val_acc: 0.8171\n",
      "Epoch 224/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3277 - acc: 0.8548 - val_loss: 0.6749 - val_acc: 0.8049\n",
      "Epoch 225/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3053 - acc: 0.8793 - val_loss: 0.7181 - val_acc: 0.8110\n",
      "Epoch 226/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.3454 - acc: 0.8507 - val_loss: 0.7139 - val_acc: 0.7744\n",
      "Epoch 227/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3307 - acc: 0.8630 - val_loss: 0.6956 - val_acc: 0.7866\n",
      "Epoch 228/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3459 - acc: 0.8650 - val_loss: 0.6814 - val_acc: 0.8110\n",
      "Epoch 229/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.2805 - acc: 0.8896 - val_loss: 0.6505 - val_acc: 0.8354\n",
      "Epoch 230/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3534 - acc: 0.8814 - val_loss: 0.6868 - val_acc: 0.8110\n",
      "Epoch 231/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2715 - acc: 0.8916 - val_loss: 0.6804 - val_acc: 0.8293\n",
      "Epoch 232/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2961 - acc: 0.8896 - val_loss: 0.6975 - val_acc: 0.8415\n",
      "Epoch 233/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2988 - acc: 0.8855 - val_loss: 0.7079 - val_acc: 0.8110\n",
      "Epoch 234/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3354 - acc: 0.8691 - val_loss: 0.7650 - val_acc: 0.7805\n",
      "Epoch 235/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3442 - acc: 0.8732 - val_loss: 0.7402 - val_acc: 0.7683\n",
      "Epoch 236/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3101 - acc: 0.8814 - val_loss: 0.7119 - val_acc: 0.8110\n",
      "Epoch 237/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3145 - acc: 0.8712 - val_loss: 0.6987 - val_acc: 0.8232\n",
      "Epoch 238/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 153us/step - loss: 0.2971 - acc: 0.8998 - val_loss: 0.7312 - val_acc: 0.7805\n",
      "Epoch 239/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3092 - acc: 0.8855 - val_loss: 0.7137 - val_acc: 0.8110\n",
      "Epoch 240/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2823 - acc: 0.8937 - val_loss: 0.7235 - val_acc: 0.8171\n",
      "Epoch 241/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3287 - acc: 0.8712 - val_loss: 0.7380 - val_acc: 0.7927\n",
      "Epoch 242/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2690 - acc: 0.8998 - val_loss: 0.7262 - val_acc: 0.8171\n",
      "Epoch 243/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3068 - acc: 0.8855 - val_loss: 0.6986 - val_acc: 0.8293\n",
      "Epoch 244/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2810 - acc: 0.8855 - val_loss: 0.7020 - val_acc: 0.8293\n",
      "Epoch 245/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3251 - acc: 0.8630 - val_loss: 0.6905 - val_acc: 0.8415\n",
      "Epoch 246/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2855 - acc: 0.8896 - val_loss: 0.7039 - val_acc: 0.8354\n",
      "Epoch 247/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2869 - acc: 0.8937 - val_loss: 0.7399 - val_acc: 0.8110\n",
      "Epoch 248/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2881 - acc: 0.8978 - val_loss: 0.7485 - val_acc: 0.8171\n",
      "Epoch 249/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3128 - acc: 0.8896 - val_loss: 0.7099 - val_acc: 0.8293\n",
      "Epoch 250/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2669 - acc: 0.8875 - val_loss: 0.6800 - val_acc: 0.8232\n",
      "Epoch 251/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2850 - acc: 0.8793 - val_loss: 0.6853 - val_acc: 0.8049\n",
      "Epoch 252/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3115 - acc: 0.8834 - val_loss: 0.6674 - val_acc: 0.8415\n",
      "Epoch 253/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3506 - acc: 0.8855 - val_loss: 0.7051 - val_acc: 0.7866\n",
      "Epoch 254/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3489 - acc: 0.8712 - val_loss: 0.6702 - val_acc: 0.8293\n",
      "Epoch 255/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3202 - acc: 0.8896 - val_loss: 0.7961 - val_acc: 0.7561\n",
      "Epoch 256/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3605 - acc: 0.8528 - val_loss: 0.8025 - val_acc: 0.7195\n",
      "Epoch 257/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3634 - acc: 0.8487 - val_loss: 0.7291 - val_acc: 0.7561\n",
      "Epoch 258/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3288 - acc: 0.8793 - val_loss: 0.7266 - val_acc: 0.7744\n",
      "Epoch 259/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2904 - acc: 0.8875 - val_loss: 0.7526 - val_acc: 0.7927\n",
      "Epoch 260/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3065 - acc: 0.8753 - val_loss: 0.7406 - val_acc: 0.7683\n",
      "Epoch 261/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3400 - acc: 0.8548 - val_loss: 0.7274 - val_acc: 0.7683\n",
      "Epoch 262/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3584 - acc: 0.8650 - val_loss: 0.6888 - val_acc: 0.7683\n",
      "Epoch 263/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3789 - acc: 0.8691 - val_loss: 0.7016 - val_acc: 0.7561\n",
      "Epoch 264/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3044 - acc: 0.8732 - val_loss: 0.7540 - val_acc: 0.7500\n",
      "Epoch 265/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3080 - acc: 0.8650 - val_loss: 0.7227 - val_acc: 0.7805\n",
      "Epoch 266/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2877 - acc: 0.8753 - val_loss: 0.7585 - val_acc: 0.7683\n",
      "Epoch 267/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3028 - acc: 0.8998 - val_loss: 0.7774 - val_acc: 0.7622\n",
      "Epoch 268/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.3600 - acc: 0.8732 - val_loss: 0.7839 - val_acc: 0.7683\n",
      "Epoch 269/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3220 - acc: 0.8671 - val_loss: 0.7626 - val_acc: 0.7805\n",
      "Epoch 270/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3076 - acc: 0.8773 - val_loss: 0.7932 - val_acc: 0.7439\n",
      "Epoch 271/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3129 - acc: 0.8753 - val_loss: 0.7641 - val_acc: 0.7439\n",
      "Epoch 272/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3442 - acc: 0.8753 - val_loss: 0.7464 - val_acc: 0.7317\n",
      "Epoch 273/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3129 - acc: 0.8712 - val_loss: 0.7110 - val_acc: 0.7988\n",
      "Epoch 274/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3139 - acc: 0.8753 - val_loss: 0.7193 - val_acc: 0.7256\n",
      "Epoch 275/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3144 - acc: 0.8814 - val_loss: 0.7259 - val_acc: 0.7256\n",
      "Epoch 276/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2973 - acc: 0.8793 - val_loss: 0.7424 - val_acc: 0.7622\n",
      "Epoch 277/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2930 - acc: 0.8712 - val_loss: 0.7719 - val_acc: 0.7805\n",
      "Epoch 278/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2983 - acc: 0.8712 - val_loss: 0.7667 - val_acc: 0.7805\n",
      "Epoch 279/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2916 - acc: 0.8773 - val_loss: 0.7358 - val_acc: 0.7927\n",
      "Epoch 280/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3186 - acc: 0.8896 - val_loss: 0.6817 - val_acc: 0.8049\n",
      "Epoch 281/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3352 - acc: 0.8609 - val_loss: 0.6588 - val_acc: 0.8354\n",
      "Epoch 282/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3158 - acc: 0.8712 - val_loss: 0.6781 - val_acc: 0.7378\n",
      "Epoch 283/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3256 - acc: 0.8609 - val_loss: 0.6868 - val_acc: 0.7561\n",
      "Epoch 284/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3839 - acc: 0.8691 - val_loss: 0.7090 - val_acc: 0.8171\n",
      "Epoch 285/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3204 - acc: 0.8609 - val_loss: 0.7178 - val_acc: 0.8171\n",
      "Epoch 286/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3604 - acc: 0.8589 - val_loss: 0.7305 - val_acc: 0.8293\n",
      "Epoch 287/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3049 - acc: 0.8712 - val_loss: 0.7161 - val_acc: 0.7805\n",
      "Epoch 288/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3366 - acc: 0.8671 - val_loss: 0.6798 - val_acc: 0.7988\n",
      "Epoch 289/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3101 - acc: 0.8896 - val_loss: 0.7060 - val_acc: 0.7988\n",
      "Epoch 290/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2790 - acc: 0.9039 - val_loss: 0.7222 - val_acc: 0.7988\n",
      "Epoch 291/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2657 - acc: 0.8978 - val_loss: 0.7053 - val_acc: 0.7988\n",
      "Epoch 292/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3605 - acc: 0.8671 - val_loss: 0.7666 - val_acc: 0.6707\n",
      "Epoch 293/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3156 - acc: 0.8630 - val_loss: 0.6933 - val_acc: 0.7622\n",
      "Epoch 294/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3315 - acc: 0.8875 - val_loss: 0.6626 - val_acc: 0.7866\n",
      "Epoch 295/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2719 - acc: 0.8753 - val_loss: 0.6825 - val_acc: 0.8354\n",
      "Epoch 296/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3408 - acc: 0.8569 - val_loss: 0.6946 - val_acc: 0.8232\n",
      "Epoch 297/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 145us/step - loss: 0.3259 - acc: 0.8528 - val_loss: 0.6971 - val_acc: 0.7683\n",
      "Epoch 298/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3211 - acc: 0.8650 - val_loss: 0.7172 - val_acc: 0.7927\n",
      "Epoch 299/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3306 - acc: 0.8855 - val_loss: 0.7763 - val_acc: 0.7317\n",
      "Epoch 300/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3990 - acc: 0.8732 - val_loss: 0.7666 - val_acc: 0.7500\n",
      "Epoch 301/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3277 - acc: 0.8814 - val_loss: 0.7308 - val_acc: 0.8110\n",
      "Epoch 302/1000\n",
      "489/489 [==============================] - 0s 126us/step - loss: 0.3155 - acc: 0.8712 - val_loss: 0.6801 - val_acc: 0.8354\n",
      "Epoch 303/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2904 - acc: 0.8814 - val_loss: 0.6713 - val_acc: 0.8354\n",
      "Epoch 304/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2988 - acc: 0.8773 - val_loss: 0.6663 - val_acc: 0.8537\n",
      "Epoch 305/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3112 - acc: 0.8814 - val_loss: 0.6850 - val_acc: 0.8293\n",
      "Epoch 306/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2974 - acc: 0.8834 - val_loss: 0.7370 - val_acc: 0.7927\n",
      "Epoch 307/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2768 - acc: 0.8875 - val_loss: 0.7486 - val_acc: 0.7988\n",
      "Epoch 308/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2905 - acc: 0.8937 - val_loss: 0.7408 - val_acc: 0.8049\n",
      "Epoch 309/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3050 - acc: 0.8671 - val_loss: 0.7449 - val_acc: 0.7866\n",
      "Epoch 310/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3677 - acc: 0.8814 - val_loss: 0.7196 - val_acc: 0.8049\n",
      "Epoch 311/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3378 - acc: 0.8793 - val_loss: 0.7063 - val_acc: 0.8232\n",
      "Epoch 312/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3080 - acc: 0.8773 - val_loss: 0.7612 - val_acc: 0.7195\n",
      "Epoch 313/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3197 - acc: 0.8650 - val_loss: 0.7603 - val_acc: 0.7195\n",
      "Epoch 314/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2779 - acc: 0.8916 - val_loss: 0.7932 - val_acc: 0.7988\n",
      "Epoch 315/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3166 - acc: 0.8630 - val_loss: 0.7747 - val_acc: 0.7439\n",
      "Epoch 316/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2996 - acc: 0.8916 - val_loss: 0.7483 - val_acc: 0.7561\n",
      "Epoch 317/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2693 - acc: 0.8793 - val_loss: 0.7230 - val_acc: 0.8110\n",
      "Epoch 318/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3300 - acc: 0.8712 - val_loss: 0.6888 - val_acc: 0.8110\n",
      "Epoch 319/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2863 - acc: 0.8937 - val_loss: 0.7033 - val_acc: 0.8293\n",
      "Epoch 320/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3133 - acc: 0.8855 - val_loss: 0.7503 - val_acc: 0.8293\n",
      "Epoch 321/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3271 - acc: 0.8875 - val_loss: 0.7215 - val_acc: 0.8415\n",
      "Epoch 322/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2715 - acc: 0.8814 - val_loss: 0.7066 - val_acc: 0.7988\n",
      "Epoch 323/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2870 - acc: 0.8875 - val_loss: 0.7354 - val_acc: 0.7866\n",
      "Epoch 324/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2988 - acc: 0.8998 - val_loss: 0.7496 - val_acc: 0.8110\n",
      "Epoch 325/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2890 - acc: 0.8896 - val_loss: 0.7542 - val_acc: 0.7988\n",
      "Epoch 326/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2762 - acc: 0.8998 - val_loss: 0.7446 - val_acc: 0.7988\n",
      "Epoch 327/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2837 - acc: 0.8773 - val_loss: 0.7608 - val_acc: 0.7988\n",
      "Epoch 328/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2837 - acc: 0.8814 - val_loss: 0.7711 - val_acc: 0.7805\n",
      "Epoch 329/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2866 - acc: 0.8916 - val_loss: 0.7356 - val_acc: 0.8110\n",
      "Epoch 330/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2650 - acc: 0.8855 - val_loss: 0.7500 - val_acc: 0.7744\n",
      "Epoch 331/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2818 - acc: 0.8957 - val_loss: 0.7764 - val_acc: 0.8171\n",
      "Epoch 332/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2846 - acc: 0.8671 - val_loss: 0.7972 - val_acc: 0.7805\n",
      "Epoch 333/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2561 - acc: 0.8957 - val_loss: 0.8508 - val_acc: 0.7866\n",
      "Epoch 334/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2770 - acc: 0.8712 - val_loss: 0.8186 - val_acc: 0.8049\n",
      "Epoch 335/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2851 - acc: 0.8978 - val_loss: 0.7847 - val_acc: 0.8232\n",
      "Epoch 336/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2622 - acc: 0.8753 - val_loss: 0.7891 - val_acc: 0.8232\n",
      "Epoch 337/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2765 - acc: 0.8957 - val_loss: 0.7806 - val_acc: 0.8049\n",
      "Epoch 338/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3297 - acc: 0.8855 - val_loss: 0.8218 - val_acc: 0.8110\n",
      "Epoch 339/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3502 - acc: 0.8814 - val_loss: 0.7926 - val_acc: 0.7866\n",
      "Epoch 340/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2861 - acc: 0.8814 - val_loss: 0.8112 - val_acc: 0.8232\n",
      "Epoch 341/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2721 - acc: 0.8793 - val_loss: 0.7728 - val_acc: 0.7988\n",
      "Epoch 342/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3043 - acc: 0.8957 - val_loss: 0.7533 - val_acc: 0.8598\n",
      "Epoch 343/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3014 - acc: 0.8650 - val_loss: 0.8129 - val_acc: 0.8049\n",
      "Epoch 344/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3432 - acc: 0.8650 - val_loss: 0.8339 - val_acc: 0.7195\n",
      "Epoch 345/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3200 - acc: 0.8793 - val_loss: 0.8043 - val_acc: 0.7622\n",
      "Epoch 346/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3189 - acc: 0.8548 - val_loss: 0.7696 - val_acc: 0.8049\n",
      "Epoch 347/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3656 - acc: 0.8528 - val_loss: 0.7248 - val_acc: 0.8415\n",
      "Epoch 348/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2962 - acc: 0.9059 - val_loss: 0.7338 - val_acc: 0.8171\n",
      "Epoch 349/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2904 - acc: 0.8834 - val_loss: 0.7610 - val_acc: 0.8049\n",
      "Epoch 350/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2745 - acc: 0.8978 - val_loss: 0.7650 - val_acc: 0.8110\n",
      "Epoch 351/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2935 - acc: 0.8773 - val_loss: 0.7375 - val_acc: 0.8537\n",
      "Epoch 352/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3174 - acc: 0.8834 - val_loss: 0.7588 - val_acc: 0.8171\n",
      "Epoch 353/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3815 - acc: 0.8691 - val_loss: 0.7576 - val_acc: 0.8537\n",
      "Epoch 354/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3270 - acc: 0.8691 - val_loss: 0.7881 - val_acc: 0.7866\n",
      "Epoch 355/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3152 - acc: 0.8753 - val_loss: 0.8128 - val_acc: 0.7927\n",
      "Epoch 356/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 151us/step - loss: 0.2996 - acc: 0.8855 - val_loss: 0.7796 - val_acc: 0.8537\n",
      "Epoch 357/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2898 - acc: 0.8875 - val_loss: 0.8165 - val_acc: 0.7927\n",
      "Epoch 358/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2993 - acc: 0.8589 - val_loss: 0.7803 - val_acc: 0.8171\n",
      "Epoch 359/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3148 - acc: 0.8916 - val_loss: 0.7903 - val_acc: 0.8110\n",
      "Epoch 360/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.3016 - acc: 0.8589 - val_loss: 0.7762 - val_acc: 0.7988\n",
      "Epoch 361/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2686 - acc: 0.9039 - val_loss: 0.7917 - val_acc: 0.8354\n",
      "Epoch 362/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3083 - acc: 0.8978 - val_loss: 0.8073 - val_acc: 0.8171\n",
      "Epoch 363/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2647 - acc: 0.8978 - val_loss: 0.8587 - val_acc: 0.8232\n",
      "Epoch 364/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2961 - acc: 0.8937 - val_loss: 0.8879 - val_acc: 0.8110\n",
      "Epoch 365/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3367 - acc: 0.8896 - val_loss: 0.8711 - val_acc: 0.8049\n",
      "Epoch 366/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2839 - acc: 0.8773 - val_loss: 0.8659 - val_acc: 0.8232\n",
      "Epoch 367/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2786 - acc: 0.8916 - val_loss: 0.8844 - val_acc: 0.8171\n",
      "Epoch 368/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2736 - acc: 0.8937 - val_loss: 0.8694 - val_acc: 0.8293\n",
      "Epoch 369/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2971 - acc: 0.8875 - val_loss: 0.8457 - val_acc: 0.8415\n",
      "Epoch 370/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2962 - acc: 0.8834 - val_loss: 0.8503 - val_acc: 0.8110\n",
      "Epoch 371/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2861 - acc: 0.8937 - val_loss: 0.8433 - val_acc: 0.8293\n",
      "Epoch 372/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2758 - acc: 0.9100 - val_loss: 0.8217 - val_acc: 0.8476\n",
      "Epoch 373/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3080 - acc: 0.8691 - val_loss: 0.8587 - val_acc: 0.7744\n",
      "Epoch 374/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2986 - acc: 0.8834 - val_loss: 0.8227 - val_acc: 0.8049\n",
      "Epoch 375/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2584 - acc: 0.8998 - val_loss: 0.7965 - val_acc: 0.8354\n",
      "Epoch 376/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3003 - acc: 0.8998 - val_loss: 0.8366 - val_acc: 0.8171\n",
      "Epoch 377/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2964 - acc: 0.8732 - val_loss: 0.8098 - val_acc: 0.8476\n",
      "Epoch 378/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2612 - acc: 0.8978 - val_loss: 0.7843 - val_acc: 0.8415\n",
      "Epoch 379/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3590 - acc: 0.8773 - val_loss: 0.7699 - val_acc: 0.8171\n",
      "Epoch 380/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3032 - acc: 0.8793 - val_loss: 0.7943 - val_acc: 0.8476\n",
      "Epoch 381/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2756 - acc: 0.9039 - val_loss: 0.8124 - val_acc: 0.8110\n",
      "Epoch 382/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2809 - acc: 0.8773 - val_loss: 0.7865 - val_acc: 0.8415\n",
      "Epoch 383/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2775 - acc: 0.9018 - val_loss: 0.8064 - val_acc: 0.8232\n",
      "Epoch 384/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2779 - acc: 0.8732 - val_loss: 0.7888 - val_acc: 0.8476\n",
      "Epoch 385/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2951 - acc: 0.8896 - val_loss: 0.8072 - val_acc: 0.8293\n",
      "Epoch 386/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2679 - acc: 0.8957 - val_loss: 0.7896 - val_acc: 0.8171\n",
      "Epoch 387/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2706 - acc: 0.8937 - val_loss: 0.7684 - val_acc: 0.7988\n",
      "Epoch 388/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3453 - acc: 0.8671 - val_loss: 0.7387 - val_acc: 0.8476\n",
      "Epoch 389/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3115 - acc: 0.8712 - val_loss: 0.7216 - val_acc: 0.8537\n",
      "Epoch 390/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3095 - acc: 0.8814 - val_loss: 0.7337 - val_acc: 0.8049\n",
      "Epoch 391/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3023 - acc: 0.8650 - val_loss: 0.7834 - val_acc: 0.7744\n",
      "Epoch 392/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.3686 - acc: 0.8916 - val_loss: 0.7417 - val_acc: 0.8293\n",
      "Epoch 393/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3709 - acc: 0.8773 - val_loss: 0.7057 - val_acc: 0.8293\n",
      "Epoch 394/1000\n",
      "489/489 [==============================] - 0s 198us/step - loss: 0.2629 - acc: 0.8978 - val_loss: 0.6807 - val_acc: 0.8598\n",
      "Epoch 395/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2786 - acc: 0.9039 - val_loss: 0.7249 - val_acc: 0.7866\n",
      "Epoch 396/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2954 - acc: 0.8793 - val_loss: 0.7450 - val_acc: 0.8171\n",
      "Epoch 397/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.2813 - acc: 0.8957 - val_loss: 0.7318 - val_acc: 0.7988\n",
      "Epoch 398/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2968 - acc: 0.8855 - val_loss: 0.7138 - val_acc: 0.7988\n",
      "Epoch 399/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2963 - acc: 0.8834 - val_loss: 0.7133 - val_acc: 0.8171\n",
      "Epoch 400/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2905 - acc: 0.8998 - val_loss: 0.7432 - val_acc: 0.7866\n",
      "Epoch 401/1000\n",
      "489/489 [==============================] - 0s 194us/step - loss: 0.2961 - acc: 0.8896 - val_loss: 0.7897 - val_acc: 0.7866\n",
      "Epoch 402/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2767 - acc: 0.8855 - val_loss: 0.7382 - val_acc: 0.8293\n",
      "Epoch 403/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2435 - acc: 0.8998 - val_loss: 0.7188 - val_acc: 0.8232\n",
      "Epoch 404/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3257 - acc: 0.8896 - val_loss: 0.7425 - val_acc: 0.7988\n",
      "Epoch 405/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3363 - acc: 0.8834 - val_loss: 0.7454 - val_acc: 0.7988\n",
      "Epoch 406/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2996 - acc: 0.8732 - val_loss: 0.7676 - val_acc: 0.7622\n",
      "Epoch 407/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2701 - acc: 0.8957 - val_loss: 0.8564 - val_acc: 0.7622\n",
      "Epoch 408/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3397 - acc: 0.8773 - val_loss: 0.8074 - val_acc: 0.7866\n",
      "Epoch 409/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3588 - acc: 0.8773 - val_loss: 0.7642 - val_acc: 0.8171\n",
      "Epoch 410/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2925 - acc: 0.8937 - val_loss: 0.7185 - val_acc: 0.8171\n",
      "Epoch 411/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2978 - acc: 0.8773 - val_loss: 0.7659 - val_acc: 0.8049\n",
      "Epoch 412/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3101 - acc: 0.8793 - val_loss: 0.8088 - val_acc: 0.7866\n",
      "Epoch 413/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3100 - acc: 0.8793 - val_loss: 0.7270 - val_acc: 0.8415\n",
      "Epoch 414/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3605 - acc: 0.8753 - val_loss: 0.7602 - val_acc: 0.7866\n",
      "Epoch 415/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 159us/step - loss: 0.3295 - acc: 0.8630 - val_loss: 0.7562 - val_acc: 0.8415\n",
      "Epoch 416/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2886 - acc: 0.8875 - val_loss: 0.7563 - val_acc: 0.8476\n",
      "Epoch 417/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2699 - acc: 0.8773 - val_loss: 0.8074 - val_acc: 0.8232\n",
      "Epoch 418/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2908 - acc: 0.8875 - val_loss: 0.8224 - val_acc: 0.8293\n",
      "Epoch 419/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3551 - acc: 0.8916 - val_loss: 0.8049 - val_acc: 0.8293\n",
      "Epoch 420/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2788 - acc: 0.8978 - val_loss: 0.7821 - val_acc: 0.8293\n",
      "Epoch 421/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2865 - acc: 0.8916 - val_loss: 0.7833 - val_acc: 0.8110\n",
      "Epoch 422/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2526 - acc: 0.8978 - val_loss: 0.8060 - val_acc: 0.8171\n",
      "Epoch 423/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3464 - acc: 0.8834 - val_loss: 0.7957 - val_acc: 0.8415\n",
      "Epoch 424/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2646 - acc: 0.8855 - val_loss: 0.7809 - val_acc: 0.8354\n",
      "Epoch 425/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3259 - acc: 0.8712 - val_loss: 0.7292 - val_acc: 0.8354\n",
      "Epoch 426/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2900 - acc: 0.8732 - val_loss: 0.7220 - val_acc: 0.8293\n",
      "Epoch 427/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3818 - acc: 0.8814 - val_loss: 0.7446 - val_acc: 0.8110\n",
      "Epoch 428/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2943 - acc: 0.8978 - val_loss: 0.7159 - val_acc: 0.8415\n",
      "Epoch 429/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2859 - acc: 0.8916 - val_loss: 0.7302 - val_acc: 0.8110\n",
      "Epoch 430/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2847 - acc: 0.8957 - val_loss: 0.7723 - val_acc: 0.7500\n",
      "Epoch 431/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3033 - acc: 0.8712 - val_loss: 0.7800 - val_acc: 0.7927\n",
      "Epoch 432/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2427 - acc: 0.9018 - val_loss: 0.8107 - val_acc: 0.8354\n",
      "Epoch 433/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3496 - acc: 0.8896 - val_loss: 0.8424 - val_acc: 0.8354\n",
      "Epoch 434/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3158 - acc: 0.8916 - val_loss: 0.7916 - val_acc: 0.7988\n",
      "Epoch 435/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2992 - acc: 0.8691 - val_loss: 0.7557 - val_acc: 0.7988\n",
      "Epoch 436/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2371 - acc: 0.9059 - val_loss: 0.7729 - val_acc: 0.8415\n",
      "Epoch 437/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2819 - acc: 0.8978 - val_loss: 0.7630 - val_acc: 0.8354\n",
      "Epoch 438/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2920 - acc: 0.9059 - val_loss: 0.7845 - val_acc: 0.8110\n",
      "Epoch 439/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.3884 - acc: 0.8875 - val_loss: 0.8179 - val_acc: 0.7927\n",
      "Epoch 440/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3250 - acc: 0.8855 - val_loss: 0.7656 - val_acc: 0.8232\n",
      "Epoch 441/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3715 - acc: 0.8875 - val_loss: 0.7792 - val_acc: 0.8476\n",
      "Epoch 442/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2782 - acc: 0.9080 - val_loss: 0.7862 - val_acc: 0.8171\n",
      "Epoch 443/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2676 - acc: 0.8671 - val_loss: 0.8078 - val_acc: 0.8354\n",
      "Epoch 444/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.3981 - acc: 0.8773 - val_loss: 0.8426 - val_acc: 0.8110\n",
      "Epoch 445/1000\n",
      "489/489 [==============================] - 0s 214us/step - loss: 0.3287 - acc: 0.8671 - val_loss: 0.8074 - val_acc: 0.8293\n",
      "Epoch 446/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3068 - acc: 0.8732 - val_loss: 0.8337 - val_acc: 0.7683\n",
      "Epoch 447/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3110 - acc: 0.8834 - val_loss: 0.8064 - val_acc: 0.7561\n",
      "Epoch 448/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3027 - acc: 0.8916 - val_loss: 0.7709 - val_acc: 0.7866\n",
      "Epoch 449/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2497 - acc: 0.9039 - val_loss: 0.7733 - val_acc: 0.8232\n",
      "Epoch 450/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3033 - acc: 0.8712 - val_loss: 0.8020 - val_acc: 0.7927\n",
      "Epoch 451/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2640 - acc: 0.8957 - val_loss: 0.8351 - val_acc: 0.7927\n",
      "Epoch 452/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2643 - acc: 0.8834 - val_loss: 0.8594 - val_acc: 0.7439\n",
      "Epoch 453/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2613 - acc: 0.9059 - val_loss: 0.8210 - val_acc: 0.8171\n",
      "Epoch 454/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2564 - acc: 0.8937 - val_loss: 0.8405 - val_acc: 0.8110\n",
      "Epoch 455/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2253 - acc: 0.9059 - val_loss: 0.8127 - val_acc: 0.8598\n",
      "Epoch 456/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2714 - acc: 0.8875 - val_loss: 0.8095 - val_acc: 0.8476\n",
      "Epoch 457/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2624 - acc: 0.8998 - val_loss: 0.8170 - val_acc: 0.8415\n",
      "Epoch 458/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2435 - acc: 0.8957 - val_loss: 0.7880 - val_acc: 0.8537\n",
      "Epoch 459/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2692 - acc: 0.8896 - val_loss: 0.8514 - val_acc: 0.8232\n",
      "Epoch 460/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2862 - acc: 0.8916 - val_loss: 0.8077 - val_acc: 0.8476\n",
      "Epoch 461/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3000 - acc: 0.8712 - val_loss: 0.8083 - val_acc: 0.8293\n",
      "Epoch 462/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2789 - acc: 0.8896 - val_loss: 0.7665 - val_acc: 0.8598\n",
      "Epoch 463/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3000 - acc: 0.8814 - val_loss: 0.8039 - val_acc: 0.8537\n",
      "Epoch 464/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3120 - acc: 0.8875 - val_loss: 0.8267 - val_acc: 0.7927\n",
      "Epoch 465/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2758 - acc: 0.8753 - val_loss: 0.8415 - val_acc: 0.7561\n",
      "Epoch 466/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2792 - acc: 0.8691 - val_loss: 0.8182 - val_acc: 0.7744\n",
      "Epoch 467/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3083 - acc: 0.8732 - val_loss: 0.7835 - val_acc: 0.8293\n",
      "Epoch 468/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2676 - acc: 0.9039 - val_loss: 0.7845 - val_acc: 0.8537\n",
      "Epoch 469/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2976 - acc: 0.8671 - val_loss: 0.7888 - val_acc: 0.7927\n",
      "Epoch 470/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3014 - acc: 0.8793 - val_loss: 0.7914 - val_acc: 0.8232\n",
      "Epoch 471/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.2506 - acc: 0.8998 - val_loss: 0.7921 - val_acc: 0.8354\n",
      "Epoch 472/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3351 - acc: 0.8650 - val_loss: 0.7956 - val_acc: 0.8354\n",
      "Epoch 473/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3148 - acc: 0.8814 - val_loss: 0.8308 - val_acc: 0.8293\n",
      "Epoch 474/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 153us/step - loss: 0.2996 - acc: 0.8916 - val_loss: 0.8538 - val_acc: 0.8049\n",
      "Epoch 475/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2700 - acc: 0.8916 - val_loss: 0.8412 - val_acc: 0.7744\n",
      "Epoch 476/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2972 - acc: 0.8855 - val_loss: 0.8055 - val_acc: 0.7927\n",
      "Epoch 477/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3174 - acc: 0.8773 - val_loss: 0.7827 - val_acc: 0.8110\n",
      "Epoch 478/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2836 - acc: 0.8650 - val_loss: 0.7963 - val_acc: 0.8110\n",
      "Epoch 479/1000\n",
      "489/489 [==============================] - 0s 128us/step - loss: 0.3782 - acc: 0.8609 - val_loss: 0.7574 - val_acc: 0.8293\n",
      "Epoch 480/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2812 - acc: 0.8773 - val_loss: 0.7656 - val_acc: 0.8293\n",
      "Epoch 481/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2856 - acc: 0.8691 - val_loss: 0.7729 - val_acc: 0.8293\n",
      "Epoch 482/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2885 - acc: 0.8814 - val_loss: 0.7594 - val_acc: 0.8415\n",
      "Epoch 483/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3200 - acc: 0.8569 - val_loss: 0.8367 - val_acc: 0.8049\n",
      "Epoch 484/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3075 - acc: 0.8916 - val_loss: 0.8132 - val_acc: 0.8171\n",
      "Epoch 485/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2774 - acc: 0.8732 - val_loss: 0.8003 - val_acc: 0.8232\n",
      "Epoch 486/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2900 - acc: 0.8793 - val_loss: 0.8161 - val_acc: 0.8171\n",
      "Epoch 487/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3074 - acc: 0.8834 - val_loss: 0.7626 - val_acc: 0.8354\n",
      "Epoch 488/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2774 - acc: 0.8834 - val_loss: 0.7655 - val_acc: 0.8293\n",
      "Epoch 489/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2550 - acc: 0.8937 - val_loss: 0.7483 - val_acc: 0.8293\n",
      "Epoch 490/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3124 - acc: 0.8753 - val_loss: 0.7694 - val_acc: 0.7866\n",
      "Epoch 491/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2533 - acc: 0.8896 - val_loss: 0.7730 - val_acc: 0.8049\n",
      "Epoch 492/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3656 - acc: 0.8814 - val_loss: 0.7702 - val_acc: 0.8049\n",
      "Epoch 493/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2343 - acc: 0.9141 - val_loss: 0.7631 - val_acc: 0.8354\n",
      "Epoch 494/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3162 - acc: 0.9100 - val_loss: 0.7575 - val_acc: 0.8171\n",
      "Epoch 495/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3203 - acc: 0.8875 - val_loss: 0.7460 - val_acc: 0.8110\n",
      "Epoch 496/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2391 - acc: 0.9121 - val_loss: 0.7538 - val_acc: 0.8293\n",
      "Epoch 497/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2710 - acc: 0.9162 - val_loss: 0.7581 - val_acc: 0.8232\n",
      "Epoch 498/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3012 - acc: 0.8834 - val_loss: 0.7825 - val_acc: 0.8232\n",
      "Epoch 499/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2902 - acc: 0.8937 - val_loss: 0.7823 - val_acc: 0.8232\n",
      "Epoch 500/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2689 - acc: 0.9100 - val_loss: 0.7905 - val_acc: 0.8232\n",
      "Epoch 501/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2440 - acc: 0.9100 - val_loss: 0.8473 - val_acc: 0.7683\n",
      "Epoch 502/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2705 - acc: 0.8937 - val_loss: 0.8228 - val_acc: 0.8110\n",
      "Epoch 503/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2950 - acc: 0.8916 - val_loss: 0.7796 - val_acc: 0.8171\n",
      "Epoch 504/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2989 - acc: 0.8671 - val_loss: 0.7807 - val_acc: 0.7927\n",
      "Epoch 505/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2951 - acc: 0.8753 - val_loss: 0.7668 - val_acc: 0.8232\n",
      "Epoch 506/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2674 - acc: 0.9039 - val_loss: 0.8358 - val_acc: 0.7744\n",
      "Epoch 507/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3021 - acc: 0.8875 - val_loss: 0.7956 - val_acc: 0.8293\n",
      "Epoch 508/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3068 - acc: 0.8691 - val_loss: 0.7804 - val_acc: 0.8049\n",
      "Epoch 509/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.2519 - acc: 0.8957 - val_loss: 0.7450 - val_acc: 0.8476\n",
      "Epoch 510/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2707 - acc: 0.8978 - val_loss: 0.7681 - val_acc: 0.8415\n",
      "Epoch 511/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2550 - acc: 0.8957 - val_loss: 0.7907 - val_acc: 0.8171\n",
      "Epoch 512/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2538 - acc: 0.9080 - val_loss: 0.7972 - val_acc: 0.8171\n",
      "Epoch 513/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.2579 - acc: 0.8896 - val_loss: 0.7935 - val_acc: 0.8049\n",
      "Epoch 514/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2942 - acc: 0.8916 - val_loss: 0.7716 - val_acc: 0.8476\n",
      "Epoch 515/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2979 - acc: 0.8753 - val_loss: 0.7823 - val_acc: 0.7927\n",
      "Epoch 516/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2763 - acc: 0.9018 - val_loss: 0.7998 - val_acc: 0.8049\n",
      "Epoch 517/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2560 - acc: 0.8937 - val_loss: 0.7998 - val_acc: 0.8232\n",
      "Epoch 518/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2939 - acc: 0.8875 - val_loss: 0.7412 - val_acc: 0.8171\n",
      "Epoch 519/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2900 - acc: 0.8937 - val_loss: 0.7333 - val_acc: 0.8171\n",
      "Epoch 520/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2796 - acc: 0.8875 - val_loss: 0.7500 - val_acc: 0.8232\n",
      "Epoch 521/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3004 - acc: 0.8896 - val_loss: 0.7300 - val_acc: 0.8232\n",
      "Epoch 522/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2822 - acc: 0.8937 - val_loss: 0.7390 - val_acc: 0.7866\n",
      "Epoch 523/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3078 - acc: 0.8978 - val_loss: 0.7372 - val_acc: 0.8110\n",
      "Epoch 524/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2633 - acc: 0.8875 - val_loss: 0.7216 - val_acc: 0.8293\n",
      "Epoch 525/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2686 - acc: 0.8875 - val_loss: 0.7387 - val_acc: 0.8354\n",
      "Epoch 526/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.2714 - acc: 0.8834 - val_loss: 0.7778 - val_acc: 0.8171\n",
      "Epoch 527/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3144 - acc: 0.8978 - val_loss: 0.8119 - val_acc: 0.7927\n",
      "Epoch 528/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3413 - acc: 0.8691 - val_loss: 0.8154 - val_acc: 0.8110\n",
      "Epoch 529/1000\n",
      "489/489 [==============================] - 0s 192us/step - loss: 0.3139 - acc: 0.8732 - val_loss: 0.7873 - val_acc: 0.8110\n",
      "Epoch 530/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2910 - acc: 0.8998 - val_loss: 0.7720 - val_acc: 0.8171\n",
      "Epoch 531/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3223 - acc: 0.8753 - val_loss: 0.7071 - val_acc: 0.8171\n",
      "Epoch 532/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2977 - acc: 0.8793 - val_loss: 0.7193 - val_acc: 0.8293\n",
      "Epoch 533/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 147us/step - loss: 0.2555 - acc: 0.8957 - val_loss: 0.7431 - val_acc: 0.8354\n",
      "Epoch 534/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2748 - acc: 0.8916 - val_loss: 0.7462 - val_acc: 0.7866\n",
      "Epoch 535/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2395 - acc: 0.8896 - val_loss: 0.7723 - val_acc: 0.8049\n",
      "Epoch 536/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3160 - acc: 0.8875 - val_loss: 0.7963 - val_acc: 0.8110\n",
      "Epoch 537/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2711 - acc: 0.8916 - val_loss: 0.7599 - val_acc: 0.8415\n",
      "Epoch 538/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2649 - acc: 0.8896 - val_loss: 0.7720 - val_acc: 0.8293\n",
      "Epoch 539/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2563 - acc: 0.9018 - val_loss: 0.7484 - val_acc: 0.8171\n",
      "Epoch 540/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.3059 - acc: 0.8896 - val_loss: 0.7648 - val_acc: 0.8110\n",
      "Epoch 541/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2693 - acc: 0.9059 - val_loss: 0.7472 - val_acc: 0.8232\n",
      "Epoch 542/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2716 - acc: 0.9018 - val_loss: 0.7776 - val_acc: 0.8293\n",
      "Epoch 543/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3033 - acc: 0.8834 - val_loss: 0.7299 - val_acc: 0.8354\n",
      "Epoch 544/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2709 - acc: 0.9039 - val_loss: 0.7285 - val_acc: 0.8293\n",
      "Epoch 545/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2554 - acc: 0.8937 - val_loss: 0.7218 - val_acc: 0.8537\n",
      "Epoch 546/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2623 - acc: 0.8875 - val_loss: 0.7328 - val_acc: 0.8598\n",
      "Epoch 547/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2699 - acc: 0.8855 - val_loss: 0.7740 - val_acc: 0.8415\n",
      "Epoch 548/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2824 - acc: 0.8998 - val_loss: 0.8195 - val_acc: 0.8232\n",
      "Epoch 549/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2552 - acc: 0.9039 - val_loss: 0.8438 - val_acc: 0.8110\n",
      "Epoch 550/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2986 - acc: 0.8671 - val_loss: 0.8062 - val_acc: 0.8110\n",
      "Epoch 551/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2711 - acc: 0.8896 - val_loss: 0.7973 - val_acc: 0.8415\n",
      "Epoch 552/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2676 - acc: 0.9018 - val_loss: 0.7865 - val_acc: 0.8232\n",
      "Epoch 553/1000\n",
      "489/489 [==============================] - 0s 200us/step - loss: 0.2959 - acc: 0.8753 - val_loss: 0.7773 - val_acc: 0.8415\n",
      "Epoch 554/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3074 - acc: 0.8896 - val_loss: 0.7738 - val_acc: 0.8354\n",
      "Epoch 555/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3415 - acc: 0.8773 - val_loss: 0.8481 - val_acc: 0.7683\n",
      "Epoch 556/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3173 - acc: 0.8691 - val_loss: 0.7575 - val_acc: 0.8232\n",
      "Epoch 557/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2710 - acc: 0.8875 - val_loss: 0.7681 - val_acc: 0.8232\n",
      "Epoch 558/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3000 - acc: 0.8855 - val_loss: 0.7442 - val_acc: 0.8476\n",
      "Epoch 559/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2450 - acc: 0.9018 - val_loss: 0.7499 - val_acc: 0.8354\n",
      "Epoch 560/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2666 - acc: 0.8916 - val_loss: 0.7583 - val_acc: 0.8232\n",
      "Epoch 561/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3045 - acc: 0.9018 - val_loss: 0.7874 - val_acc: 0.7988\n",
      "Epoch 562/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2791 - acc: 0.9018 - val_loss: 0.7761 - val_acc: 0.7988\n",
      "Epoch 563/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2968 - acc: 0.8978 - val_loss: 0.7479 - val_acc: 0.8232\n",
      "Epoch 564/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3237 - acc: 0.8855 - val_loss: 0.7788 - val_acc: 0.8171\n",
      "Epoch 565/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2498 - acc: 0.9059 - val_loss: 0.7902 - val_acc: 0.7988\n",
      "Epoch 566/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3026 - acc: 0.8855 - val_loss: 0.8042 - val_acc: 0.8110\n",
      "Epoch 567/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2377 - acc: 0.9100 - val_loss: 0.8051 - val_acc: 0.8049\n",
      "Epoch 568/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3025 - acc: 0.9059 - val_loss: 0.7942 - val_acc: 0.8049\n",
      "Epoch 569/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2614 - acc: 0.8793 - val_loss: 0.8077 - val_acc: 0.7988\n",
      "Epoch 570/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2613 - acc: 0.8916 - val_loss: 0.7674 - val_acc: 0.8232\n",
      "Epoch 571/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3036 - acc: 0.8814 - val_loss: 0.7418 - val_acc: 0.8171\n",
      "Epoch 572/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2840 - acc: 0.8793 - val_loss: 0.7689 - val_acc: 0.8110\n",
      "Epoch 573/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2385 - acc: 0.9080 - val_loss: 0.7918 - val_acc: 0.8293\n",
      "Epoch 574/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2789 - acc: 0.8875 - val_loss: 0.7941 - val_acc: 0.8537\n",
      "Epoch 575/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2597 - acc: 0.8937 - val_loss: 0.9007 - val_acc: 0.7561\n",
      "Epoch 576/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2817 - acc: 0.8753 - val_loss: 0.8417 - val_acc: 0.8049\n",
      "Epoch 577/1000\n",
      "489/489 [==============================] - 0s 198us/step - loss: 0.2832 - acc: 0.8875 - val_loss: 0.7677 - val_acc: 0.8537\n",
      "Epoch 578/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2880 - acc: 0.8875 - val_loss: 0.8027 - val_acc: 0.7866\n",
      "Epoch 579/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.2580 - acc: 0.8896 - val_loss: 0.8758 - val_acc: 0.7988\n",
      "Epoch 580/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2972 - acc: 0.8875 - val_loss: 0.8210 - val_acc: 0.8110\n",
      "Epoch 581/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3393 - acc: 0.8896 - val_loss: 0.8169 - val_acc: 0.8354\n",
      "Epoch 582/1000\n",
      "489/489 [==============================] - 0s 180us/step - loss: 0.2476 - acc: 0.8998 - val_loss: 0.8302 - val_acc: 0.7927\n",
      "Epoch 583/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2325 - acc: 0.9100 - val_loss: 0.8443 - val_acc: 0.8232\n",
      "Epoch 584/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2940 - acc: 0.8937 - val_loss: 0.8187 - val_acc: 0.8232\n",
      "Epoch 585/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2648 - acc: 0.9162 - val_loss: 0.8449 - val_acc: 0.8171\n",
      "Epoch 586/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2320 - acc: 0.9182 - val_loss: 0.8391 - val_acc: 0.8232\n",
      "Epoch 587/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2676 - acc: 0.9080 - val_loss: 0.8015 - val_acc: 0.8415\n",
      "Epoch 588/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2447 - acc: 0.8998 - val_loss: 0.7952 - val_acc: 0.8354\n",
      "Epoch 589/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2771 - acc: 0.9018 - val_loss: 0.8292 - val_acc: 0.8232\n",
      "Epoch 590/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3911 - acc: 0.8998 - val_loss: 0.8683 - val_acc: 0.8110\n",
      "Epoch 591/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.3052 - acc: 0.8896 - val_loss: 0.8525 - val_acc: 0.7988\n",
      "Epoch 592/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 151us/step - loss: 0.2648 - acc: 0.8957 - val_loss: 0.8461 - val_acc: 0.7988\n",
      "Epoch 593/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3538 - acc: 0.8855 - val_loss: 0.8144 - val_acc: 0.8110\n",
      "Epoch 594/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2586 - acc: 0.8896 - val_loss: 0.7754 - val_acc: 0.8354\n",
      "Epoch 595/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2942 - acc: 0.9100 - val_loss: 0.7501 - val_acc: 0.8293\n",
      "Epoch 596/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2931 - acc: 0.8753 - val_loss: 0.8017 - val_acc: 0.7561\n",
      "Epoch 597/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.3737 - acc: 0.8834 - val_loss: 0.7264 - val_acc: 0.8354\n",
      "Epoch 598/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3440 - acc: 0.8650 - val_loss: 0.6730 - val_acc: 0.8537\n",
      "Epoch 599/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2819 - acc: 0.8855 - val_loss: 0.7180 - val_acc: 0.8415\n",
      "Epoch 600/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2816 - acc: 0.8978 - val_loss: 0.7099 - val_acc: 0.8415\n",
      "Epoch 601/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2776 - acc: 0.9100 - val_loss: 0.7125 - val_acc: 0.8598\n",
      "Epoch 602/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2485 - acc: 0.8957 - val_loss: 0.7262 - val_acc: 0.8537\n",
      "Epoch 603/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3345 - acc: 0.8589 - val_loss: 0.6884 - val_acc: 0.8659\n",
      "Epoch 604/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3623 - acc: 0.8609 - val_loss: 0.7258 - val_acc: 0.8049\n",
      "Epoch 605/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3134 - acc: 0.8650 - val_loss: 0.7611 - val_acc: 0.8232\n",
      "Epoch 606/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3006 - acc: 0.8650 - val_loss: 0.7982 - val_acc: 0.8171\n",
      "Epoch 607/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2834 - acc: 0.8793 - val_loss: 0.7695 - val_acc: 0.8171\n",
      "Epoch 608/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2870 - acc: 0.8896 - val_loss: 0.7621 - val_acc: 0.8415\n",
      "Epoch 609/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3084 - acc: 0.8650 - val_loss: 0.7606 - val_acc: 0.8354\n",
      "Epoch 610/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3298 - acc: 0.8896 - val_loss: 0.7389 - val_acc: 0.8415\n",
      "Epoch 611/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2569 - acc: 0.8916 - val_loss: 0.7486 - val_acc: 0.8476\n",
      "Epoch 612/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2834 - acc: 0.8916 - val_loss: 0.7598 - val_acc: 0.8476\n",
      "Epoch 613/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2763 - acc: 0.8855 - val_loss: 0.7772 - val_acc: 0.8415\n",
      "Epoch 614/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2414 - acc: 0.9100 - val_loss: 0.7911 - val_acc: 0.8415\n",
      "Epoch 615/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2597 - acc: 0.9059 - val_loss: 0.7641 - val_acc: 0.8780\n",
      "Epoch 616/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2874 - acc: 0.8998 - val_loss: 0.7680 - val_acc: 0.8476\n",
      "Epoch 617/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.3083 - acc: 0.9018 - val_loss: 0.7439 - val_acc: 0.8293\n",
      "Epoch 618/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2807 - acc: 0.8937 - val_loss: 0.7548 - val_acc: 0.8415\n",
      "Epoch 619/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2578 - acc: 0.8998 - val_loss: 0.7805 - val_acc: 0.8232\n",
      "Epoch 620/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2566 - acc: 0.8916 - val_loss: 0.7589 - val_acc: 0.8232\n",
      "Epoch 621/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2561 - acc: 0.8834 - val_loss: 0.7799 - val_acc: 0.8232\n",
      "Epoch 622/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2516 - acc: 0.8937 - val_loss: 0.7707 - val_acc: 0.8110\n",
      "Epoch 623/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2279 - acc: 0.8957 - val_loss: 0.7971 - val_acc: 0.8232\n",
      "Epoch 624/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2505 - acc: 0.8978 - val_loss: 0.8226 - val_acc: 0.8232\n",
      "Epoch 625/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2342 - acc: 0.9059 - val_loss: 0.8044 - val_acc: 0.8232\n",
      "Epoch 626/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2999 - acc: 0.9121 - val_loss: 0.7767 - val_acc: 0.8049\n",
      "Epoch 627/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2371 - acc: 0.9059 - val_loss: 0.7737 - val_acc: 0.8415\n",
      "Epoch 628/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2674 - acc: 0.8896 - val_loss: 0.7682 - val_acc: 0.8110\n",
      "Epoch 629/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2594 - acc: 0.9121 - val_loss: 0.7965 - val_acc: 0.8110\n",
      "Epoch 630/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3231 - acc: 0.8855 - val_loss: 0.7970 - val_acc: 0.7744\n",
      "Epoch 631/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3333 - acc: 0.8855 - val_loss: 0.8136 - val_acc: 0.7622\n",
      "Epoch 632/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3058 - acc: 0.8773 - val_loss: 0.7989 - val_acc: 0.7866\n",
      "Epoch 633/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2912 - acc: 0.8916 - val_loss: 0.7960 - val_acc: 0.8110\n",
      "Epoch 634/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2894 - acc: 0.9121 - val_loss: 0.7980 - val_acc: 0.8110\n",
      "Epoch 635/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3075 - acc: 0.8875 - val_loss: 0.8158 - val_acc: 0.8110\n",
      "Epoch 636/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2499 - acc: 0.8937 - val_loss: 0.8074 - val_acc: 0.8232\n",
      "Epoch 637/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2820 - acc: 0.8814 - val_loss: 0.7954 - val_acc: 0.8171\n",
      "Epoch 638/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2935 - acc: 0.8793 - val_loss: 0.8009 - val_acc: 0.8049\n",
      "Epoch 639/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2640 - acc: 0.8957 - val_loss: 0.7582 - val_acc: 0.8293\n",
      "Epoch 640/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3148 - acc: 0.8834 - val_loss: 0.7616 - val_acc: 0.8415\n",
      "Epoch 641/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3010 - acc: 0.8916 - val_loss: 0.7554 - val_acc: 0.8354\n",
      "Epoch 642/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2736 - acc: 0.8855 - val_loss: 0.7786 - val_acc: 0.8171\n",
      "Epoch 643/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3336 - acc: 0.8937 - val_loss: 0.7748 - val_acc: 0.8354\n",
      "Epoch 644/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2738 - acc: 0.8937 - val_loss: 0.8066 - val_acc: 0.8171\n",
      "Epoch 645/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2987 - acc: 0.9039 - val_loss: 0.7594 - val_acc: 0.8293\n",
      "Epoch 646/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2600 - acc: 0.8937 - val_loss: 0.7627 - val_acc: 0.8293\n",
      "Epoch 647/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2481 - acc: 0.9080 - val_loss: 0.7850 - val_acc: 0.8232\n",
      "Epoch 648/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2439 - acc: 0.8978 - val_loss: 0.7890 - val_acc: 0.8415\n",
      "Epoch 649/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2791 - acc: 0.8937 - val_loss: 0.7789 - val_acc: 0.8415\n",
      "Epoch 650/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2817 - acc: 0.9018 - val_loss: 0.7969 - val_acc: 0.8476\n",
      "Epoch 651/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 184us/step - loss: 0.2522 - acc: 0.9018 - val_loss: 0.8100 - val_acc: 0.8232\n",
      "Epoch 652/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3514 - acc: 0.8773 - val_loss: 0.7790 - val_acc: 0.7866\n",
      "Epoch 653/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3026 - acc: 0.9018 - val_loss: 0.7497 - val_acc: 0.8354\n",
      "Epoch 654/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2553 - acc: 0.8896 - val_loss: 0.8113 - val_acc: 0.8171\n",
      "Epoch 655/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2554 - acc: 0.8875 - val_loss: 0.7823 - val_acc: 0.8171\n",
      "Epoch 656/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2810 - acc: 0.8998 - val_loss: 0.7824 - val_acc: 0.8232\n",
      "Epoch 657/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3135 - acc: 0.8916 - val_loss: 0.7589 - val_acc: 0.8232\n",
      "Epoch 658/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2585 - acc: 0.8998 - val_loss: 0.7667 - val_acc: 0.8232\n",
      "Epoch 659/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2326 - acc: 0.9039 - val_loss: 0.7592 - val_acc: 0.8354\n",
      "Epoch 660/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2505 - acc: 0.9121 - val_loss: 0.7634 - val_acc: 0.8415\n",
      "Epoch 661/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2702 - acc: 0.9059 - val_loss: 0.7647 - val_acc: 0.8049\n",
      "Epoch 662/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2534 - acc: 0.8978 - val_loss: 0.7595 - val_acc: 0.8354\n",
      "Epoch 663/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2383 - acc: 0.8998 - val_loss: 0.8008 - val_acc: 0.8232\n",
      "Epoch 664/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2591 - acc: 0.9059 - val_loss: 0.8224 - val_acc: 0.8354\n",
      "Epoch 665/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2408 - acc: 0.9162 - val_loss: 0.8121 - val_acc: 0.8049\n",
      "Epoch 666/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3192 - acc: 0.8630 - val_loss: 0.7692 - val_acc: 0.8232\n",
      "Epoch 667/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3025 - acc: 0.8896 - val_loss: 0.7747 - val_acc: 0.8110\n",
      "Epoch 668/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2939 - acc: 0.8937 - val_loss: 0.7688 - val_acc: 0.8049\n",
      "Epoch 669/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2593 - acc: 0.8896 - val_loss: 0.7785 - val_acc: 0.8171\n",
      "Epoch 670/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2611 - acc: 0.8896 - val_loss: 0.7806 - val_acc: 0.8293\n",
      "Epoch 671/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2755 - acc: 0.9080 - val_loss: 0.8001 - val_acc: 0.8232\n",
      "Epoch 672/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2582 - acc: 0.8937 - val_loss: 0.8076 - val_acc: 0.8537\n",
      "Epoch 673/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2714 - acc: 0.8834 - val_loss: 0.8413 - val_acc: 0.8415\n",
      "Epoch 674/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2692 - acc: 0.8978 - val_loss: 0.7620 - val_acc: 0.8415\n",
      "Epoch 675/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2899 - acc: 0.8855 - val_loss: 0.7297 - val_acc: 0.8537\n",
      "Epoch 676/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3078 - acc: 0.8916 - val_loss: 0.7466 - val_acc: 0.8354\n",
      "Epoch 677/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3150 - acc: 0.8773 - val_loss: 0.7884 - val_acc: 0.7500\n",
      "Epoch 678/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3028 - acc: 0.8834 - val_loss: 0.8148 - val_acc: 0.7805\n",
      "Epoch 679/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2968 - acc: 0.8896 - val_loss: 0.7661 - val_acc: 0.8232\n",
      "Epoch 680/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2652 - acc: 0.9141 - val_loss: 0.7496 - val_acc: 0.8232\n",
      "Epoch 681/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2598 - acc: 0.8916 - val_loss: 0.7343 - val_acc: 0.8293\n",
      "Epoch 682/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2777 - acc: 0.8732 - val_loss: 0.7475 - val_acc: 0.8293\n",
      "Epoch 683/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2760 - acc: 0.8916 - val_loss: 0.7700 - val_acc: 0.8232\n",
      "Epoch 684/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2866 - acc: 0.9162 - val_loss: 0.7841 - val_acc: 0.8171\n",
      "Epoch 685/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.3554 - acc: 0.8834 - val_loss: 0.7668 - val_acc: 0.8354\n",
      "Epoch 686/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3203 - acc: 0.8814 - val_loss: 0.7540 - val_acc: 0.8232\n",
      "Epoch 687/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2884 - acc: 0.8712 - val_loss: 0.7415 - val_acc: 0.8476\n",
      "Epoch 688/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2949 - acc: 0.8896 - val_loss: 0.7584 - val_acc: 0.8110\n",
      "Epoch 689/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2765 - acc: 0.8773 - val_loss: 0.7646 - val_acc: 0.8232\n",
      "Epoch 690/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2817 - acc: 0.9018 - val_loss: 0.7588 - val_acc: 0.8354\n",
      "Epoch 691/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2935 - acc: 0.8834 - val_loss: 0.7912 - val_acc: 0.7866\n",
      "Epoch 692/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2893 - acc: 0.8896 - val_loss: 0.7748 - val_acc: 0.8171\n",
      "Epoch 693/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2918 - acc: 0.8875 - val_loss: 0.7198 - val_acc: 0.8537\n",
      "Epoch 694/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2825 - acc: 0.8875 - val_loss: 0.6964 - val_acc: 0.8537\n",
      "Epoch 695/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3015 - acc: 0.8753 - val_loss: 0.7321 - val_acc: 0.8476\n",
      "Epoch 696/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2971 - acc: 0.8916 - val_loss: 0.7207 - val_acc: 0.8537\n",
      "Epoch 697/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2540 - acc: 0.8978 - val_loss: 0.7262 - val_acc: 0.8537\n",
      "Epoch 698/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2747 - acc: 0.8814 - val_loss: 0.7294 - val_acc: 0.8476\n",
      "Epoch 699/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2452 - acc: 0.9162 - val_loss: 0.7783 - val_acc: 0.8415\n",
      "Epoch 700/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2953 - acc: 0.8814 - val_loss: 0.7731 - val_acc: 0.8476\n",
      "Epoch 701/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2438 - acc: 0.9018 - val_loss: 0.7407 - val_acc: 0.8293\n",
      "Epoch 702/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2895 - acc: 0.8916 - val_loss: 0.7781 - val_acc: 0.8232\n",
      "Epoch 703/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3245 - acc: 0.8855 - val_loss: 0.7711 - val_acc: 0.8293\n",
      "Epoch 704/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2997 - acc: 0.8753 - val_loss: 0.7754 - val_acc: 0.8354\n",
      "Epoch 705/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2733 - acc: 0.8978 - val_loss: 0.7560 - val_acc: 0.8354\n",
      "Epoch 706/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2455 - acc: 0.8855 - val_loss: 0.7676 - val_acc: 0.8354\n",
      "Epoch 707/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2581 - acc: 0.8937 - val_loss: 0.7592 - val_acc: 0.8537\n",
      "Epoch 708/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2620 - acc: 0.9039 - val_loss: 0.7443 - val_acc: 0.8415\n",
      "Epoch 709/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2651 - acc: 0.8978 - val_loss: 0.7616 - val_acc: 0.8232\n",
      "Epoch 710/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 155us/step - loss: 0.3159 - acc: 0.8875 - val_loss: 0.7739 - val_acc: 0.8293\n",
      "Epoch 711/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.3107 - acc: 0.8834 - val_loss: 0.7568 - val_acc: 0.7927\n",
      "Epoch 712/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2466 - acc: 0.8834 - val_loss: 0.7165 - val_acc: 0.8537\n",
      "Epoch 713/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2496 - acc: 0.9121 - val_loss: 0.7582 - val_acc: 0.8354\n",
      "Epoch 714/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2701 - acc: 0.9039 - val_loss: 0.7831 - val_acc: 0.8293\n",
      "Epoch 715/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3159 - acc: 0.8691 - val_loss: 0.8806 - val_acc: 0.7195\n",
      "Epoch 716/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3346 - acc: 0.8712 - val_loss: 0.8106 - val_acc: 0.7500\n",
      "Epoch 717/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2910 - acc: 0.8916 - val_loss: 0.7698 - val_acc: 0.7927\n",
      "Epoch 718/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.3163 - acc: 0.8589 - val_loss: 0.7947 - val_acc: 0.7439\n",
      "Epoch 719/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3184 - acc: 0.8712 - val_loss: 0.7780 - val_acc: 0.7622\n",
      "Epoch 720/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3579 - acc: 0.8834 - val_loss: 0.7195 - val_acc: 0.7683\n",
      "Epoch 721/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.3094 - acc: 0.8793 - val_loss: 0.7492 - val_acc: 0.7866\n",
      "Epoch 722/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.3326 - acc: 0.8896 - val_loss: 0.7736 - val_acc: 0.7927\n",
      "Epoch 723/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3836 - acc: 0.8793 - val_loss: 0.7624 - val_acc: 0.7988\n",
      "Epoch 724/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3240 - acc: 0.8855 - val_loss: 0.7473 - val_acc: 0.7622\n",
      "Epoch 725/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3112 - acc: 0.8773 - val_loss: 0.7735 - val_acc: 0.7744\n",
      "Epoch 726/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3065 - acc: 0.8855 - val_loss: 0.7512 - val_acc: 0.8049\n",
      "Epoch 727/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3083 - acc: 0.8732 - val_loss: 0.7480 - val_acc: 0.8049\n",
      "Epoch 728/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2446 - acc: 0.8937 - val_loss: 0.7675 - val_acc: 0.7988\n",
      "Epoch 729/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2708 - acc: 0.8855 - val_loss: 0.7955 - val_acc: 0.7988\n",
      "Epoch 730/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2901 - acc: 0.8875 - val_loss: 0.7716 - val_acc: 0.7805\n",
      "Epoch 731/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3228 - acc: 0.8814 - val_loss: 0.7537 - val_acc: 0.7988\n",
      "Epoch 732/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2964 - acc: 0.8875 - val_loss: 0.7500 - val_acc: 0.8110\n",
      "Epoch 733/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.2886 - acc: 0.8650 - val_loss: 0.7339 - val_acc: 0.8171\n",
      "Epoch 734/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3174 - acc: 0.8793 - val_loss: 0.7304 - val_acc: 0.8110\n",
      "Epoch 735/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2683 - acc: 0.8998 - val_loss: 0.7457 - val_acc: 0.8293\n",
      "Epoch 736/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.3119 - acc: 0.9100 - val_loss: 0.7808 - val_acc: 0.8232\n",
      "Epoch 737/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2820 - acc: 0.8978 - val_loss: 0.7902 - val_acc: 0.7927\n",
      "Epoch 738/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2528 - acc: 0.8896 - val_loss: 0.7909 - val_acc: 0.8049\n",
      "Epoch 739/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2947 - acc: 0.9018 - val_loss: 0.7631 - val_acc: 0.8476\n",
      "Epoch 740/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.1862 - acc: 0.9243 - val_loss: 0.7839 - val_acc: 0.8415\n",
      "Epoch 741/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.3172 - acc: 0.8998 - val_loss: 0.7948 - val_acc: 0.8049\n",
      "Epoch 742/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2935 - acc: 0.8896 - val_loss: 0.7774 - val_acc: 0.8232\n",
      "Epoch 743/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2678 - acc: 0.9018 - val_loss: 0.7681 - val_acc: 0.8476\n",
      "Epoch 744/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2815 - acc: 0.8753 - val_loss: 0.7685 - val_acc: 0.8293\n",
      "Epoch 745/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2975 - acc: 0.9080 - val_loss: 0.7762 - val_acc: 0.8293\n",
      "Epoch 746/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3480 - acc: 0.8896 - val_loss: 0.7599 - val_acc: 0.8232\n",
      "Epoch 747/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2803 - acc: 0.9018 - val_loss: 0.7722 - val_acc: 0.8476\n",
      "Epoch 748/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2431 - acc: 0.8998 - val_loss: 0.8126 - val_acc: 0.8354\n",
      "Epoch 749/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3050 - acc: 0.8793 - val_loss: 0.8014 - val_acc: 0.8476\n",
      "Epoch 750/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3342 - acc: 0.8732 - val_loss: 0.7848 - val_acc: 0.8659\n",
      "Epoch 751/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2766 - acc: 0.8712 - val_loss: 0.8067 - val_acc: 0.7927\n",
      "Epoch 752/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2723 - acc: 0.8875 - val_loss: 0.8027 - val_acc: 0.8049\n",
      "Epoch 753/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2666 - acc: 0.8834 - val_loss: 0.7864 - val_acc: 0.8110\n",
      "Epoch 754/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2409 - acc: 0.9141 - val_loss: 0.8189 - val_acc: 0.8232\n",
      "Epoch 755/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2353 - acc: 0.8957 - val_loss: 0.7914 - val_acc: 0.8476\n",
      "Epoch 756/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2837 - acc: 0.8957 - val_loss: 0.7857 - val_acc: 0.8354\n",
      "Epoch 757/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2434 - acc: 0.9018 - val_loss: 0.7915 - val_acc: 0.8537\n",
      "Epoch 758/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2532 - acc: 0.8978 - val_loss: 0.8098 - val_acc: 0.8354\n",
      "Epoch 759/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2453 - acc: 0.9059 - val_loss: 0.8235 - val_acc: 0.8354\n",
      "Epoch 760/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2423 - acc: 0.8978 - val_loss: 0.7999 - val_acc: 0.8354\n",
      "Epoch 761/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.3160 - acc: 0.8957 - val_loss: 0.7773 - val_acc: 0.8293\n",
      "Epoch 762/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2818 - acc: 0.8834 - val_loss: 0.7654 - val_acc: 0.8415\n",
      "Epoch 763/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2935 - acc: 0.8916 - val_loss: 0.7272 - val_acc: 0.8537\n",
      "Epoch 764/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2535 - acc: 0.8978 - val_loss: 0.7835 - val_acc: 0.8293\n",
      "Epoch 765/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3354 - acc: 0.8814 - val_loss: 0.7576 - val_acc: 0.8232\n",
      "Epoch 766/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2388 - acc: 0.9162 - val_loss: 0.7518 - val_acc: 0.8415\n",
      "Epoch 767/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2812 - acc: 0.8957 - val_loss: 0.7685 - val_acc: 0.8415\n",
      "Epoch 768/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2940 - acc: 0.8998 - val_loss: 0.7776 - val_acc: 0.8354\n",
      "Epoch 769/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 149us/step - loss: 0.2503 - acc: 0.9018 - val_loss: 0.7303 - val_acc: 0.8476\n",
      "Epoch 770/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2431 - acc: 0.9059 - val_loss: 0.7404 - val_acc: 0.8476\n",
      "Epoch 771/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2705 - acc: 0.8916 - val_loss: 0.7511 - val_acc: 0.8110\n",
      "Epoch 772/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2269 - acc: 0.9162 - val_loss: 0.7883 - val_acc: 0.8293\n",
      "Epoch 773/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.1876 - acc: 0.9141 - val_loss: 0.7831 - val_acc: 0.8537\n",
      "Epoch 774/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2783 - acc: 0.9059 - val_loss: 0.7988 - val_acc: 0.8415\n",
      "Epoch 775/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2594 - acc: 0.8957 - val_loss: 0.8159 - val_acc: 0.8171\n",
      "Epoch 776/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2734 - acc: 0.8998 - val_loss: 0.7988 - val_acc: 0.8415\n",
      "Epoch 777/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2068 - acc: 0.9141 - val_loss: 0.7860 - val_acc: 0.8354\n",
      "Epoch 778/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.2143 - acc: 0.9100 - val_loss: 0.7909 - val_acc: 0.8415\n",
      "Epoch 779/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2270 - acc: 0.9059 - val_loss: 0.8367 - val_acc: 0.8415\n",
      "Epoch 780/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2529 - acc: 0.8957 - val_loss: 0.8482 - val_acc: 0.8110\n",
      "Epoch 781/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2732 - acc: 0.8998 - val_loss: 0.7645 - val_acc: 0.8293\n",
      "Epoch 782/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2982 - acc: 0.8773 - val_loss: 0.7704 - val_acc: 0.8293\n",
      "Epoch 783/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2530 - acc: 0.9039 - val_loss: 0.7766 - val_acc: 0.8354\n",
      "Epoch 784/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2613 - acc: 0.8875 - val_loss: 0.7399 - val_acc: 0.8476\n",
      "Epoch 785/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2350 - acc: 0.9018 - val_loss: 0.7512 - val_acc: 0.8354\n",
      "Epoch 786/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2096 - acc: 0.9121 - val_loss: 0.7714 - val_acc: 0.8110\n",
      "Epoch 787/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2383 - acc: 0.9080 - val_loss: 0.7635 - val_acc: 0.8354\n",
      "Epoch 788/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2915 - acc: 0.8957 - val_loss: 0.7873 - val_acc: 0.8110\n",
      "Epoch 789/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2511 - acc: 0.9141 - val_loss: 0.7690 - val_acc: 0.8232\n",
      "Epoch 790/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2229 - acc: 0.9059 - val_loss: 0.7638 - val_acc: 0.8171\n",
      "Epoch 791/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2265 - acc: 0.9039 - val_loss: 0.8497 - val_acc: 0.8171\n",
      "Epoch 792/1000\n",
      "489/489 [==============================] - 0s 194us/step - loss: 0.2862 - acc: 0.9018 - val_loss: 0.8370 - val_acc: 0.8415\n",
      "Epoch 793/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2479 - acc: 0.8998 - val_loss: 0.7755 - val_acc: 0.8537\n",
      "Epoch 794/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2315 - acc: 0.9059 - val_loss: 0.7696 - val_acc: 0.8110\n",
      "Epoch 795/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2546 - acc: 0.9039 - val_loss: 0.7823 - val_acc: 0.8476\n",
      "Epoch 796/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2416 - acc: 0.9243 - val_loss: 0.7799 - val_acc: 0.8232\n",
      "Epoch 797/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2974 - acc: 0.8998 - val_loss: 0.7764 - val_acc: 0.8354\n",
      "Epoch 798/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2245 - acc: 0.8957 - val_loss: 0.8268 - val_acc: 0.8476\n",
      "Epoch 799/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2726 - acc: 0.8998 - val_loss: 0.7923 - val_acc: 0.8293\n",
      "Epoch 800/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2417 - acc: 0.8875 - val_loss: 0.7691 - val_acc: 0.8171\n",
      "Epoch 801/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2706 - acc: 0.8957 - val_loss: 0.7538 - val_acc: 0.8354\n",
      "Epoch 802/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2355 - acc: 0.9059 - val_loss: 0.7491 - val_acc: 0.8415\n",
      "Epoch 803/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3135 - acc: 0.8875 - val_loss: 0.7709 - val_acc: 0.8354\n",
      "Epoch 804/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2313 - acc: 0.8937 - val_loss: 0.7762 - val_acc: 0.8476\n",
      "Epoch 805/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2494 - acc: 0.8896 - val_loss: 0.8056 - val_acc: 0.8476\n",
      "Epoch 806/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.2676 - acc: 0.9223 - val_loss: 0.7941 - val_acc: 0.8354\n",
      "Epoch 807/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2912 - acc: 0.8957 - val_loss: 0.7820 - val_acc: 0.7988\n",
      "Epoch 808/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2661 - acc: 0.8875 - val_loss: 0.7876 - val_acc: 0.8232\n",
      "Epoch 809/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2532 - acc: 0.9039 - val_loss: 0.7740 - val_acc: 0.8171\n",
      "Epoch 810/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2814 - acc: 0.8998 - val_loss: 0.7957 - val_acc: 0.8110\n",
      "Epoch 811/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2532 - acc: 0.8855 - val_loss: 0.8619 - val_acc: 0.7866\n",
      "Epoch 812/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2536 - acc: 0.9162 - val_loss: 0.8571 - val_acc: 0.8110\n",
      "Epoch 813/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2363 - acc: 0.9080 - val_loss: 0.7999 - val_acc: 0.8293\n",
      "Epoch 814/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2004 - acc: 0.9202 - val_loss: 0.8163 - val_acc: 0.8293\n",
      "Epoch 815/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2691 - acc: 0.8937 - val_loss: 0.8209 - val_acc: 0.8537\n",
      "Epoch 816/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2996 - acc: 0.9039 - val_loss: 0.8065 - val_acc: 0.8171\n",
      "Epoch 817/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2720 - acc: 0.8978 - val_loss: 0.7900 - val_acc: 0.8232\n",
      "Epoch 818/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2721 - acc: 0.9202 - val_loss: 0.8504 - val_acc: 0.8232\n",
      "Epoch 819/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2667 - acc: 0.8978 - val_loss: 0.9561 - val_acc: 0.7744\n",
      "Epoch 820/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.3307 - acc: 0.8834 - val_loss: 0.9118 - val_acc: 0.7683\n",
      "Epoch 821/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3236 - acc: 0.8793 - val_loss: 0.8238 - val_acc: 0.7500\n",
      "Epoch 822/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.2710 - acc: 0.8834 - val_loss: 0.7792 - val_acc: 0.7500\n",
      "Epoch 823/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.3293 - acc: 0.8650 - val_loss: 0.7509 - val_acc: 0.7622\n",
      "Epoch 824/1000\n",
      "489/489 [==============================] - 0s 188us/step - loss: 0.3167 - acc: 0.8589 - val_loss: 0.7779 - val_acc: 0.8110\n",
      "Epoch 825/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.3064 - acc: 0.8773 - val_loss: 0.8096 - val_acc: 0.8232\n",
      "Epoch 826/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.3704 - acc: 0.8691 - val_loss: 0.8026 - val_acc: 0.8293\n",
      "Epoch 827/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2772 - acc: 0.8937 - val_loss: 0.7845 - val_acc: 0.8110\n",
      "Epoch 828/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 145us/step - loss: 0.2765 - acc: 0.8793 - val_loss: 0.8111 - val_acc: 0.7866\n",
      "Epoch 829/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2814 - acc: 0.8793 - val_loss: 0.7799 - val_acc: 0.8354\n",
      "Epoch 830/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3671 - acc: 0.8753 - val_loss: 0.7921 - val_acc: 0.7988\n",
      "Epoch 831/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2916 - acc: 0.8834 - val_loss: 0.8099 - val_acc: 0.8049\n",
      "Epoch 832/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2624 - acc: 0.8855 - val_loss: 0.8228 - val_acc: 0.8232\n",
      "Epoch 833/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2988 - acc: 0.8793 - val_loss: 0.8516 - val_acc: 0.7988\n",
      "Epoch 834/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2920 - acc: 0.8793 - val_loss: 0.8300 - val_acc: 0.8171\n",
      "Epoch 835/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.3131 - acc: 0.8916 - val_loss: 0.8058 - val_acc: 0.8415\n",
      "Epoch 836/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2553 - acc: 0.8978 - val_loss: 0.8114 - val_acc: 0.8354\n",
      "Epoch 837/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2759 - acc: 0.8916 - val_loss: 0.8386 - val_acc: 0.8232\n",
      "Epoch 838/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2745 - acc: 0.8773 - val_loss: 0.8165 - val_acc: 0.8171\n",
      "Epoch 839/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2810 - acc: 0.8855 - val_loss: 0.8123 - val_acc: 0.8049\n",
      "Epoch 840/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2422 - acc: 0.8998 - val_loss: 0.8243 - val_acc: 0.8110\n",
      "Epoch 841/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2729 - acc: 0.9018 - val_loss: 0.8265 - val_acc: 0.8415\n",
      "Epoch 842/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2807 - acc: 0.8712 - val_loss: 0.8246 - val_acc: 0.8354\n",
      "Epoch 843/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2716 - acc: 0.8916 - val_loss: 0.8197 - val_acc: 0.8415\n",
      "Epoch 844/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2791 - acc: 0.8978 - val_loss: 0.8351 - val_acc: 0.8110\n",
      "Epoch 845/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2892 - acc: 0.8691 - val_loss: 0.9036 - val_acc: 0.7317\n",
      "Epoch 846/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3404 - acc: 0.8466 - val_loss: 0.8437 - val_acc: 0.7500\n",
      "Epoch 847/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2793 - acc: 0.8793 - val_loss: 0.8064 - val_acc: 0.7866\n",
      "Epoch 848/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.2946 - acc: 0.8793 - val_loss: 0.8072 - val_acc: 0.7927\n",
      "Epoch 849/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.3382 - acc: 0.8773 - val_loss: 0.8179 - val_acc: 0.7927\n",
      "Epoch 850/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2957 - acc: 0.8589 - val_loss: 0.8362 - val_acc: 0.7744\n",
      "Epoch 851/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2832 - acc: 0.8875 - val_loss: 0.8299 - val_acc: 0.8049\n",
      "Epoch 852/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3042 - acc: 0.8937 - val_loss: 0.8180 - val_acc: 0.8110\n",
      "Epoch 853/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2674 - acc: 0.8916 - val_loss: 0.8148 - val_acc: 0.8354\n",
      "Epoch 854/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2869 - acc: 0.8793 - val_loss: 0.7657 - val_acc: 0.8354\n",
      "Epoch 855/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2789 - acc: 0.8896 - val_loss: 0.7684 - val_acc: 0.8293\n",
      "Epoch 856/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2674 - acc: 0.8937 - val_loss: 0.7976 - val_acc: 0.8171\n",
      "Epoch 857/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2792 - acc: 0.8793 - val_loss: 0.7868 - val_acc: 0.7866\n",
      "Epoch 858/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2639 - acc: 0.8978 - val_loss: 0.7841 - val_acc: 0.8049\n",
      "Epoch 859/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2590 - acc: 0.8814 - val_loss: 0.8095 - val_acc: 0.8110\n",
      "Epoch 860/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2864 - acc: 0.8650 - val_loss: 0.8048 - val_acc: 0.8232\n",
      "Epoch 861/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3017 - acc: 0.8855 - val_loss: 0.8331 - val_acc: 0.7866\n",
      "Epoch 862/1000\n",
      "489/489 [==============================] - 0s 186us/step - loss: 0.3113 - acc: 0.8834 - val_loss: 0.7793 - val_acc: 0.8110\n",
      "Epoch 863/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2779 - acc: 0.8650 - val_loss: 0.7388 - val_acc: 0.8293\n",
      "Epoch 864/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2721 - acc: 0.8916 - val_loss: 0.7487 - val_acc: 0.8415\n",
      "Epoch 865/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.3280 - acc: 0.8609 - val_loss: 0.7539 - val_acc: 0.8049\n",
      "Epoch 866/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.3122 - acc: 0.8793 - val_loss: 0.7381 - val_acc: 0.7866\n",
      "Epoch 867/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2848 - acc: 0.8773 - val_loss: 0.7747 - val_acc: 0.7866\n",
      "Epoch 868/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2710 - acc: 0.8855 - val_loss: 0.8317 - val_acc: 0.7683\n",
      "Epoch 869/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3034 - acc: 0.8937 - val_loss: 0.7675 - val_acc: 0.7805\n",
      "Epoch 870/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2886 - acc: 0.8773 - val_loss: 0.7466 - val_acc: 0.8293\n",
      "Epoch 871/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2770 - acc: 0.8814 - val_loss: 0.7233 - val_acc: 0.8232\n",
      "Epoch 872/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2497 - acc: 0.8814 - val_loss: 0.7197 - val_acc: 0.8232\n",
      "Epoch 873/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2608 - acc: 0.8957 - val_loss: 0.7290 - val_acc: 0.7927\n",
      "Epoch 874/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2635 - acc: 0.8855 - val_loss: 0.7224 - val_acc: 0.8232\n",
      "Epoch 875/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2763 - acc: 0.8834 - val_loss: 0.7101 - val_acc: 0.8110\n",
      "Epoch 876/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.3219 - acc: 0.8814 - val_loss: 0.7176 - val_acc: 0.8232\n",
      "Epoch 877/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2590 - acc: 0.8916 - val_loss: 0.8489 - val_acc: 0.7622\n",
      "Epoch 878/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2726 - acc: 0.8937 - val_loss: 0.7239 - val_acc: 0.8110\n",
      "Epoch 879/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2877 - acc: 0.8834 - val_loss: 0.7830 - val_acc: 0.8232\n",
      "Epoch 880/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2607 - acc: 0.8916 - val_loss: 0.7778 - val_acc: 0.8171\n",
      "Epoch 881/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2973 - acc: 0.8896 - val_loss: 0.7854 - val_acc: 0.8171\n",
      "Epoch 882/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2701 - acc: 0.8855 - val_loss: 0.7778 - val_acc: 0.7866\n",
      "Epoch 883/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2324 - acc: 0.8937 - val_loss: 0.8046 - val_acc: 0.8110\n",
      "Epoch 884/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2322 - acc: 0.8998 - val_loss: 0.8308 - val_acc: 0.8110\n",
      "Epoch 885/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2641 - acc: 0.8855 - val_loss: 0.8115 - val_acc: 0.7927\n",
      "Epoch 886/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2617 - acc: 0.9059 - val_loss: 0.8110 - val_acc: 0.8293\n",
      "Epoch 887/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 184us/step - loss: 0.2564 - acc: 0.8875 - val_loss: 0.7394 - val_acc: 0.8232\n",
      "Epoch 888/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2571 - acc: 0.8793 - val_loss: 0.7390 - val_acc: 0.8232\n",
      "Epoch 889/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2531 - acc: 0.8814 - val_loss: 0.7359 - val_acc: 0.8354\n",
      "Epoch 890/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2951 - acc: 0.8978 - val_loss: 0.7331 - val_acc: 0.8171\n",
      "Epoch 891/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2510 - acc: 0.8896 - val_loss: 0.7484 - val_acc: 0.8110\n",
      "Epoch 892/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2679 - acc: 0.8937 - val_loss: 0.7388 - val_acc: 0.8293\n",
      "Epoch 893/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2516 - acc: 0.8875 - val_loss: 0.7302 - val_acc: 0.8232\n",
      "Epoch 894/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2674 - acc: 0.8855 - val_loss: 0.7202 - val_acc: 0.8171\n",
      "Epoch 895/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3129 - acc: 0.8937 - val_loss: 0.7361 - val_acc: 0.8293\n",
      "Epoch 896/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2404 - acc: 0.8957 - val_loss: 0.7611 - val_acc: 0.8232\n",
      "Epoch 897/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2400 - acc: 0.8957 - val_loss: 0.7625 - val_acc: 0.8354\n",
      "Epoch 898/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2554 - acc: 0.9141 - val_loss: 0.7601 - val_acc: 0.8049\n",
      "Epoch 899/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2787 - acc: 0.8998 - val_loss: 0.7150 - val_acc: 0.8293\n",
      "Epoch 900/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2734 - acc: 0.8998 - val_loss: 0.7192 - val_acc: 0.8354\n",
      "Epoch 901/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2749 - acc: 0.8814 - val_loss: 0.7289 - val_acc: 0.8232\n",
      "Epoch 902/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2829 - acc: 0.8957 - val_loss: 0.7499 - val_acc: 0.8049\n",
      "Epoch 903/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.1992 - acc: 0.9223 - val_loss: 0.7940 - val_acc: 0.8415\n",
      "Epoch 904/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2854 - acc: 0.8896 - val_loss: 0.8055 - val_acc: 0.7622\n",
      "Epoch 905/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2598 - acc: 0.8916 - val_loss: 0.8291 - val_acc: 0.7988\n",
      "Epoch 906/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2664 - acc: 0.8916 - val_loss: 0.8315 - val_acc: 0.8293\n",
      "Epoch 907/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.3081 - acc: 0.8712 - val_loss: 0.7833 - val_acc: 0.8049\n",
      "Epoch 908/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.3113 - acc: 0.8773 - val_loss: 0.6704 - val_acc: 0.8232\n",
      "Epoch 909/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2699 - acc: 0.8753 - val_loss: 0.6670 - val_acc: 0.8476\n",
      "Epoch 910/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2705 - acc: 0.8978 - val_loss: 0.6813 - val_acc: 0.8476\n",
      "Epoch 911/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2659 - acc: 0.8650 - val_loss: 0.6802 - val_acc: 0.8476\n",
      "Epoch 912/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2621 - acc: 0.8875 - val_loss: 0.6879 - val_acc: 0.8293\n",
      "Epoch 913/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2518 - acc: 0.8957 - val_loss: 0.7034 - val_acc: 0.8293\n",
      "Epoch 914/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2605 - acc: 0.8732 - val_loss: 0.7180 - val_acc: 0.8476\n",
      "Epoch 915/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2671 - acc: 0.8834 - val_loss: 0.7055 - val_acc: 0.8476\n",
      "Epoch 916/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2702 - acc: 0.8834 - val_loss: 0.7222 - val_acc: 0.8293\n",
      "Epoch 917/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2411 - acc: 0.8957 - val_loss: 0.7731 - val_acc: 0.8110\n",
      "Epoch 918/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2880 - acc: 0.8855 - val_loss: 0.7724 - val_acc: 0.8049\n",
      "Epoch 919/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2523 - acc: 0.8834 - val_loss: 0.7744 - val_acc: 0.8537\n",
      "Epoch 920/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2619 - acc: 0.8753 - val_loss: 0.7702 - val_acc: 0.8049\n",
      "Epoch 921/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2694 - acc: 0.8875 - val_loss: 0.7848 - val_acc: 0.8476\n",
      "Epoch 922/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2783 - acc: 0.8855 - val_loss: 0.7727 - val_acc: 0.8354\n",
      "Epoch 923/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2458 - acc: 0.9162 - val_loss: 0.7937 - val_acc: 0.8293\n",
      "Epoch 924/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2535 - acc: 0.8978 - val_loss: 0.7995 - val_acc: 0.8293\n",
      "Epoch 925/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2708 - acc: 0.8896 - val_loss: 0.8324 - val_acc: 0.7683\n",
      "Epoch 926/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2527 - acc: 0.8937 - val_loss: 0.8419 - val_acc: 0.8171\n",
      "Epoch 927/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2732 - acc: 0.9080 - val_loss: 0.8682 - val_acc: 0.8110\n",
      "Epoch 928/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2405 - acc: 0.9059 - val_loss: 0.9173 - val_acc: 0.7988\n",
      "Epoch 929/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2418 - acc: 0.9080 - val_loss: 0.9341 - val_acc: 0.7622\n",
      "Epoch 930/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.3009 - acc: 0.8937 - val_loss: 0.8474 - val_acc: 0.8110\n",
      "Epoch 931/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2614 - acc: 0.8978 - val_loss: 0.8244 - val_acc: 0.8232\n",
      "Epoch 932/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2619 - acc: 0.8998 - val_loss: 0.8170 - val_acc: 0.8171\n",
      "Epoch 933/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2614 - acc: 0.8834 - val_loss: 0.7950 - val_acc: 0.7805\n",
      "Epoch 934/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2372 - acc: 0.9121 - val_loss: 0.8082 - val_acc: 0.8293\n",
      "Epoch 935/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2559 - acc: 0.9141 - val_loss: 0.8043 - val_acc: 0.8110\n",
      "Epoch 936/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2815 - acc: 0.8896 - val_loss: 0.8180 - val_acc: 0.7805\n",
      "Epoch 937/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2570 - acc: 0.8957 - val_loss: 0.7549 - val_acc: 0.7866\n",
      "Epoch 938/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2361 - acc: 0.9018 - val_loss: 0.7829 - val_acc: 0.8354\n",
      "Epoch 939/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2415 - acc: 0.8998 - val_loss: 0.7621 - val_acc: 0.8293\n",
      "Epoch 940/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2674 - acc: 0.8957 - val_loss: 0.7965 - val_acc: 0.7927\n",
      "Epoch 941/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2417 - acc: 0.9039 - val_loss: 0.8203 - val_acc: 0.8232\n",
      "Epoch 942/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2338 - acc: 0.9162 - val_loss: 0.8560 - val_acc: 0.8171\n",
      "Epoch 943/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2886 - acc: 0.8773 - val_loss: 0.8673 - val_acc: 0.8293\n",
      "Epoch 944/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2615 - acc: 0.8937 - val_loss: 0.8162 - val_acc: 0.8354\n",
      "Epoch 945/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2468 - acc: 0.8998 - val_loss: 0.7866 - val_acc: 0.8171\n",
      "Epoch 946/1000\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "489/489 [==============================] - 0s 171us/step - loss: 0.2256 - acc: 0.8916 - val_loss: 0.7886 - val_acc: 0.8476\n",
      "Epoch 947/1000\n",
      "489/489 [==============================] - 0s 163us/step - loss: 0.2398 - acc: 0.8998 - val_loss: 0.8260 - val_acc: 0.8110\n",
      "Epoch 948/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2554 - acc: 0.8793 - val_loss: 0.8218 - val_acc: 0.8171\n",
      "Epoch 949/1000\n",
      "489/489 [==============================] - 0s 131us/step - loss: 0.2460 - acc: 0.9059 - val_loss: 0.7983 - val_acc: 0.8171\n",
      "Epoch 950/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2459 - acc: 0.9100 - val_loss: 0.8139 - val_acc: 0.8293\n",
      "Epoch 951/1000\n",
      "489/489 [==============================] - 0s 133us/step - loss: 0.2510 - acc: 0.9018 - val_loss: 0.8501 - val_acc: 0.8232\n",
      "Epoch 952/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2470 - acc: 0.9080 - val_loss: 0.8821 - val_acc: 0.8232\n",
      "Epoch 953/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2004 - acc: 0.9162 - val_loss: 0.8976 - val_acc: 0.8293\n",
      "Epoch 954/1000\n",
      "489/489 [==============================] - 0s 135us/step - loss: 0.2298 - acc: 0.9039 - val_loss: 0.9081 - val_acc: 0.8171\n",
      "Epoch 955/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2336 - acc: 0.9162 - val_loss: 0.8596 - val_acc: 0.8415\n",
      "Epoch 956/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2142 - acc: 0.9039 - val_loss: 0.8392 - val_acc: 0.8293\n",
      "Epoch 957/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2569 - acc: 0.9162 - val_loss: 0.8904 - val_acc: 0.8171\n",
      "Epoch 958/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2264 - acc: 0.8998 - val_loss: 0.8647 - val_acc: 0.8293\n",
      "Epoch 959/1000\n",
      "489/489 [==============================] - 0s 149us/step - loss: 0.2411 - acc: 0.9080 - val_loss: 0.8524 - val_acc: 0.8171\n",
      "Epoch 960/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2227 - acc: 0.9141 - val_loss: 0.8612 - val_acc: 0.8049\n",
      "Epoch 961/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.1948 - acc: 0.9080 - val_loss: 0.8753 - val_acc: 0.8110\n",
      "Epoch 962/1000\n",
      "489/489 [==============================] - 0s 157us/step - loss: 0.2703 - acc: 0.9059 - val_loss: 0.8655 - val_acc: 0.8171\n",
      "Epoch 963/1000\n",
      "489/489 [==============================] - 0s 137us/step - loss: 0.2357 - acc: 0.8916 - val_loss: 0.7989 - val_acc: 0.8232\n",
      "Epoch 964/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2550 - acc: 0.9080 - val_loss: 0.7939 - val_acc: 0.8232\n",
      "Epoch 965/1000\n",
      "489/489 [==============================] - 0s 145us/step - loss: 0.2548 - acc: 0.8957 - val_loss: 0.8430 - val_acc: 0.8171\n",
      "Epoch 966/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2409 - acc: 0.9182 - val_loss: 0.8367 - val_acc: 0.8415\n",
      "Epoch 967/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2079 - acc: 0.9141 - val_loss: 0.8648 - val_acc: 0.8354\n",
      "Epoch 968/1000\n",
      "489/489 [==============================] - 0s 190us/step - loss: 0.1997 - acc: 0.9182 - val_loss: 0.8630 - val_acc: 0.8110\n",
      "Epoch 969/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2229 - acc: 0.8978 - val_loss: 0.8888 - val_acc: 0.8049\n",
      "Epoch 970/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2490 - acc: 0.8978 - val_loss: 0.9083 - val_acc: 0.8232\n",
      "Epoch 971/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2246 - acc: 0.9059 - val_loss: 0.8766 - val_acc: 0.8415\n",
      "Epoch 972/1000\n",
      "489/489 [==============================] - 0s 171us/step - loss: 0.2682 - acc: 0.8957 - val_loss: 0.8673 - val_acc: 0.8354\n",
      "Epoch 973/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2975 - acc: 0.8875 - val_loss: 0.8505 - val_acc: 0.7927\n",
      "Epoch 974/1000\n",
      "489/489 [==============================] - 0s 184us/step - loss: 0.2772 - acc: 0.8937 - val_loss: 0.8095 - val_acc: 0.8354\n",
      "Epoch 975/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2466 - acc: 0.9162 - val_loss: 0.7887 - val_acc: 0.8232\n",
      "Epoch 976/1000\n",
      "489/489 [==============================] - 0s 177us/step - loss: 0.2238 - acc: 0.9080 - val_loss: 0.7920 - val_acc: 0.8110\n",
      "Epoch 977/1000\n",
      "489/489 [==============================] - 0s 180us/step - loss: 0.2783 - acc: 0.8732 - val_loss: 0.8283 - val_acc: 0.8171\n",
      "Epoch 978/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2453 - acc: 0.8937 - val_loss: 0.8427 - val_acc: 0.7927\n",
      "Epoch 979/1000\n",
      "489/489 [==============================] - 0s 182us/step - loss: 0.2444 - acc: 0.8998 - val_loss: 0.8433 - val_acc: 0.8232\n",
      "Epoch 980/1000\n",
      "489/489 [==============================] - 0s 179us/step - loss: 0.2373 - acc: 0.9182 - val_loss: 0.8050 - val_acc: 0.8049\n",
      "Epoch 981/1000\n",
      "489/489 [==============================] - 0s 198us/step - loss: 0.2435 - acc: 0.8998 - val_loss: 0.8626 - val_acc: 0.8354\n",
      "Epoch 982/1000\n",
      "489/489 [==============================] - 0s 173us/step - loss: 0.2767 - acc: 0.9039 - val_loss: 0.8707 - val_acc: 0.8110\n",
      "Epoch 983/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.3075 - acc: 0.8793 - val_loss: 0.8245 - val_acc: 0.8476\n",
      "Epoch 984/1000\n",
      "489/489 [==============================] - 0s 139us/step - loss: 0.2333 - acc: 0.9059 - val_loss: 0.7730 - val_acc: 0.8293\n",
      "Epoch 985/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2731 - acc: 0.9100 - val_loss: 0.8063 - val_acc: 0.8537\n",
      "Epoch 986/1000\n",
      "489/489 [==============================] - 0s 147us/step - loss: 0.2331 - acc: 0.9080 - val_loss: 0.7971 - val_acc: 0.8232\n",
      "Epoch 987/1000\n",
      "489/489 [==============================] - 0s 175us/step - loss: 0.2505 - acc: 0.9039 - val_loss: 0.8236 - val_acc: 0.8293\n",
      "Epoch 988/1000\n",
      "489/489 [==============================] - 0s 169us/step - loss: 0.2466 - acc: 0.8978 - val_loss: 0.8405 - val_acc: 0.8171\n",
      "Epoch 989/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2407 - acc: 0.9039 - val_loss: 0.8266 - val_acc: 0.8415\n",
      "Epoch 990/1000\n",
      "489/489 [==============================] - 0s 155us/step - loss: 0.2360 - acc: 0.8978 - val_loss: 0.8385 - val_acc: 0.8354\n",
      "Epoch 991/1000\n",
      "489/489 [==============================] - 0s 165us/step - loss: 0.2464 - acc: 0.9018 - val_loss: 0.8607 - val_acc: 0.8293\n",
      "Epoch 992/1000\n",
      "489/489 [==============================] - 0s 161us/step - loss: 0.2682 - acc: 0.9059 - val_loss: 0.8476 - val_acc: 0.8171\n",
      "Epoch 993/1000\n",
      "489/489 [==============================] - 0s 153us/step - loss: 0.2501 - acc: 0.9018 - val_loss: 0.8506 - val_acc: 0.8110\n",
      "Epoch 994/1000\n",
      "489/489 [==============================] - 0s 167us/step - loss: 0.2348 - acc: 0.8937 - val_loss: 0.8556 - val_acc: 0.8537\n",
      "Epoch 995/1000\n",
      "489/489 [==============================] - 0s 159us/step - loss: 0.2915 - acc: 0.8875 - val_loss: 0.8416 - val_acc: 0.8232\n",
      "Epoch 996/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2299 - acc: 0.9080 - val_loss: 0.7994 - val_acc: 0.8293\n",
      "Epoch 997/1000\n",
      "489/489 [==============================] - 0s 141us/step - loss: 0.2236 - acc: 0.9182 - val_loss: 0.8377 - val_acc: 0.8110\n",
      "Epoch 998/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2852 - acc: 0.8896 - val_loss: 0.8442 - val_acc: 0.8232\n",
      "Epoch 999/1000\n",
      "489/489 [==============================] - 0s 151us/step - loss: 0.2424 - acc: 0.9018 - val_loss: 0.7707 - val_acc: 0.8293\n",
      "Epoch 1000/1000\n",
      "489/489 [==============================] - 0s 143us/step - loss: 0.2828 - acc: 0.8937 - val_loss: 0.7733 - val_acc: 0.8110\n"
     ]
    }
   ],
   "source": [
    "history = model.fit(x_train,y_train,epochs=1000,validation_data=(x_test,y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 95,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x22e9a6269e8>"
      ]
     },
     "execution_count": 95,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXe4FcX5x79zC/2CVDUggoooSlEQQURUNGKJYAeNQWNi\nVMDCzxgsSVSwRWMsGLAGsRHAAsaCDSOIoqh0gyJKEUGaUgRum98fc8ednTMzO1tOvfN5nvPsOXu2\nzO7Ofvfdd955h1BK4XA4HI7aQ1G2C+BwOByOzOKE3+FwOGoZTvgdDoejluGE3+FwOGoZTvgdDoej\nluGE3+FwOGoZTvgdDoejluGE3+FwOGoZTvgdDoejllGS7QKoaNGiBW3Xrl22i+FwOBx5wyeffLKR\nUtrSZtmcFP527dph3rx52S6Gw+Fw5A2EkJW2yzpXj8PhcNQynPA7HA5HLcMJv8PhcNQynPA7HA5H\nLcMJv8PhcNQynPA7HA5HLcMJv8PhcNQynPA7HI7keP99YOHCeNuYN499HGkjJztwORyOPOXoo9k0\nzljeRxwRfxsOI87idzgcucvIkcCQIZnd55YtQIsWwAcfZHa/GcQJv8PhyF3+8Q9g0iRm/X/6KbB0\nKbBzZ3r3+f77wKZNwJgx6d1PFnHC73A4cp+nnwa6dwcOOQS46KL07ou7mAhJ736yiBN+hyPXePZZ\nYMKEbJcit1iwwPv+zjuZ2WdSwn/FFUCXLkBFBfv90kvAY4/5l/nkE+Cvf01mfxY44Xc4bKioAKqq\nMrOvCy4ALr44fduvqgIqK1PnV1amHuPu3cnum5/HXbvCNd6K7p10N/qatl9erv5fPE/l5cD27d45\nHjcOWLQIeO89ttwZZwC//71//R49gFtvzViDthN+h8OGOnWAfv3Svx9uFaaTI48ESktT55eWAscd\n5/2ePh2oVw/47LPk9s3PY/36wJ//bL/eP//pfc+U8MsW/44dQN26wM03++e/9RY7T7wxuG5doKws\n9Rx/9RVbzkR1deRih8EJv8MRxFNPsen776d/X7t2pX8fn3ySOu/JJ9l01ixv3n/+w6Zz50bf14oV\nzLUhws/jbbcBL7zgzZ8xA1iyJPq+kmD+fODtt9l3Wfh//JFNH32UTbdsAR5/HHj1VfZbFQUknjub\nvgkZeqt0cfwOh4lNm4Df/CZz+1O5YNLNl1+mr8G0a1fm9tBZ6WedxY65uBgYMMBum+m0+A87zPsu\nC39xMZtycT79dGD2bDYFmIUvv7H16uV9L7Kwsysr2VtRmnEWv8NhItOdiKK4esaOZSLVuTPQoIFn\ngcp8953a1aB7y+DCx6eVlUCrVizCxpbt24OX2bgROPdc+21mq2MX3y93x8yezabl5Wxapw6wbZt+\nff7gMJEhi98JvyO3+PxzYPPmbJfCQ74RFy1KbttbtwKLF/vnrV3r3/fixWy5uXP9ZfngA0+ARoxg\n08WLWSOoLjrkrbfUjbW20Ss7dwIbNgCXXab+X4y8kTH5rr/7Dpgyxa4MQOaEXz4v/PzLx/LTT2xa\nWmpuD5GFf86c1PqVoTc+J/yO3KJTJ6/Lfi4g35hduiS37UGDmJUu7kN0Nfz0E/u/WTPmMhg9ms3/\n73+Bo44C7rlHvV2dkOtcDbbCzwVP1YFq506gWzf9uqY3mXXr7PbPyZbFHyT8deoAJ5wQvD6nT5/U\nhmJn8TtqLStWJLu9N94A7rgj2rq6G3HzZub7N73aB/HRR2w6bpw/aoXDt83LwC3qLVvYlLsaZLiQ\nl5cDl1wCrFrFfsvC/+CDwPPP2wu/TviA1IfB0KF+N49J+L/7zm7/nGxb/D/8wKKSOKLFb0L1wHz9\ndf9vZ/E7CpY4YhmG6mrmPz7pJOCGG7z5mzfbd/vXCf/tt7Non0ceMe/f5OPm/vYRI4Bhw1L/51Ek\nHC7czZqx6YYN+m0DrKPTE094MeOyqF
x5JXD22fYhhCZRks/nxIks4oWzZYt+PzqLnwuqTNIhj7b1\nUawLYrsIP/Yga111vXbu9NcRZ/E7CpL//hdo3JiF7skkfUOPGQO0bOmfN3Mm0Lw5awS1QXcjchE0\nNdiNGcPiubmFLrJyJYsYMiELP7dAucW7caN6Pb4cf1DwY9A9hGytTJMoqURatMzbtgWuuUa9rk74\nGzZUz0/S4n/6aVYf5bYWINXi150nfuy668Hh4bHyumVlwftIGCf8DnteeCF+nPWcOWw6cyab7trF\nEnFVVvrdAdXVwP33+wXl+edZki5b5AbDNWuASy9NXe7jj5k7SIVK7B5+GPj+e/bdJPzPP8+md9zB\n3g6++cb776uv9OtxfvjB/1uMrgH0lipfjsfLRxV+LrCUAsuWAc89py+rSvjlc8fj32V43LwtSQr/\nK6+wqWoMAZ2rR4Zb/Lo3FBNff223j4SxiuMnhAwAcD+AYgCPUUrvlP5vCuAJAPsD2AXgt5TSxTbr\nOvKIs85i0zg3HhdKbt3fdx9w/fWsYUyMl3/hBeDqq9mNcd99bN7ZZ4fbv2zV9+8PLF+eulzPnvrt\nqm5EMarFFJvdvDmb3n03m3bsCPzvf+y7jV89SPh1Qs6Xu/VWNuXHoHrzELdnonNns59eJXq2b3DZ\n7LQlv0Wp/uPoRHnHDjblYZ1xyBWLnxBSDOAhACcD6ARgCCGkk7TYDQDmU0q7APgNmNDbrlv7oJT5\nd8eOzXZJMo/sfuBisnat/8bhonb//f7lbRkxwms85YgWty1B+x0+nPmzZcrLvbcazrJlXmPq8ccH\n73voUP9vWfh37PAablXLcWbNYvPk88GRxaaoCPjXv7ztXH55cP8ClfBfd53/d1KuPJVI79rFHvT/\n/ne4bZkewLbCz+eLobKm6B4Tf/xjtPVCYuPq6QlgOaV0BaW0HMAkAAOlZToBeAcAKKX/A9COELKn\n5bq1j6oqVkmuvDKz+/36a5bT3BRrvGhRcKNhEJs2MTFU+Txli79xYzbdts0v/LIVKPq7X3uNuWdM\nqB6qskUmu0pUDwabB87Ikd735cuZ/17XF4GHZNogl5cLkSjCDz+cup5OzHQuFVnUKQWuvdaujByb\nxvKk3Biq7cyfz8oQNnqLR0rZvEUGWeM84+bQodGT202fHm29kNi4eloDWC38XgPgSGmZBQDOBDCL\nENITwL4A2liuCwAghFwK4FIAaNu2rU3Z8xdegWy6cCfJfvt533UVvUsX1jtz/fro++nTh1m3BxzA\n0gGIyBZ/kyZs+uOPfqGTY9RFt8cpp7Dprl0sIVZUBg70p/ht3z71vNi8eovWbocObLpmjXpZm96b\nOmSLH2DRRXFRHWNYd56N8CflxlC9ffA2k44dw23L5Gaytfg5vN3nnHPYm5iY9yjHSEp57gSwByFk\nPoARAD4DEOrxTil9hFLag1Lao6UciVFoZEv4RSZM8NwoMrwCi4QRgmXL2HT5cmZVn3mm1yOVC9+D\nD7Jpo0ZsOnGi2Ueq8k+LYvPdd+xGHT2aJf+yQXbFqLCxUnfuZOGi553nzYti8YnnuEeP1P95fQkS\nUJv2A7HuTZumLospVFUmCf82wHIGhUkJAQBvvum9dU2e7Dc2xoxh81SIrqcLL2TtL6IhQQgLBuAB\nCbZvLCUlrB0oSmNvhrCx+L8FsI/wu03NvJ+hlG4FcDEAEEIIgK8BrABQP2jdWkmQ8G/a5DUMpgue\n7/2qq+yWj5ou+G9/A158kTXeqjopiSKlE4+tW9U30apVwB57sO88xe9f/hKtnDpsb3bZxSCHYnKC\ncr1z+JuQiMriV1FZyc6ZiTp1vFj0hx4KV07dPpOiaVPz//vv7//9y1/6f19+OUtPAXj1QswFtHEj\nG1NXbhz/9a/9D+zdu1kk0qOPsutpK+TFxexaiZ28dLRrF63tKSY2JufHADoQQtoTQuoAGAzA54gi\nhO
xR8x8A/A7AezUPg8B1ayUm4Z80iVXKOKlw00FU4efjlv773+xhJguzrlOMSJMm6v137Ro+FDAs\nUf3Shx+unm9q4OTRIYDahcXXDRLZDz9UPzgAds6uvTb4jSTs9U5qHIFu3VjMv4mgh5IYFy/zwgus\nb8f776c+HOV01eK1atIk9QGjo0RhT6uMHoC1u9m+oSZIoPBTSisBDAcwA8DnACZTSpcQQi4jhPC4\ntoMBLCaELAOL4LnKtG7yhyFRWckssHQPyhwVk/BzIVPFFWcTnTX+3Xee2wbwf1chu2zEm0vXwQdI\nDW3khOl5GpadO5O/KU1lFV0UqiyaOuHnPXltKSoKFk9b63blSmD8eL3wc1fevfem/vfAA15fB4CF\nvV55JXDooeZ9VlSwh+Sdd6ofzI0aMf+6XBeXL/dChkePZm+iJqKGLauE/4IL9MvrOqqlEas4fkrp\nqwBeleaNF75/AOBA23XTzr/+xXyuO3Z4FmcuwW9clS+WV+Rs+P9NFV13Y59+OhtgYmBNsFbYSCVR\nCE2NYStXquf/8IP3Wp8ElHrX5bbbUnOpxMUk/L/6lfe9f3//ICVAaggsZ599wmU0TbJunXoqayDV\nZQTdvZuNuLXXXqn/HXKI/82mZ8/g9glCmBHy178Cf/878ItfpC5TVgYcc0zq/J49vbeqGTPUvceb\nNvWMk6gGhUr4TT3FbVxCCVOYPXe57277di9XORfbO+9MbhDlqJgsfl7Z4kR/yHTtyvyeprIAZreG\nzuLn/sk//hHYd9/wZTNZQiJiyKRMnHFhX3vN/5tn3+zePT2v4CYx4WG0zz+vdldUVbEH3RVXsN93\n3cWmpvosh+a2b5+M8HOfOX8T07UpVFSwuqx6g2nY0C+SQUnOhg9nx15R4b/HZfhbhggh+g5sIuI9\nwHv1qhDDaOVyq+5d1cOAIwq/3HcjTRSm8HPLtagIGDWKfedun+uv9y9jyzvvsG3s3s2yKYqv5S+8\nEM7XbBJ+XvG+/FLdyzQsW7Ywt9H48er/V6zwhoRT+Y4/+oiJh87i57HwusiJTBGU90bHb3/r/714\nMRPnTz+NXyYVn38evEyDBuqHWVUVS2/B4Q3bpge2/ACZMCEZw2fKFPY2xC12nSsOYKInCv+VVwLP\nPJM69m/QA6mqyhvlSh4NSyToAWLCtpF6772977wvCkcU+Xff9Xpr6xCFP07ZQ1CYQy9yq4oQ77tc\nQaqr7a3qZcvYq/fFFzNrgvsOKWXizFMZlJfbXTgbi//229knbl4S0X2ggsc9U6qu9EceyeL/ZcuY\nE8faTjLnStjUvhxVgjCxkTUbNGkCtGmTOn/7di8NA2An/PIwfk2aJNf2dfLJ3neTNS1b/HXrAuef\nz76LIhl0P1ZWsgdZeblZ+ONgK/xin5iRI4Ebb/R+i8fUr1/wtkTh51qSZgrb4heFX7ZYH32UuSdO\nPDG1k5EM958uXZra4UOs8LaRDTbCz3nySftu3I884q+AgLmXrsgpp6S+rvPzuGKF39Wj8+eGRZcY\nLcp2koyCCgqFtMU2A6jMoYeqGzjlBxIPe6yuZha0ClM7kin6JSx8QHXVw/yrr/zCL4q1aCjZCL+N\nxR/njcbmHl60yAu3LivzRkDjmNw6Krhr6uST7ccdjkntEX75SX755ax36Ftvee4gmbVr2bZU25P3\nBdgLP+8Vy1MaUAp8W9O9Qa7IF12kH2lJ5g9/SO3JaevPfe21VHeVeDzi91tv9c5NHJKq5FFyHv3h\nD/r/ogh/797+382b23UQk7niCr0gy5Y6t/ht316feopN5V7T6eaLL/TCL4pk167e908/ZW+r//wn\n0Lo1m8cHIq+u9sQ96WE6beo0Id6bVFVV6rnXXYsnn1TPP/poltcoTIe5mBS+8PPvJlFWiePChazC\niY044vbkfQXtQ0RM4LRqFRP2Nm2Y2yjp0MQw25OPTRQauXG3dWt9
u0E+oOu1DNgLP7/B33orNeXw\nn//sZf1UIXdC4oidqeShDGWLnz8gqqrsRPzXv2bTdAQQBCHuS2XxN2jgvw8PO4zlrbn8cmDqVDbv\niCO85fk2VCkr4vQpUPWYlqlXzxN+1UNXZ/GL2WdFiotZQ73KvZcmCk/4V670RzvoXD0iXNDvvNOz\nxnkSsLlzk7P4V61iIWhyeXnnjn//Wy/UFRVsfE4exbB7N3O5BPlr4zxIxA5VqmNLylWTblQRTaa2\nGFvh5zd/SUlquKLsW5f5/HMvX76O997zR6iIwv/FF57AVFczl6XM1Vert8vrbCaFX6w/KovfZGn3\n6sUaSIcP986r6W1K1xFQxZtvet9nzrQL0th/f7/FLxuOYV09WaDwhP/yyz2/u63FTwgT+uuv98Kp\nuBumeXP1GwRHFNagfCWDBqVmPdy922uYvOkmfWPVhAnALbewfgmVlexhceutwW6gMMIvnyPxBlId\nmzyIRKYQh1EU0Q3SrvKXm1xgKuEfMSI11S6/+YuLUztR8f9++1u1VVpaygZMN1FW5g9XFIV///1Z\n+GzPnuxtQyXiurYhXid+/etwSe6iphE//XSgk5CNXefjN9GxI7v/+INw0SL9srYBB40aAcce6/3e\nb7/UCB0Zfr5KS1kywsmTU899NnNwWZL7JQyL2KAm9lCURU30OYpx/jzHCg+ba9FCHSUEsKRc4oNg\n0CB/a78IIeqG1vJy/42gE2o+ctT06azS8dffIOs0TM4fUWTGjWN5RACvQU2Gp7TNNLr4+iOViV9T\nb0QxGkWFyg00dKjfOgT8Vj0h/vzz/L/HH/dCiOMiCn9REbsuc+eyiDOZ0lJ15ybAq2O/+EW4h/ew\nYak59m249loWucJ7yoqhkDYWv4iqI5iMrcX/8cd+6zzoLQ3wd76cPZvd83JjcqYGg49B4Qm/GBol\nXhBZuEQxWLXKi9vmF40/2Rs18tadPdu/ncmT/Rd5/nz1jWSyQHbv9ot9kIXO48B5xynTtr/4gvXq\ntEUUfu4uA9jNEcZvqho4PBPoLC3ZIguK+lD1IO7ePXWe+Lovb9dGRMIS5+1NtZ2iomCLX76WUdxD\n/FwMHMjcmWLkGbf4o46qpkLXiCoyeTJw0EH+ea1asemCBWxsaBU2UURO+LOAWOFNwi/+/uADLySL\nXzT+ZK+o8C8rDwBic5FNnYvKy8MJP4ffsCb3UseO+lGXgNQcIaLwi9ZiaWm4tLs8RjvT6K6F/EBI\nque22MAnk+Zu+PPm6VP+o6hIPbYwR+zgGCT8UaPERPh5IoT1+BXdO2Et/qA8PjaceirLmc8ZMICN\nHcGPrUsXfZI9G2zesvffX21MZIjCEv6SEv/Qa2Il5eKtGsVI5KOPgCee8P4fOZLFseuIK/xySlbb\nDin82HjUkTgOrIxO6ORh+0Th/+AD77vO1aMjnY1bpvOtE9uwFr8tJuG3cUkA/nDhEBxxhCGJZVWV\nemQuThiLX35ziRIsYNoHvza2fQr23jvVtfXee+plx40D/u//Uufz8FDOa6+l9uWJ2oOWUruka8uX\nez3ms0BhCb8smiqL3+amv+QSv9CZOiypkodVVLAK8PXXzN1iusByBkFdMjIZ+aEm3ujbt3v9AgD9\n67GcP0UeipBTv344i7+0lN2Mv/+9/TqTJsUfdu6mm9TzZbFKWvh5vRMF3Fb4Y5DyvHjtNa/tR+T9\n91lwAEcU/pKS1EgzEVkAxXOpa2SXMbm96tZl+589225bqjKVlanHFCguTn1DOftsNkZEEGGNl7Fj\nmas3Tygs4ZeJKvzi8oB67FiOKjZ3xw6WIXS//Zi7Rc4HIyIL6hdf2JVPPI4hQ/z/lZX5Y4J1wi9X\nbp3wl5XpLf5TT02dV1wM9O3rPze8oVjHoEGp6SXk/OeDB6euJ54vndUot4OEFX5dR7OTTmJT3o4i\nKjH3F2eSAQPUXf6POsqf/EsU
foC91cpvS6edxqby25Io/LprKnbEAoLbO0aOTPW3m5C316yZl7hO\npKQk9Vo/95xdnwddW4YuMGDYsNTjzmFqn/DbEidfy7Zt9qkSog5sLj4wxJzmKmyFX04DzOH5UWy2\nAajTTsuDXMjYpK9QuRlsXqtl4Q/jp162TJ+3fdQo9lZ38MH++SNGqLNRmti8WZ3oTJU6OC6y8AOp\nAjllivfWuGmTVzbxGuiOcc4c/5jNSTd0ixb/11/rfV58JCx5XlTWrtXfI3lG7RP+TMTY/vRTcDxw\nXMJ0UtEJv+25aNhQ/+BUbUO1bJCVJW/n6KNZz2DRchZF59lnWRy6jcDGsfg7dNDvo04dv9XLO9Pp\neuWaaNpUfY7SMQSnSvhffNHfCaxePa+Bv1kzr2ziNahfn7nX5HDVBg381y1p4Re3Z3qTVFn8Ud18\nd93F2hfCPtBl/vWvjKVeNlHYwi/65rlQZiIX/0EHpX8/SQi/LTNn6gdYEWOyTfsLesjI/8+axQRU\nfHMSRWfIEJZ7xiZ6Rj5XtsLcv7/5OspvO3yQ+pYt7bZvQzrS9KqE/5e/9Hpim4Y+FB/q9eqxkax0\n4xzzDnVhOonZYGu1qyz+qETpv6Dioov87S1ZorCFX+Sf/2SvrrYVQdcRy5YkcumbCCP8opXy7rv+\n//ig6zKXXAK8GjBw2tixwIUX+ueddhrQuXPqsjYjK6nQ5XjhyKKycCHr6yA2FooW//TpXgew+fPN\naauDom1kAeJuu3wRftU5nz3bnOlUbFPh9UpXxjffZA+TpIcW1F0XeawDcTS1KMyZE33dHKewhN80\n9uisWXYJmDi2qZB1mAamSIIwefC5ZXrDDan5wXW9XY8/PriHq2q821NOSV1OlZpA1VALsNdg0eUh\nWtUq4ZffFDp3Zm9cffowd9Fhh/nP1a9+5bkKunY196oNEn7Z4ucZPw87TL9O8+apD0sT6XBNqix+\nTp8+5ogk0VDgwq8rY5Mm6hxC6UJuIK6ujnf+5IyrBURhCX/QKFDr1tlbADzXeVTiZAjUcaAwrHGQ\nKInulpIStrwq1YHutdnmhiktNb92c0tQFW3z2GNecjqRCRP8D02xHGFjyGfNYj2yTVZz797AGWeo\n/zOdY0pTzxFP4dGihX69jRuBiRP1/8ukw2UoduAKy+DBXhw8F/5sDWVqyrAK+NM3O3wUlvDb+P5s\nXSQ81znA3CPPPReuLDwKhoteUNlMbyucPfe0379ojZpiklU3/003MWs+iNJSliRMlYQMYD7eO+5Q\nd6EvLraLlW7a1EuOFjXT6E03AccdpzcMxD4Q4nVS7e/tt6P1N5g50xusJAIPYji6IqE4cZPFbwM3\nagIaOi+8MFxXjtAENRrHtfgLmMI6K0HiSoh9D0ExKufII/WuCR3cvcCHytM1Qvbty6aqHoYyBxxg\nv39RVE0NoKpzNnq0WpTPPNP/u7SUnVOdu4QQFvLIH1hifvkwlhjvnCNnx7SlUSM2ZrLYTV+kZUsv\ndbP4pqey+I8/Png4SxXHHsty1YSlJqrnSjyIhWBx4rEzwahyC4VBJ/xSj9qnn2YvdonDxTyo/aO6\nWu/KtCXp9okcoXYJP6X6TkoyojUhVzCbKAVu8fObQxdZ8+abrFHQphHvtNPYYOAq5PXF32JKXBn5\nnK1dq1+2fXv/b/E82DwYxRzqYSyxAw9k5Ro50n6dsDzwAIs9F7vz50KyrebN/eMCX3opqjf/GG+b\ncS1+3k9DNCg2bgReeSVeuWzh91JQb/LqatZOJfZiD8u6dV7G3gIi90cMCEOSA0uI25K3u+eeqXlu\nANaYyKNJuFXELWed1V23LvvYuD322AM45BD1f61a+Sv4Qw957ho5QumZZ7w0EvKxqcIzOc2asYZL\nHmIpWox3382E0/QAEN1nYUXHVK4//9kcgmhDSQk7h2LcfC4IP+B38R11FKoaxuwj8o9/MPFWpX
O2\nQWXxp6O/gQ4u/EGdLMX001ERB8IpIArL4teJZ5SMfiUlqWLCrUGdb1EcM5NbI7zyicKvemMQLXRd\nVIWpEoodWaZP9z8g5IfO+ecD997LvocR4F279G0dbdowd4qhUXzbNqAaxNtvSFeD1vC69Vbgd7+z\n3k5FBetjV1Xlz0sHwO8aSGgYzB07Emzr33tv6zx+Wg48kPUIjtq/w9LHnza4+8VW+B0pFJbw6yz+\na66Jtq158/wxzfPmsY/OEhQfCFz4+V0qiq8q26co/Lq3A92Nduqpfv83If6HS1gfv8iqVV762BUr\nIseV//ADazb5M0Z7ZQzB7NnsheE//4m0ex+9ezPtuPxy1uTju5y33up1SErI4m/UiA1CFRVf+qaD\nD44v/HHhBUi6Y5YtUYR/+XI2fKMDQG0R/iihmSUlrNFPHDB7r72YCOoEQRRFk8WvevUU31bEgSpE\nZB87Z9w4f8OrLPym6Icg4d9nH+Dll9n3oUMjd7/nmaknQXAFhXA18CzRuvExwsDTBvGoSl+zT0mJ\nl5QtAeHnm3j99ejb8NkJbdpk35Dl2S2zNbYsD78991zzcqIxtP/+LGGiA0BtEX7bTInHHRe8LcDO\n4ucNctxKD0rTKz40zj3XG/ydU1amjkiilIlzly5etkg5z3rYcE6Zvfdm+znxxNg9SSkES799e+t8\n9KbOplHhgVspfe34NYvblwNe+p44+AJLCMm+xf/HP2a3/aNjR7Z/U4fM9evDRcHVMmqH8Kt6jooD\ntnDEHpUmsdSZXKr9DxrEEjzxMXw5s2f7UyKIgsq/616l33yTBUjLHaDEQeFNUUlBZTYhNtCGIK5g\nxw1EUcGfo1u2SH9068auV5iOVhpSth0BsSpSaj9WT60myUCPAqTwhb9p01TVOfBA/2siz2cviqWh\n4iwrb48v0CH1j/r18S76YSsEy7y0FOt+cx0+XtkK/8Ux3n99+vhTIqg6XOnS5p5wAmtI5rHnHFEd\nxfYA00NMPE4bdRYeRqtWRRtvvRLFGDuWZZM2DW7GWbbMS8OSpPDzoQs/+wxYtEj4gxDg6qsTyblj\nk7lj1y52LlRD/QJe7jQgWeHfuJGN28KHmxaZPBm4+WZg9Wr2e/58dq2iuKxWr2bneMkS73ovW6Ye\nemLlSpZuScWHH3p58AJJWPirq5mdluSLzuuvs3by7dtZXERGoZQGfgAMALAMwHIAoxT/NwHwMoAF\nAJYAuFj47xsAiwDMBzDPZn/du3enkVixgjsOvE9ZGftPnNepk3/eueey6aRJ3rx167S74YvI+9qw\ngX09FS/CXtGFAAAgAElEQVR787dvp3vu6f08Ca+pN/rii95C1dVs3t//7s1r3Dj4+I8/ni375pvs\n90EHsd9z5ujXmTGDLdOhA6U7dgTvQzgBP58HC776ii3bHl/Rm3Cr79RZ7o4ClN54o93+bLcXphxR\nmDOHbbu4WL/Mb3/rqy4+1q71l7Giwj8vDp06qbezerU3/+ST2TyxDO++a7d903nWld90XACl7dtb\n7nTrVrtCWvLgg2yzzzyTzPbefZdtb9QoSs86i31ftSreNm31lVIaHMdPCCkG8BCAEwGsAfAxIWQ6\npXSpsNgwAEsppb8ihLQEsIwQ8gyllPewOI5SahjGKiFUT3ne2UREtoDvuYdVF7FHpo3F0KsXsO++\nP7uNtm5ls5dC6DBVVOQbk2JRk77qbYnumKBxgXU8/DCLaee9gblz2GTx8xBRSu3D+x58kC17Sbji\nAQD22gvLm5zGzIgI5FsPfF79TJai+LYhV1e5jSBJi3/pUvV8cZ9LlqT+/913wdtOVxPA119bLpiw\nxc9HRI3TF0yEa8Ly5V6fzJTQ4jRicxv1BLCcUrqiRsgnAZD7nlMAZYQQAqARgM0AFIqbZlQXWyWe\nshDusw97t5UTmwUxZQobK1baVSmEfUpKRRppuoDz/fUVHg
yqh5aJAw5gcfbcHcOHgjMlDeONzmFG\nAhs+3DycpIn6DUAON2SvDCDfhJ/XCVMkjijkQeOGVFen38cfFDVkUy2z3g6R4z5+sTkuG3nkbG6j\n1gBWC7/X1MwTGQvgYABrwdw6V1FKefWhAN4ihHxCCLlUtxNCyKWEkHmEkHkbog5HaGvx21QKm2W4\nCn35JbBq1c8RnHVQnrpMENziF2uBWPYotWPsWJZTXBcGCnjCn6Fu6XEtwXwTfhuRNF1mWUCTtPh1\nVSpo+zb7Tzo5behjzkPhz2SgVFK30UlgPvxfAOgGYCwhhPcrP5pS2g3AyQCGEUKOUW2AUvoIpbQH\npbRHy6iNarIqNGzI3BIyNpVCsPhXrwZuuYVdGDG1+zsLmSU9+dMDcN61+/ycimYxOuN7tFSWSStc\nRUWYhPPw1pbu3rygOOUg6tcPzinOh8mTo44SRhQZ0zNs/Hg2CuB557EB1OS+d6rzt3ChufiPPsoa\nBpOiogL405/Y8KvPPGNeVhb+HTvYYE6iO0VcRr755XQ0FRXshcuWnTtZ9KXKjaAK9ho71j888qpV\nqemhdA+z6mrmabz1VjbVMWaM//fq1awhWTz2wYPZfi+5hAUQhH35TUL4169nXVdOPJF5gwH99aaU\nJam1HX+JD90rCn9G+2cENQIA6A1ghvD7egDXS8u8AqCv8PsdAD0V27oZwLVB+4zcuLt+vb61Tpx/\nxhn+earldu/+edaRR7JZixZRunmzvqFK/AzCCz831Irz27bVlH3mTHXDFt9hkybRzkkaidK4264d\npeefb3eZVJ/bbgtfDvn/uI27U6farzdtmn+50aPZ9zvu8Jbp2NFb5ocf/OvPn+/f12efhSvz3/7G\nlrv11tT/Gjb0tsPjCVTnZY89/L/Hj1fv69NPg6+f6pz36cO+y8fKP0OGsEZvq2Nu3jzcxTQwZYp9\nPfnuO/bffvvZbZtva/BgSg85xNOXOCBE466Nxf8xgA6EkPaEkDoABgOQE5KvAtAfAAghewLoCGAF\nIaQhIaSsZn5DAL8EoEkvmQDU4l3poYeAxx8PXk6w+HkK/8pKe8ujHDWhoZJ5m2+uiqRIypoxnT+b\nyw/EH/s7TKYCub5wl4XO4pfPk2zxb95sv2/A6zGtarISLX6Va4bnhpPLoLsHbIY/VsEzL+jqyM6d\nISz+jz8Gnn02WkEkwrireNl/+incPkR5sK2/SRDYgkkprSSEDAcwA0AxgCcopUsIIZfV/D8ewGgA\nEwghiwAQAH+ilG4khOwH4EXW5osSAM9SSmN0Xg/AJtf+FVfYbUtQGP61qsre10g0WdO1bg7dVecN\nzlEbU3ME8fDiNGaZhL+y0q5jcePGLIY9Ko1DJMeUBYt7IERRMbl6ZPEJK/zcxaPK7ycLv3zueFnl\nY9CJcNRO3fyYdbcApSGEv317c5tWCMK4l3jZw9ZtQrw6nVPCDwCU0lcBvCrNGy98XwtmzcvrrQBq\nRo/IBA0asJqecCpVfgMkIfyhLf66ddkxRTWncgQbi9+m4ueC8Id5Y5DFg1ve4vx0Wvwm4RffAsrL\nU3MA8novl0F3D0RpdBbXMQltaB9/AkRpoI5i1GTDx194joeGDVkL1dSpwcs+/jjw1FOBi3GxUYXS\n8fQ4KetAfRUJYaMR8kadESOCi4mGDWP5iP7xDzZUAOdvf2N5yJ57jkWAxq1w4vpTp7J8WPJ54st8\n842+gezUU4P3JZ8Gnj8OYL0fW7dmx3rLLaxnpCpNf5BdMGMGyzKty7Njc77++leWkVOO2OGDlYnn\nR/xOKdv/vvuy/cviw8dz5zz+ODune+zh9YT94gvW6XjlSm/Uy3r1WPqoli293taiy6qiIlVcdVHA\npsbdsJSUeOURAyds9wmw8/yXv3j3lLydqipWz6dMsSvTli3s2N97z255wG+0LFrEhq7g/R1692aD\nr7Vrlzry67PPej2Vc6
pxNxufyI27JkpL7Rp9FC04vXuzWbNne42UQZ+BePHn7YjzDzxQ01i0ZEno\nRkZbdI2b/JTs2hVvu+Xl3ryyMjbvxx/9yy5ZYm7gE7dn+tx7r3+77dp5/3Xp4l/20EPV++nb11wO\n3uF5yRL1cc+aFdzYx/97/HH1vq680lu2RQtv/rp1lHbuTH9u7HzlleBzcvvtbDpqFNveddf55wOU\nPvUUa5QFKP3979lyAwd6/69Z429ABSh9uaYD+oAB/vmqBvaga2zzeeMN9fxBgyj95hv9+ZaXl3vA\nbtzI5jdtqi63zMsvm8tZWZm6Di9f69bs/AKUPvxwavkWL1aXGaD0o4/syqcDCTfuFgZr1tjHWknw\nV94wnWdMFr8S0/CIaSIpC0O0xrhbQNXxKAlMLz6yN0zniw8qC7eydS6dMC4NnaWqc/VQ6k83z8+n\nPM7M6NHedzkOnJ8HuQGZ9yzn50V29chlLStjb28qC1pF3P4FsjXMCRNUAaS6+4Kup0xQ/VCVUzx2\nlTuPYxpCIJPurNoj/K1asVocgSiNu6GFPwvwY+GCERXRHSEPQ8CJuw+OSfhlH3VU4efHoOu8HeYh\npvMTi/PFOlVd7Rd+vpwcSaTq7cvPMT8PokCJws9jIMRtqFw9xcVMLGWxStLVI6ITRZXLy4RcPv7g\nsm18DqqrJuEnxN+AL2/LJPyZ7O1ce4Q/gB072I2huub8Qm7dav9Ujiv8u3frx5Km1MssKVNdzRr0\nKirYDWNzM5qWqajwV3RKUzsDqaJS5G2a9uEbCCWAMMKvC63TlYV3GOfnnQvOjh3+dXTr8+MQLW1d\n/hWdxb91qxfItX2715hrEn4Zfh7EDvBbtnjlq6hINWLKy1MFrbiYiaXcEJ4u4RdzWons2qXeZ3m5\nul2gqorVw9Wr2TRI+MU6/dNPwRlVxfNUXc3qh3jNRYtfvk7r1+sfLE74M8yuXazBr0kT4L7r1wOv\nvOL7n4vNwIG+1DxGdFE9tsE5ZWX6MaLvv5+lF3rttdT/brqJrdu9OxOQYcOC92W6YXv08Jf5gQfY\n9sVTpLopdY27Kho3Bt5/P7icgFn45WheuXHu44/NZWnViokEty47dWJD8DZq5O+JqrpBJ01ix7Fo\nkT/l06hR6n2J50zc3sEHe6N9PvMMcNll7LvspjCdT36OJkzw5l13HRsCAgBuu40NPSFb/HxgKw63\n+OXU2TrrO65wXX21er5O+Dt2VI9GWlXF0ly3bct6bXOh1rl67riD1Z0NG9jb1kUXmcspGhQjR7L6\nwdNiEeIJf1VVarmHDGH3rwon/BlGtGj+/U4r4JRTfP+LYiM9E7ToLP7WcpYjDRUVXucbGR4FoMqS\nyB9MPOOjOP67DpOIyLnRX3qJTefM8eapbsqwrh7bIRVNwt9Qk/+Ow1MRmI530ya/sH32GZuKY7Ko\n1n/+eTa1HdZVJ/yAl3N+xgxvnuh2+stfzK4eXXSMmIrhuedSw0j5g5HDLX4Z3RtauqJSVAIKsGgm\n3fL87eGzz4KF/+mn2ZQPmhcEd5mJ64rwc6Zrm9AZj87Hn2HEC6mqvGLaD1tfo074k7i4pqROpj5s\nuhszzA3Lbx7R6rER/qB9BI2bzTEJv65xUF7XVJYfflC72GQ/vAwvv+0bnUn4OaLYi3Vw//39ZZDr\nQ9B5UO1XVZe4xS+jy+dnOq9R27ZatNALv47KSu9ab9/unQ+dq4eX23Yf4qhqTZr4/xMtfp3w6x7M\nzuLPMGJFVp18UWxsK4fO1ZPkU91G+MVldBUrjPDzm0cU6riuHnl7UQka35aLj+kG27JF/XAXy69a\nn/uIba+vjQEhir34vaTEfAy24/zaCD+/3uLwwzrhN5Upap/K+vX9wm/zAKmq8s7vtm3e+dAJPy+3\n7QNTbAOQAwhE4Vc1mJv244Q/Jps2sSyP8qvbV1+ph3sLEn7ucwXYkHE2zAMbCJpn7OSo
KkJlZbhI\nU5PlarL4dVFJuoZikbffZhWWW4BBwh/W1WMr/JQyd4pqyMbpcgYpCT58o+khtG6d+v/16703AdUA\nJlz4bXvW2jwgdBa/TvjDWvzvvut9nz8/9f/iYm8/rVp581XCX1Hhd03JRBX+evVYGfgQkFz4167V\nryM+KGSL//XXU3sL83vPoi8nAOCDD9j0669Ty7Fypdep8M03w1n8U6faDXKTBAUp/Mcey4ajPfhg\n//wDDmANQjKiz1K+oXbv1vvaTSzDQViB9jj+eP981U05ZgzQQTGErw6Tq8fk566sBObNS53f1SKp\nxgknsPS+PLpElWRMFIR0uXqqq9l1jRKZy1M3m8pi6t153XVsqmqw5efDtq7YWHdiOWXh54PFNW9u\n7+M3calipIziYq++ipnSVZFKt9/u71sg83//F75MABP+6mrPgOKJ49q1068julh++sk7htmz2TDX\nd93lLXvnnd73cePsysTbt/bbTz0GML/H5s5NTWkN6K/Pk09mrjtPQQo/P9k2A10D+u7zQDzXzI9H\nnZI6T2Et2Qw4LhJm4AbZ1RMmdFLmyy/VPn5+zlTzOEkJfxL9AUxlMY1HoxqUXEZsL+Ko8uvpQnVF\nxHMiC//xxwPHHAMcemiq+yMpd6K4T9GXrXK38KEJdVx1FXDtteHLwC1+XpYDDmBTk6tMtPhVfnae\nIgKwe9uVx3KwGZyPozIETA9mW82KS0EKf1hMMdpxhGbHXWNT5okNQ5wwFQmIPmJPVZW9G0BFnTpm\n4RfFLOx5tE1nm27hN/nHbR5Oqgcrt1JFbK6DeE5k4QfYtVA9QJIUfn6+g3q9Bg3XXFwcHHWlggs/\nF3qbNyVR+CsqUh8S4sPdJqFv8+b+32HqYBgffyZxwg9zw10coVE97VVP9LCDBcURftuGPxWi8Isi\nyM+ZeIOFdfXYDjSdxIM508Iv+sdt9qNCJfylpf5zzs9FUsMeivsUAxxUFr9J1EtK2DrZEP7ycm9d\nvn/xrcwmxbbcKBwmGEL1YI7iikuakLZm/vHSS6zyDBjgzXv7baB/f/Z9/nw2NgtHbmS19fupUKX+\nVd3wYky8TGVl6huBmEIiDGPGpFovYdBZ/IsXM//piSd688RIiZEjg+PbbYdGFIU+iujffbf5xjW9\neSxb5sXriwwa5B2frcX/6aesbvIhj4MQhVe0+OfN83zKvOHbtpEyCFH4xfMSRfiDltFRrx5zx3z9\nNfs9d25wXRLdO1u2sLYpwDueDz4AunVjwzvaWPyqznOme1bk3nvV5cs2BSn8YjQC740oisQJJ3i/\nDzssdX3Rp/inP0Uvh8rfq0IVJcKpqEgVflP+bpMYPvCAXXl01K3rlUV8XeU9HcXIEF62RYviPTxl\nxOMrLw8fH37ddcxfvf/+7KaXo1mCLPGzz06dN22a91315qJ72Mo9ZU2IdYBboH37+vcNsMCGMOyz\njxcxI1NcDPzzn+zB/dBDrDFThyyOjRuzz5o1XnkHD2Yd4Sor/Z3JTKxaxaazZnnzgjoliha/Lkpm\nwQL2ue224DLIFj+lQJ8+wesB6sbdXMC5ehREeRVT3dxJvNKZhDyj+bvhv7lVbxsqH79NI2YYdG45\nlTtFR1UVE01Vmog4rjBAbfHLnXyioHL1XHmlfxlKU99YmjUzb5dbw7p9HnYYi6gRB7VSPWzluvjq\nqyzFCeB1amvWjL3ZhRkgS3Xugoa+DNPhyybYIUy6jLiEbe+LSkEKf9wGwCg3f5AQRkV1LFFdPXER\nbwDVjaXKOJm08IvnQ9xfmGH/qqvZOVQJWDqEP4qLQ0bl6pFFgtJUUQxqPzL1NNatq6qTcl0sLfXO\nr5xPJ0y9VYl80PmsrLRv57Dpd6Gy+NNFpsbkdsKvIEqreyaFP6qrJy6i8KtuLJXFn1RDI0c8PvHh\nE3Y4xKIi9U2WDuGPO7i7DBd81YNLN3yijijCr0K+
zrxBV1WmMBazqgxB91UYi9+m30UmLf6wgR5R\nKSjh/+gjVtlsxK+sTG857NrF/POHHmq/b5XwJ+Hq4ZXsu+9YI+GSJdkZoxNgNwA/t7o0uZzqauCN\nN/RDU0ZF7Aj09797ibrCWPzl5ayxUCX8cUPtVH0ykhB+1UAfKmQhD3KBmcIww1j8cuet0lLv/MrC\nH8Y4US0bFPqrE37VtlSN9TLyeeCJ+9KBs/gjYOo5KLN9u74C7drFOqQsWWK/vSCLv1OncNvj8Mo6\nbRrrJfjAA3Y5Z9KB6C8O6opeVcU67aSTO+7wvocV17ffztygOFGEX248FB/yco90DqV+kTrpJH82\n2UcfTV1HFP7HH/f/Zyv8KkEVXT3yw4gfS9++6u0DLBpmwgT/tvl2ogp/0m+fOqZNY2mvg+jYkUXa\niTiLPwJJPS137Qpv+QUJ4cUXM/Hfe+9w2+U3iZifJ1sWvxxRI2PqwBWWIUPCLR/G4udkyrqKIvwH\nHeT/zR/yv/mN3uKn1P8we/11Nmg855xzUtcRRVn+31b4Vf0bbFw9poi5jh2BoUP9++rTh20zSPh1\nWTEzFUZ5+unAww97vy++WL1caSlw443+eZkS/oIK50zKgrMduUokyOKPKjK84tsKfzp9/OL+gjqm\nxBX+sDdAFHHNBYtf7oTF0TWIBkV9mK6/qg7qMoCqfuv2oeqNbnL1yMaMCbEe8W0GdaTTWfyZjJ8X\nG6VbtFAvE5QCPp3UGuEP42/ftCkZv6wojlEvKL/J+I0flB7YNvVBFIJcS+INuWNHPH952PMVxeLP\nlPCbwg914qcTS9N52bbNnHtGtS/xHMj/68omC7+qN7opqieM8Iv74sIf1dWTScRj0wm/KWIv3RSU\n8IcZj9XEoEHB8c82iA8bfsOGtcj5TcKH4Kuq8ubJFsO2baxBNV0EWfFPPOF9j9uoGybWG4gm/JnC\nVDadQaITS5PF/+ST5nLYCH9JSXDuexvhLynRW/zdurF6utdeLMhCjIQqKmLHyl2i4r4IsRP+MOGc\nmUDXwN6zZ+q8TBkjzsevwTavugmV8ItvAZdf7n0/8kj1NuSbrLpaP6B5mMybqvzrQYj7VhG3o1Kv\nXt73/v1Zt3hTb1ERQlhPzK1bgd69U/+Xc7KYhtDkGSBFxIeaat+q9f/wBxYkUFTEjkX255qQG0R1\nrp6grJgiQcJfXGyXrVKuA6o3O9Hil+vFmDGsI1fXrmyMjGXLWJTVlCnMzfrhh16PenlfRUXB1jyl\n5iyrKr78Evj223DryKxapb4eOuEfP55NV69mPdwfeyzZXu4mCsriT+Jp2bFj8GArdevauY5UPn4x\nTrx7d+97377+AV84JuG3TSjXrFnqg8wmB79MkPDHjTISRa2khD0IwrjcunRh0969vcEyOIcf7h94\nxPRG17kzy9kk+t+POEK/fJ06/vrA62HnzmzAb16mr75Sr1+/fmr/AVuLn2/fBpXwi/OKitR5hWTk\nOqBq7xGFXxy9i//HDZ2WLb1c/zzHvmgEyUNM2gp/2PTG++8fXz/22Uc9f4891PP5NW7Thn3ChI/H\npaAs/iSE38ZlYPtmofLx6/zeQeOBir91rh5dx5Ywbi4T6RZ+Vf75pKJ15O3YXEPRN2/yrcs+fC7i\n8g2v26fqGKM27poIsvht7x+5DqiMoJISb75O+KLsq6go2I1TXa1ucDaRThdLnONPF074JWxuLFvh\nf+8973uQj1+8+cUhI9es8cf/79zpjUY0YwazbLduZW4OnTsiKLeJLdXVZnFPh/BHETob4bepK7bC\nL7+VcOGXLd0w9VMn/HGiPoKE3xaxDr/6KrBwYeoypaWeu0U+D2GQjZswFn8S7XRJkETKjqSxkjBC\nyABCyDJCyHJCSMrAc4SQJoSQlwkhCwghSwghF9uumyRyxTZ1R9dh41qwFX5RCPkNO3y4ellRmA48\n0Pves6f/FXD6
dO9BsGgRcNRRLG64W7fUziCcJC1+UwNvdXU8i/SSS7zvunw0NqjETL6uJsHj7Qp/\n+IM3z5TOWn6wcktXblfQ1RtVFlf5mvE04iefnLqsaoSvU1IHf0s55rZt4xlLP/4InHoqcMstqf8V\nF3ttTjY573XYunrEIVUpZec0ygNHbnDlQ1yq+kAE0bIlK1eYBIKZIlDCCCHFAB4CcDKATgCGEELk\nkSGHAVhKKe0K4FgAfyeE1LFcNzHkShw2RS2QrKtHtc4DD3hpjOVQNU7Y4RFlf7aMzuJ/7jnzeqr2\nBZPwV1XZCfWZZ6aOwVpZ6c8UGdbiF8sa19XTujU7FvEhLQt/z55eY61sYHBhCvOwkZG32bs3K9Mx\nx6Qu+9hjwIgR3u+qKuA//1Fvl0eFVVWxdBdxLH6d25LXX/4ADBqdy2ZfgFn4333Xa5ytqmLrRTH8\nPvzQb7D9+9/etKpKPWa3jvXrgc8/Z+WQ34ZNwQKZwEbCegJYTildQSktBzAJwEBpGQqgjBBCADQC\nsBlApeW6iSFX4ighXUm6ekS4kBGivtnihCMGHafO4g+66eXtJiX8Yq9OTnFxaoQJXzYscV09deqw\nbZjcKoR4AiSfX53wh6k38ja56OnKIr4h6TKP8jLwbenqYhC64AK57Fz447gabaN6Sku966U7/zbI\n51m8b8Pe9+L5zVR8vi02xWkNQByqYU3NPJGxAA4GsBbAIgBXUUqrLddNjCSE30aAo9wsoojw9cXt\nxBH+oL4BuhsvaD050iQp4be5CcIKf1AjZRjrm1+LoHJyYZPLyAUxSrsCJ6x7LuqDMoogBQm/PFhP\nUsJvsvhFY4Lf90l0wkxXT9p09rC3IalwzpMAzAdwPID9AbxJCJllXsUPIeRSAJcCQNswMWoCciVW\nZUoMIl2unqCbPp0dkMLeeJQCd96Z2glLHkZO7OwDsFGVeGieCXEQbx3iuLJhievq4YIRdNPzKCpV\nXnxxO5x0Cj8/nrDCH8fiv+8+83L8/MQRflsfv2hd8/+TCGqQ60lSgp1t4beRsG8BiBGqbWrmiVwM\n4AXKWA7gawAHWa4LAKCUPkIp7UEp7dHSRj0UyBdJN6Scjv/7P7sbR3VTTpjApr//fep/Xbt6MeY6\n0in8YUXk88+BG24ATjvNvJyqzFEax084IXUZLrp33RW8vaDtA+GsbxuLn1LP4tddO938Dh3YB2AN\n+WJHPk7HjqzvgS1RLf6owl9RoR5PViQJV8+DD/p/q4T/iCOARo2868Ut/iSEXz4/tUn4PwbQgRDS\nnhBSB8BgANOlZVYB6A8AhJA9AXQEsMJy3cSIG855zz121rwqWoBnElSNBzp/fnCjUDp9gGFdPfwV\nXjVYvIhK2Gwa8mRLWpUTnS/TrVv4hjBeD3iaCyCcv50vG3RNuEWre9jp5h96KPDFF+z8L1vmNfaL\nNG9uPy4tkHnht+kZm4Tw9+oFTJrEvqss/ubN2TgcYrtGHB9/psi28AdWE0ppJSFkOIAZAIoBPEEp\nXUIIuazm//EARgOYQAhZBIAA+BOldCMAqNZNz6EkI542Pj3bjjlhSGcHkrDCz+cHtZGoRMZG+OXz\npdqOOC/s+eXLm9pQbCx+W1ePbjnbt7gkrj0vQ1ifdFTht+kgxYU/rgDz+qgSfrGdQfbxp2P82kKx\n+K1ODaX0VQCvSvPGC9/XAvil7brpIokbyEZkCkX445JO4Vc1httiEzUVFNUDRG/clbcTRJL1Nkrk\nSVhsUyIkYfHLyD135TYAwHsw5FokTS5RUKcmU8Iv5zPRpV0NQ5LCL/v0dT5+0epo1y58fhNVmaO4\nelRWahzhFy3+Ro3Yd1mcTdfZ1sfPj1XXM9N2kO4kLf6ob0dh+OYbf78BHWVlbBolnl6HyeLnx/K3\nvyW3v3SRVKfKqDjhlxBvBFWjG8B6K3Lxr1sXeP/96PtbuJCl001S+GXxlSuZqr
wrV7IMkmEgBDjv\nPP8828Zdnlq3S5f0Cv8nn7AhB+VtyL+POsr7bhvV8+CDLPrpuOPY786d/R2ndOciqCyLF5v3q0KM\nNw9DmOU//NB7kKoSCr7zDkvhwHn1VWDs2Pg9V8O6enS/OfKwls88E74sMh99FLzutGkszcqYMcD5\n59vvMx0UlPAn7eOvW1edMa+kxBs27vLL7VMHq+jcmQ2nl6Twy+dBftUWRU4kbBlKSoBhw8z7VlFc\n7DUOir115W2H2aaI2E/iwAOB3/0udRvysYrDANpa/M2a+dc79lhmFHDkB4eNxd+vH3DIIfr96ohq\n8Ye55kceCZx9tv7/447zp5TYd9/U+hGX4mK/e0fl6uHozsVf/+r/PXhw/HKZsrdyTj8d+OUvWY/v\ndLQ/hKGghD8drh7+uiovE2YUIRuSFH65UdbG1QMkIxo22ygq8oRfl8dFFM0kGneDREH8zS1122ui\nGx1Lt77JMo3aEKo6ZhsyNfBHHGSLXySKxR/G7acrS75TUMKfxODjciVQCVOuCz+PNuEPLduonky1\nkVJ5NksAABp3SURBVBQXe4nJdIO3JOHqMc0zPQjC9qmIWxfEskRtCM2EqyfK8klgK/y25z9T49rm\nMgUl/HHTAgP+ytOqlXo0J0Lw89CAYibNOCR5Q/Gu8vyhpRMTeeCIKGWQG7ZtLf5OndRl4IS1+MWB\nZVQpMUwWvvxbtrq7dTPvu107NuV9NfhvW8R985GnwmISflNa4ExEAcWlTRs2PeSQ1PKK/nq5bHw9\nmWy7WXKBghJ+G4v/xhuByZP1//Mb6OSTWeTCTTelZrEsKgLOOovl2//d78z70/mwZZK8obiFxG8S\n3Y1/7LH+nOW8DGFeZw8+2N8BqaiIdUrq0cObV1LCOrHxzJLFxay7/9y5+vYRlZvm4IPZ8HYvvuiJ\n8SWXsOsj9vBVCX+QG0Bn8S9Y4I1/oGPQIFYXeK/tefPUDbQ2Pn7Z/2yLyce/YgXrjR20b87KlWwo\nRNvl002/fsDs2cCoUf7je+klf2OyXLYjj2SBDAsW+Btf+fWtX189loAJ5+rJQWws/mbNgBNP1P/P\nK8/AgSyCobg4tfGH9xLs2zf4RojT8BsX/iA0hdMdfbT3PepNLb4V8QZVPlg2wN48unb1+j8UFbF2\nB9Vg0yp4uQ46iL0hDBrkHVP79uz6BLlnorp6unQJHkFJrgvNm5sbaHUPoaZNo7shTD7+Vq3YubMp\nC8Dy9OveWrLVJtCnDzs38tuR6IpVuYGOOopdQ/6GDnjnuGlTFlwRlzhpp7NFrRP+oHS7vPKY3h7C\nvB7bLpuOG4pbJya/sVi+qG0kKpeKuF3e1sCXCytufFuitcW/q7Yl+oTlbeh+h21gjWL5BVn8cepA\nIfv4RcTrFvQwF+uzeE25qyepY+EhrvlEQXm7bIXfJMb8BjJtK4zw21audNxQvOKbrGFxvxs2MDfF\n7Nnh9qNyqYiCzK0y/l/UKB3xRuYiqvLXqoQ/qqsnEzjhtyeq8IvXNE7jrurhHWeEsWxRUBa/Tf59\n+XVRhg+1pot1B/JP+EVrZ6+99Pu94AIWj3zNNeH2oxLYc8/15vEhDKNa/Hw9W+FXEcbVk+nkXkkI\nf6ZSNuiW79s33HaikoTFHzX0VccVVySznUxSUMJvk3IgyNUzYAB7gJhS4qbb1fPGG8DLL9vvIwix\n0vPh6Ti25bvkEuCqq9T/qW4gsUcvvzHkRmdbTI3OSbl6Mmnx694+krD4052yQVXGhQvZ0IeZQCxv\nUL8J1ZjX4jbiCv8NN7DQ6auvjredbFBQrh5b4Q+q7EFWZLot/nr1kglN5QIoClmQlaTD9KZkcqmI\n83QdnYIw+fjT4epJl8VfqD7+0tLMJUSLavGblguDPLZzpt2CSVFQFr9Nqtji4vgdONIt/DxqKC4q\nV49pvyYIseuJamr0jGrxR/Xxq7bBMT0IbO
pHnLC+XLL4kxD+TMbFm4RfRneNknb15CMFJfw2Fr9O\nwMI8udPt6kla+E3HloSlZlvWuK4elfDbunpMFn6UMkUhnRZ/NlM2ZLInbBjh1701x7nWqrfOfKRg\nhJ9SuyyAvKIfe2y47YujRMW1+K+7jg29d/rp6uUISUaIxo1jsc5hYsplxNA3G4tf5MYb/e0CNq6e\nBx5IzfjZpw/r6Xv77d48m8Zdk49fLnNxMfD006njDKvo0gW4//7g5WxJQvj5wz1sSG5Srp5MIYYm\nB90jZ56pnp+Uqyefhb9gfPyEAEuXpl7UDh2AL7/0Lwew3pgtW3rDCwZFBImVKK7F37EjG3pPJB0W\n/7HHpoqoTNCx1KnD0uBGEf4xY/y/bSz+ESNSc72XlQFLpHHb4rp65N916rCopgsu0JeNs2BB8DJh\nSEL4eYe2sG1DSQh/kvn2gxCHPTXVo4UL9YZgEucbyG/hLxiLX4ep5T/qhcsXH7+N7zVoP6KFFVb4\nZUzumbDEdfXIvzNptabDx88zsMoDkQeRRJtAJgcVEXtR2/THSZp8FnuRghd+WfySENR0CL+8/SRc\nPUkIv5ii2Eb4TduL6uM3bSspV08mYvfT6ePn4mvTl0W17zjLp2toTxW2wm/6r1DGzY1DwQu/6ckf\n9cKFuVly3eIPEmGbm9r2LSpqOKeKpF09mbD4+bmUu/hn0+LPt6geW1ePTR2Lcr7FB48T/hzijTf8\nv21cPWPGhMvSl44IEFn4w+5DzLLJEW/IWbOAqVPN+1UR1uI3kaTFz7F19QQNtm5j8c+dm5qpNQwD\nBwK33gr8/e/++aryhiWbwp9JxIRoYYX/pZfidzR75RXgF79g353w5xBy5k2bDkuXXhouS1+6hd8k\nsjpeeil1nih2Rx/NUkmb9quCW6limXjFt90GJ1MWP0csl9wAGcXH37NnvGH6iouBP/85dfCZJIQ/\nU4272cbUGVFEVccGDmQpnuMIdtu2+dlTV6bghF/GRqTT3c3dhriuHlWZkjh20RLmZZKt42xY/Pwh\nYuvqkRsgVVE92SKfLP5sW7mi8JvKblPHXFRPAWOy+KPecOnu7BPF1RO1TPzYdeurXD1BbhMdSUb1\ncGxdPUEPq2x2vU9C+PmbWdjG3bD1JttiZ/uATkfbHodfp2yfizjUauHv1YtNw1p7mRB+lQgceqh+\nHbHRiQ9raLsvQD2oPKDOXR5X+JM4fwcfzKamQTBMZc5G466OJM4Lt/iPPDLcemEfNkmMax0H2+tk\nEn7euC6OEhcGPqjLAQdEWz8XKJgOXDpMN9OUKWxIurADKYS5QW2tAhtXz6OPAuvWAWec4Z//wQfA\nvvt6v2fNAlavDrffxo2BH39M/V816HnUENkkffwPPQScf7765lWd8yCLP5sDcPPzEtfinzePdVgM\nQxzhf/zxzKVj5tgaaaZ7dM89gQ8/NLfr/e9/emPozDNZB9B+/ezKkovUOuEXK3qjRiz/fNxtJo0u\nZUOrVt5bikivXsDOnd7vZs3UUT66fQH6wSTEHDB82aB0uDqStPhbtUp9AMr7CfOWks1GziRcPQDQ\nvXv4deL4+Nu3D/+giUsSFj8Q/GbUsaP+P0LCp3zJNWq1qyepbSaNzuJPajhI1Xo2wq8T7myGc5r2\nE8bVk02SEv4oxLH4s9EgnoSP31ELhT/T24zSAJRJ4ef70b3WqrYb1+LP1E2ZRLtEJsgn4Rfrczba\nRZKy+Gs7tU7488Xi17l6AGady7H0cS1+ObZc/l88bybhz1TKBhOqh23r1v7fvJynnZbestjAh8O8\n6KLU/0yZVZMgTlRPLlv8ufRGl4tY+fgJIQMA3A+gGMBjlNI7pf//CIDnNSwBcDCAlpTSzYSQbwBs\nA1AFoJJSGrEtPRpFRSzEbcgQ1nM1X4RfLmf79l7kzubNbCo2ssYVfl2bgKqjlOlharoxk2zcNaGy\noJs2BX
bvTk1BMW1aMqOdxaFpUzaEn6pPwoIF6Q0bjOPqcRZ//hIo/ISQYgAPATgRwBoAHxNCplNK\nl/JlKKV3A7i7ZvlfAbiGUrpZ2MxxlNKNiZbckqIidkMl+RqdCVePKdxQVamjHh/flo3w2/j4Tbl9\nsunjB9QPpaQS4sVFJ2jpFrA4rp5ctvid8JuxqfI9ASynlK6glJYDmARgoGH5IQBiZDRJFn5TJ+lH\nzUbKhnRZffwG0QmPOIqXjY8+Fyx+hz3O4q+d2EhYawBiVPiamnkpEEIaABgAQBivChTAW4SQTwgh\nl0YtaFTyRfjl7WdK+IN67qpSI0R19WTb4nekUqgWv7v2ZpK+BX8F4H3JzXM0pbQbgJMBDCOEHKNa\nkRByKSFkHiFk3oYNGxIrUDoad4O2ceGF3veorp62bYFu3VhiKd12TjoJeOwx7/fRR7PhA6NQXAwM\nHcqGSxRRjdsrN/iGFf50W2OXXMLOn6qx1OGHEBbTPnmy3fLZjupp3Zr1V7j33szvu5Cwadz9FsA+\nwu82NfNUDIbk5qGUflsz/Z4Q8iKY6+g9eUVK6SMAHgGAHj16JGbfcpFJ0mIOslgnTmQNhs8+a79N\nuedu48bAZ58Bb73FGiBVvP66//esWfb7kykuBiZMAH76CbjtNm++qnFX7sGbaz7+du2AlSvTu49C\ngRDWi9UWsSE8GxZ/gwash7IjHja34McAOhBC2hNC6oCJ+3R5IUJIEwD9AEwT5jUkhJTx7wB+CWBx\nEgW3JV9dPfL3dCeE4vuRrXGVj58vG1b4nY8//xGFP5v5jRzxCLT4KaWVhJDhAGaAhXM+QSldQgi5\nrOb/8TWLngHgDUrpDmH1PQG8SJgylAB4llIq2anpJVtx/HH2kw3hV+0bMPv4VcKfCz5+R/rIds9d\nRzJYxfFTSl8F8Ko0b7z0ewKACdK8FQC6xiphTLjINGzIpklYKZnI1cNJh6vKhMni55iEX9cDWNyW\ns/jzF9Hid9cxfynIJG1TpwKjR7POL1ykHniAJZQ69dT4289Eds4o+0oCeX+mDlwq4b/44vD7yCQL\nFgAffZS9/ec7/OHdv7+LnMlnClL4zzqL3eALFnjzmjYF/vKXZLafDleP+IDIpqsH8CdkU1n8usbd\nli3Nb1S54Orp0oV9HNHg9WH48OyWwxGPgvW2Nm3Kptu3J7/tbDTuZhKxgVb08csPA/6qn418/I7s\nwK+ha6fJbwr28vG8NqrBReISxppPytWTSYtfbLRTCT/386pcPSZyweJ3xINfe3cN85uCvXxc+Ldu\nzW45oqB6CKRL+Pl2xe2LFj8fxlHMbskH9JaFP+gB4Cz+/Mddw8KgYIWfj0EadvDpXEC0pjId1QP4\nLf7Ro9kwc0cd5c3TCX8QzuLPf5yrpzAo2MvHGxmzlXKXi2HUlA2q7+lAZa2LFn/dut4wc7KrJ6yP\nP9MDsTiSxwl/YVCwl49brdw6zXV0g5lkw8cvCr9KpJ2Pv/bifPyFQcFevmxb/EmR7agelfA7H3/t\nRX7bc+QnBSv82bb4Tz+dTQ8/PN52smHx/+Y3qfsXOfRQNj3rLDZ1Fn/tIR8e3gcdlO0S5D4F2YEL\nyL7Ff/bZwM6dXiNzEFw8jz7aPz8bjbtXXw2MHOnfv8iBB/qPLWwcvxP+/CXXXT3l5a5HsQ0FK/zc\n4s+mq8dW9EXk7JbZ6rnLEYVfLIN4bLauHmfx5z+5bvG7jKF2FOwtyCtAvjTu6oQ92yJpM75vWFeP\ns8jyF/fWVhgU7OXLBYs/CbJh8av2bypDWOF35C9O+AuDgr186fDxP/ggy0qYDnTimS3hv/Za4Jhj\n1DnX41r8TjTyl1z38TvscD7+EAwfnvmshNlo3AWAu++2XzZsOKdz9eQvue7jd9hRsM/tfPPxc2SB\nzyXLKilXTy4dkyMcztVTGBTs5cs3H3+uuXpMRHX1OIs//3GunsKgYC8f
t/gbNcpuOeKSbpHkYZlx\nxk/lItCihXm5xo39yzvyjySHMHVkj4L18RcVseEWTzwx2yWJR5xkbzb86U/MHXbFFdG3ccghwLBh\nwJAh5uXeeAN44QU2UpcjP3n+eeDZZ9kwpo78pWCFHwBGjMh2CcIjC3y6hb9BA+C22+yW1ZWhfn1g\n7Njg9ffbj0ULOfKXtm2BUaOyXQpHXNxLd45g2+s1F3A+eocjv3HCn+M4kXU4HEnjhD/H4Y2ucbN8\nOhwOB6egffyFQKNGwJw5rAE12+SSu8nhcETHCX8e0Lt3tkvAcEnWHI7CwLl6cox8sKqd8Dsc+Y0T\nfofD4ahlOFdPjnDAAWzKhzN0OGorFRUVWLNmDXbt2pXtouQk9erVQ5s2bVAao/u0E/4coW1bYMcO\n1hkqV8kHN5Qj/1mzZg3KysrQrl07EOdX9EEpxaZNm7BmzRq0b98+8nasXD2EkAGEkGWEkOWEkJR+\ne4SQPxJC5td8FhNCqgghzWzWdXg0aJAf/vN8KKMjf9m1axeaN2/uRF8BIQTNmzeP/TYUKPyEkGIA\nDwE4GUAnAEMIIZ3EZSild1NKu1FKuwG4HsB/KaWbbdZ1OBwOGSf6epI4NzYWf08AyymlKyil5QAm\nARhoWH4IgOcirutwOByONGMj/K0BrBZ+r6mZlwIhpAGAAQCeD7uuw+FwODJD0uGcvwLwPqV0c9gV\nCSGXEkLmEULmbdiwIeFiOZLAdeBy1BZGjRqFhx566OffN998M8aMGYP+/fvj8MMPR+fOnTFt2rSf\n/584cSK6dOmCrl274sILLwQArF+/HmeccQa6du2Krl27Ys6cORk/Dh02UT3fAthH+N2mZp6KwfDc\nPKHWpZQ+AuARAOjRo4eLH8lBnPA7Ms7VVwPz5ye7zW7dgPvuMy5y3nnn4eqrr8awYcMAAJMnT8aM\nGTNw5ZVXonHjxti4cSN69eqF008/HUuXLsWYMWMwZ84ctGjRAps3M7v3yiuvRL9+/fDiiy+iqqoK\n27dvT/Y4YmAj/B8D6EAIaQ8m2oMBnC8vRAhpAqAfgF+HXdfhcDhyicMOOwzff/891q5diw0bNqBp\n06bYa6+9cM011+C9995DUVERvv32W6xfvx7vvPMOzjnnHLSoGYKuWbNmAIB33nkHEydOBAAUFxej\nSZMmWTsemUDhp5RWEkKGA5gBoBjAE5TSJYSQy2r+H1+z6BkA3qCU7ghaN+mDcDgcBUqAZZ5Ozjnn\nHEydOhXr1q3Deeedh2eeeQYbNmzAJ598gtLSUrRr1y5vO5lZ+fgppa9SSg+klO5PKb2tZt54QfRB\nKZ1AKR1ss64jP3EduBy1ifPOOw+TJk3C1KlTcc455+DHH39Eq1atUFpaipkzZ2LlypUAgOOPPx5T\npkzBpk2bAOBnV0///v0xbtw4AEBVVRV+/PHH7ByIAperx+FwOBQccsgh2LZtG1q3bo29994bF1xw\nAebNm4fOnTtj4sSJOOigg35e7sYbb0S/fv3QtWtXjBw5EgBw//33Y+bMmejcuTO6d++OpUuXZvNw\nfLiUDY7QuMZdR21h0aJFP39v0aIFPvjgA+VyQ4cOxdChQ33z9txzT1/kTy7hLH6Hw+GoZTjhdzgc\njlqGE36HNa5x1+EoDJzwO0LjfPwOR37jhN9hjbP4HY7CwAm/IzTO4nc48hsn/A6Hw1HLcMLvsMa5\nehwONY0aNcp2EULhhN8RGufqcTjyG9dz1+Fw5CxZysqMUaNGYZ999vk5LfPNN9+MkpISzJw5E1u2\nbEFFRQXGjBmDgQODBxTcvn07Bg4cqFxv4sSJuOeee0AIQZcuXfDUU09h/fr1uOyyy7BixQoAwLhx\n43DUUUfFO2gJJ/wOh8MhESYff9AYuPXq1cOLL76YU3n8nfA7rHE+fkemyVZW5jD5+Pfaay/jtiil\nuOGGG3Iqj78TfkdonI/fURtIKh9/
Lubxd427DofDocA2H38QuZjH3wm/w+FwKLDNxx9ELubxd64e\nhzV167JpaWl2y+FwZArbfPymBthczOPvhN9hzejRQL16wIUXZrskDocjDk74HdY0bgzcdVe2S+Fw\n5CaLFi3ChZJVVLduXcydOzdLJdLjhN/hcDgSoHPnzpifdG+zNOEadx0OR85BXacRLUmcGyf8Docj\np6hXrx42bdrkxF8BpRSbNm1CvXr1Ym3HuXocDkdO0aZNG6xZswYbNmzIdlFyknr16qFNmzaxtuGE\n3+Fw5BSlpaVo3759totR0DhXj8PhcNQynPA7HA5HLcMJv8PhcNQySC62nBNCNgCwy4CUSgsAGxMs\nTj7gjrl24I658IlzvPtSSlvaLJiTwh8HQsg8SmmPbJcjk7hjrh24Yy58MnW8ztXjcDgctQwn/A6H\nw1HLKEThfyTbBcgC7phrB+6YC5+MHG/B+fgdDofDYaYQLX6Hw+FwGCgY4SeEDCCELCOELCeEjMp2\neZKCELIPIWQmIWQpIWQJIeSqmvnNCCFvEkK+rJk2Fda5vuY8LCOEnJS90seDEFJMCPmMEPKfmt8F\nfcyEkD0IIVMJIf8jhHxOCOldC475mpp6vZgQ8hwhpF6hHTMh5AlCyPeEkMXCvNDHSAjpTghZVPPf\nA4QQErlQlNK8/wAoBvAVgP0A1AGwAECnbJcroWPbG8DhNd/LAHwBoBOAvwEYVTN/FIC7ar53qjn+\nugDa15yX4mwfR8RjHwngWQD/qfld0McM4EkAv6v5XgfAHoV8zABaA/gaQP2a35MBXFRoxwzgGACH\nA1gszAt9jAA+AtALAAHwGoCTo5apUCz+ngCWU0pXUErLAUwCMDDLZUoESul3lNJPa75vA/A52A0z\nEEwoUDMdVPN9IIBJlNLdlNKvASwHOz95BSGkDYBTATwmzC7YYyaENAETiMcBgFJaTin9AQV8zDWU\nAKhPCCkB0ADAWhTYMVNK3wOwWZod6hgJIXsDaEwp/ZCyp8BEYZ3QFIrwtwawWvi9pmZeQUEIaQfg\nMABzAexJKf2u5q91APas+V4o5+I+ANcBqBbmFfIxtwewAcC/atxbjxFCGqKAj5lS+i2AewCsAvAd\ngB8ppW+ggI9ZIOwxtq75Ls+PRKEIf8FDCGkE4HkAV1NKt4r/1VgABROeRQg5DcD3lNJPdMsU2jGD\nWb6HAxhHKT0MwA4wF8DPFNox1/i1B4I99H4BoCEh5NfiMoV2zCqycYyFIvzfAthH+N2mZl5BQAgp\nBRP9ZyilL9TMXl/z+oea6fc18wvhXPQBcDoh5Bswt93xhJCnUdjHvAbAGkopH5l7KtiDoJCP+QQA\nX1NKN1BKKwC8AOAoFPYxc8Ie47c13+X5kSgU4f8YQAdCSHtCSB0AgwFMz3KZEqGm5f5xAJ9TSu8V\n/poOYGjN96EApgnzBxNC6hJC2gPoANYolDdQSq+nlLahlLYDu5bvUEp/jcI+5nUAVhNCOtbM6g9g\nKQr4mMFcPL0IIQ1q6nl/sDasQj5mTqhjrHELbSWE9Ko5V78R1glPtlu8E2w5PwUs4uUrADdmuzwJ\nHtfRYK+BCwHMr/mcAqA5gLcBfAngLQDNhHVurDkPyxCj5T8XPgCOhRfVU9DHDKAbgHk11/olAE1r\nwTHfAuB/ABYDeAosmqWgjhnAc2BtGBVgb3aXRDlGAD1qztNXAMaipgNulI/ruetwOBy1jEJx9Tgc\nDofDEif8DofDUctwwu9wOBy1DCf8DofDUctwwu9wOBy1DCf8DofDUctwwu9wOBy1DCf8DofDUcv4\nfwG+U4/JdpcLAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x22e9a53b978>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(history.epoch,history.history.get('acc'),c='r',label=\"acc\")\n",
    "plt.plot(history.epoch,history.history.get('val_acc'),c='b',label=\"val_acc\")\n",
    "plt.legend()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:kr]",
   "language": "python",
   "name": "conda-env-kr-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
