{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Phase 3 Weighted Bagging"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\gensim\\utils.py:1197: UserWarning: detected Windows; aliasing chunkize to chunkize_serial\n",
      "  warnings.warn(\"detected Windows; aliasing chunkize to chunkize_serial\")\n",
      "Using TensorFlow backend.\n",
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\fuzzywuzzy\\fuzz.py:35: UserWarning: Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning\n",
      "  warnings.warn('Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning')\n"
     ]
    }
   ],
   "source": [
    "########################################\n",
    "## imports (deduplicated — the original repeated os/re/csv/codecs/\n",
    "## numpy/pandas/operator/sys and several keras/iwillwin imports 2-3x)\n",
    "########################################\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import sys\n",
    "import codecs\n",
    "import operator\n",
    "import itertools\n",
    "\n",
    "import gensim\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import seaborn as sns\n",
    "import lightgbm as lgb\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "from collections import Counter\n",
    "from string import punctuation\n",
    "\n",
    "from nltk import ngrams, word_tokenize\n",
    "from nltk.corpus import stopwords\n",
    "from fuzzywuzzy import fuzz\n",
    "from tqdm import tqdm\n",
    "from scipy.stats import skew, kurtosis\n",
    "from scipy.spatial.distance import (cosine, cityblock, jaccard, canberra,\n",
    "                                    euclidean, minkowski, braycurtis)\n",
    "\n",
    "from sklearn.model_selection import train_test_split, KFold\n",
    "\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "from keras.utils import to_categorical\n",
    "\n",
    "from iwillwin.trainer.supervised_trainer import KerasModelTrainer\n",
    "from iwillwin.data_utils.data_helpers import DataTransformer, DataLoader\n",
    "from iwillwin.config import dataset_config\n",
    "from iwillwin.data_utils.feature_engineering import FeatureCreator\n",
    "\n",
    "# seed numpy once, before any stochastic work (was set mid-cell originally)\n",
    "np.random.seed(1337)\n",
    "\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Building prefix dict from the default dictionary ...\n",
      "Loading model from cache C:\\Users\\zake7\\AppData\\Local\\Temp\\jieba.cache\n",
      "Loading model cost 0.465 seconds.\n",
      "Prefix dict has been built succesfully.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[DataHelper] Apply normalization on value-type columns\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\validation.py:475: DataConversionWarning: Data with input dtype int64 was converted to float64 by MinMaxScaler.\n",
      "  warnings.warn(msg, DataConversionWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Doing preprocessing...\n",
      "Transforming words to indices...\n",
      "Shape of data tensor: (320552, 50) (320552, 50)\n",
      "Shape of label tensor: (320552,)\n",
      "Preprocessed.\n",
      "Number of unique words 83265\n"
     ]
    }
   ],
   "source": [
    "# Build model inputs with the project's DataTransformer: vocab capped at\n",
    "# 50k words, sequences padded/truncated to length 50 (see printed shapes),\n",
    "# value-type feature columns normalized, precomputed features reused.\n",
    "NB_WORDS, MAX_SEQUENCE_LENGTH = 50000, 50\n",
    "data_transformer = DataTransformer(max_num_words=NB_WORDS, max_sequence_length=MAX_SEQUENCE_LENGTH, char_level=False,\n",
    "                                   normalization=True, features_processed=True)\n",
    "trains_nns, tests_nns, labels = data_transformer.prepare_data(dual=False)\n",
    "print(\"Number of unique words\", len(data_transformer.tokenizer.index_docs))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# trains_nns/tests_nns are tuples of model inputs; index 2 appears to be\n",
    "# the hand-crafted meta-feature matrix (named accordingly below) —\n",
    "# TODO confirm against DataTransformer.prepare_data\n",
    "trains_meta = trains_nns[2]\n",
    "tests_meta = tests_nns[2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# raw competition data, paths relative to the notebook's directory\n",
    "train_df = pd.read_csv('../data/dataset/train.csv')\n",
    "test_df = pd.read_csv('../data/dataset/test.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n"
     ]
    }
   ],
   "source": [
    "# Keywords that typically mark a title as refuting a rumor\n",
    "# (辟谣 = debunk, 谣言 = rumor, 勿传 = don't spread, 假的 = fake).\n",
    "rumor_words = ['辟谣', '谣言', '勿传', '假的']\n",
    "\n",
    "def is_rumor(text):\n",
    "    \"\"\"Return 1 if `text` contains any rumor-refuting keyword, else 0.\n",
    "\n",
    "    Non-string values (NaN from missing titles) are printed for\n",
    "    visibility and treated as non-rumor.\n",
    "    \"\"\"\n",
    "    if not isinstance(text, str):\n",
    "        print(text, type(text))\n",
    "        return 0\n",
    "    return int(any(word in text for word in rumor_words))\n",
    "\n",
    "def has_split_symbol(text):\n",
    "    \"\"\"Return 1 if `text` contains the '|' separator, else 0 (incl. non-strings).\"\"\"\n",
    "    if not isinstance(text, str):\n",
    "        return 0\n",
    "    return int('|' in text)\n",
    "\n",
    "# Binary indicator columns derived from the second title.\n",
    "for df in [train_df, test_df]:\n",
    "    df['has_|'] = df['title2_zh'].apply(has_split_symbol)\n",
    "    df['has_rumor_words'] = df['title2_zh'].apply(is_rumor)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Append the binary rumor-word flag as one extra column to the\n",
    "# meta-feature matrices ('trick' features for the ensembler).\n",
    "train_has_rumor = train_df.has_rumor_words.values\n",
    "test_has_rumor = test_df.has_rumor_words.values\n",
    "\n",
    "trick_trains_features = np.concatenate((trains_nns[2], train_has_rumor.reshape((-1, 1))), axis=1)\n",
    "trick_tests_features = np.concatenate((tests_nns[2], test_has_rumor.reshape((-1, 1))), axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# OOF (train) prediction files live in ../data/oofs/; the matching\n",
    "# test-set prediction files in ../data/output/ drop the '-Train' token.\n",
    "oof_file_names = sorted(f for f in listdir('../data/oofs/')\n",
    "                        if isfile(join('../data/oofs/', f)) and f != '.gitkeep')\n",
    "preds_file_names = [name.replace('-Train', '') for name in oof_file_names]\n",
    "\n",
    "oofs = [pd.read_csv('../data/oofs/' + name) for name in oof_file_names]\n",
    "preds = [pd.read_csv('../data/output/' + name) for name in preds_file_names]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-NoEM-Train-L0.809633-NB5000.csv\n",
      "1 3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.816583-NB5000.csv\n",
      "2 3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.833565-NB5000.csv\n",
      "3 WordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.838202-NB100000.csv\n",
      "4 WordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L1.104962-NB100000.csv\n",
      "5 WordTC-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.8440-NB100000.csv\n",
      "6 WordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.854586-NB100000.csv\n",
      "7 WordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.374334-NB100000.csv\n",
      "8 WordTC-Gated4GWindows-NoMeta-3P-NoEM-NoClassWeighted-3Layers-withEM-Train-L0.836860-NB100000.csv\n"
     ]
    }
   ],
   "source": [
    "# List the base models, then build the level-2 design matrices:\n",
    "# one column per (model, class) pair — OOF probabilities for the train\n",
    "# frame, test-set probabilities for the test frame.\n",
    "for i, name in enumerate(oof_file_names):\n",
    "    print(i, name)\n",
    "    \n",
    "trains = pd.DataFrame()\n",
    "tests = pd.DataFrame()\n",
    "\n",
    "for i in range(len(oof_file_names)):\n",
    "    for label_type in ['agreed', 'disagreed', 'unrelated']:\n",
    "        trains['oofs_{}_{}'.format(i, label_type)] = oofs[i][label_type].values\n",
    "        tests['oofs_pred{}_{}'.format(i, label_type)] = preds[i][label_type].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Per-class frames used below to inspect cross-model prediction\n",
    "# correlations (ensemble diversity check).\n",
    "unrelated = pd.DataFrame()\n",
    "agreeds = pd.DataFrame()\n",
    "disagreeds = pd.DataFrame()\n",
    "\n",
    "# When True, inspect correlations of the OOF (train) predictions\n",
    "# instead of the test-set predictions.\n",
    "check_oofs = False\n",
    "\n",
    "# The original if/else duplicated the same loop for oofs vs preds;\n",
    "# select the source list once and fill the frames in one pass.\n",
    "prediction_dfs = oofs if check_oofs else preds\n",
    "for i, pred_df in enumerate(prediction_dfs):\n",
    "    agreeds['oofs_agreed_{}'.format(i)] = pred_df['agreed'].values\n",
    "    unrelated['oofs_unrelated_{}'.format(i)] = pred_df['unrelated'].values\n",
    "    disagreeds['oofs_disagreeds_{}'.format(i)] = pred_df['disagreed'].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_agreed_0</th>\n",
       "      <th>oofs_agreed_1</th>\n",
       "      <th>oofs_agreed_2</th>\n",
       "      <th>oofs_agreed_3</th>\n",
       "      <th>oofs_agreed_4</th>\n",
       "      <th>oofs_agreed_5</th>\n",
       "      <th>oofs_agreed_6</th>\n",
       "      <th>oofs_agreed_7</th>\n",
       "      <th>oofs_agreed_8</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982310</td>\n",
       "      <td>0.976632</td>\n",
       "      <td>0.959797</td>\n",
       "      <td>0.956807</td>\n",
       "      <td>0.961449</td>\n",
       "      <td>0.961597</td>\n",
       "      <td>0.959404</td>\n",
       "      <td>0.962629</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_1</th>\n",
       "      <td>0.982310</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982479</td>\n",
       "      <td>0.958966</td>\n",
       "      <td>0.957473</td>\n",
       "      <td>0.961494</td>\n",
       "      <td>0.961780</td>\n",
       "      <td>0.961639</td>\n",
       "      <td>0.961288</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_2</th>\n",
       "      <td>0.976632</td>\n",
       "      <td>0.982479</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.955476</td>\n",
       "      <td>0.959167</td>\n",
       "      <td>0.957512</td>\n",
       "      <td>0.961050</td>\n",
       "      <td>0.963775</td>\n",
       "      <td>0.956841</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_3</th>\n",
       "      <td>0.959797</td>\n",
       "      <td>0.958966</td>\n",
       "      <td>0.955476</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982100</td>\n",
       "      <td>0.977313</td>\n",
       "      <td>0.970090</td>\n",
       "      <td>0.972992</td>\n",
       "      <td>0.973543</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_4</th>\n",
       "      <td>0.956807</td>\n",
       "      <td>0.957473</td>\n",
       "      <td>0.959167</td>\n",
       "      <td>0.982100</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.971693</td>\n",
       "      <td>0.972289</td>\n",
       "      <td>0.979278</td>\n",
       "      <td>0.968705</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_5</th>\n",
       "      <td>0.961449</td>\n",
       "      <td>0.961494</td>\n",
       "      <td>0.957512</td>\n",
       "      <td>0.977313</td>\n",
       "      <td>0.971693</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981701</td>\n",
       "      <td>0.981716</td>\n",
       "      <td>0.982047</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_6</th>\n",
       "      <td>0.961597</td>\n",
       "      <td>0.961780</td>\n",
       "      <td>0.961050</td>\n",
       "      <td>0.970090</td>\n",
       "      <td>0.972289</td>\n",
       "      <td>0.981701</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.983917</td>\n",
       "      <td>0.982819</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_7</th>\n",
       "      <td>0.959404</td>\n",
       "      <td>0.961639</td>\n",
       "      <td>0.963775</td>\n",
       "      <td>0.972992</td>\n",
       "      <td>0.979278</td>\n",
       "      <td>0.981716</td>\n",
       "      <td>0.983917</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.978351</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_8</th>\n",
       "      <td>0.962629</td>\n",
       "      <td>0.961288</td>\n",
       "      <td>0.956841</td>\n",
       "      <td>0.973543</td>\n",
       "      <td>0.968705</td>\n",
       "      <td>0.982047</td>\n",
       "      <td>0.982819</td>\n",
       "      <td>0.978351</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "               oofs_agreed_0  oofs_agreed_1  oofs_agreed_2  oofs_agreed_3  \\\n",
       "oofs_agreed_0       1.000000       0.982310       0.976632       0.959797   \n",
       "oofs_agreed_1       0.982310       1.000000       0.982479       0.958966   \n",
       "oofs_agreed_2       0.976632       0.982479       1.000000       0.955476   \n",
       "oofs_agreed_3       0.959797       0.958966       0.955476       1.000000   \n",
       "oofs_agreed_4       0.956807       0.957473       0.959167       0.982100   \n",
       "oofs_agreed_5       0.961449       0.961494       0.957512       0.977313   \n",
       "oofs_agreed_6       0.961597       0.961780       0.961050       0.970090   \n",
       "oofs_agreed_7       0.959404       0.961639       0.963775       0.972992   \n",
       "oofs_agreed_8       0.962629       0.961288       0.956841       0.973543   \n",
       "\n",
       "               oofs_agreed_4  oofs_agreed_5  oofs_agreed_6  oofs_agreed_7  \\\n",
       "oofs_agreed_0       0.956807       0.961449       0.961597       0.959404   \n",
       "oofs_agreed_1       0.957473       0.961494       0.961780       0.961639   \n",
       "oofs_agreed_2       0.959167       0.957512       0.961050       0.963775   \n",
       "oofs_agreed_3       0.982100       0.977313       0.970090       0.972992   \n",
       "oofs_agreed_4       1.000000       0.971693       0.972289       0.979278   \n",
       "oofs_agreed_5       0.971693       1.000000       0.981701       0.981716   \n",
       "oofs_agreed_6       0.972289       0.981701       1.000000       0.983917   \n",
       "oofs_agreed_7       0.979278       0.981716       0.983917       1.000000   \n",
       "oofs_agreed_8       0.968705       0.982047       0.982819       0.978351   \n",
       "\n",
       "               oofs_agreed_8  \n",
       "oofs_agreed_0       0.962629  \n",
       "oofs_agreed_1       0.961288  \n",
       "oofs_agreed_2       0.956841  \n",
       "oofs_agreed_3       0.973543  \n",
       "oofs_agreed_4       0.968705  \n",
       "oofs_agreed_5       0.982047  \n",
       "oofs_agreed_6       0.982819  \n",
       "oofs_agreed_7       0.978351  \n",
       "oofs_agreed_8       1.000000  "
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# pairwise correlation of the models' 'agreed' probabilities on the\n",
    "# selected prediction set (lower off-diagonal = more model diversity)\n",
    "agreeds.corr()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_disagreeds_0</th>\n",
       "      <th>oofs_disagreeds_1</th>\n",
       "      <th>oofs_disagreeds_2</th>\n",
       "      <th>oofs_disagreeds_3</th>\n",
       "      <th>oofs_disagreeds_4</th>\n",
       "      <th>oofs_disagreeds_5</th>\n",
       "      <th>oofs_disagreeds_6</th>\n",
       "      <th>oofs_disagreeds_7</th>\n",
       "      <th>oofs_disagreeds_8</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.954897</td>\n",
       "      <td>0.951416</td>\n",
       "      <td>0.912412</td>\n",
       "      <td>0.913741</td>\n",
       "      <td>0.912573</td>\n",
       "      <td>0.916230</td>\n",
       "      <td>0.912694</td>\n",
       "      <td>0.908535</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_1</th>\n",
       "      <td>0.954897</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.964977</td>\n",
       "      <td>0.920113</td>\n",
       "      <td>0.918646</td>\n",
       "      <td>0.920680</td>\n",
       "      <td>0.921862</td>\n",
       "      <td>0.920311</td>\n",
       "      <td>0.911443</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_2</th>\n",
       "      <td>0.951416</td>\n",
       "      <td>0.964977</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.919030</td>\n",
       "      <td>0.925045</td>\n",
       "      <td>0.915579</td>\n",
       "      <td>0.923305</td>\n",
       "      <td>0.926555</td>\n",
       "      <td>0.913171</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_3</th>\n",
       "      <td>0.912412</td>\n",
       "      <td>0.920113</td>\n",
       "      <td>0.919030</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.967334</td>\n",
       "      <td>0.956456</td>\n",
       "      <td>0.945988</td>\n",
       "      <td>0.954167</td>\n",
       "      <td>0.943892</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_4</th>\n",
       "      <td>0.913741</td>\n",
       "      <td>0.918646</td>\n",
       "      <td>0.925045</td>\n",
       "      <td>0.967334</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.946770</td>\n",
       "      <td>0.944932</td>\n",
       "      <td>0.961943</td>\n",
       "      <td>0.939010</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_5</th>\n",
       "      <td>0.912573</td>\n",
       "      <td>0.920680</td>\n",
       "      <td>0.915579</td>\n",
       "      <td>0.956456</td>\n",
       "      <td>0.946770</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.965450</td>\n",
       "      <td>0.960278</td>\n",
       "      <td>0.955281</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_6</th>\n",
       "      <td>0.916230</td>\n",
       "      <td>0.921862</td>\n",
       "      <td>0.923305</td>\n",
       "      <td>0.945988</td>\n",
       "      <td>0.944932</td>\n",
       "      <td>0.965450</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.963620</td>\n",
       "      <td>0.958197</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_7</th>\n",
       "      <td>0.912694</td>\n",
       "      <td>0.920311</td>\n",
       "      <td>0.926555</td>\n",
       "      <td>0.954167</td>\n",
       "      <td>0.961943</td>\n",
       "      <td>0.960278</td>\n",
       "      <td>0.963620</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.954127</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_8</th>\n",
       "      <td>0.908535</td>\n",
       "      <td>0.911443</td>\n",
       "      <td>0.913171</td>\n",
       "      <td>0.943892</td>\n",
       "      <td>0.939010</td>\n",
       "      <td>0.955281</td>\n",
       "      <td>0.958197</td>\n",
       "      <td>0.954127</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                   oofs_disagreeds_0  oofs_disagreeds_1  oofs_disagreeds_2  \\\n",
       "oofs_disagreeds_0           1.000000           0.954897           0.951416   \n",
       "oofs_disagreeds_1           0.954897           1.000000           0.964977   \n",
       "oofs_disagreeds_2           0.951416           0.964977           1.000000   \n",
       "oofs_disagreeds_3           0.912412           0.920113           0.919030   \n",
       "oofs_disagreeds_4           0.913741           0.918646           0.925045   \n",
       "oofs_disagreeds_5           0.912573           0.920680           0.915579   \n",
       "oofs_disagreeds_6           0.916230           0.921862           0.923305   \n",
       "oofs_disagreeds_7           0.912694           0.920311           0.926555   \n",
       "oofs_disagreeds_8           0.908535           0.911443           0.913171   \n",
       "\n",
       "                   oofs_disagreeds_3  oofs_disagreeds_4  oofs_disagreeds_5  \\\n",
       "oofs_disagreeds_0           0.912412           0.913741           0.912573   \n",
       "oofs_disagreeds_1           0.920113           0.918646           0.920680   \n",
       "oofs_disagreeds_2           0.919030           0.925045           0.915579   \n",
       "oofs_disagreeds_3           1.000000           0.967334           0.956456   \n",
       "oofs_disagreeds_4           0.967334           1.000000           0.946770   \n",
       "oofs_disagreeds_5           0.956456           0.946770           1.000000   \n",
       "oofs_disagreeds_6           0.945988           0.944932           0.965450   \n",
       "oofs_disagreeds_7           0.954167           0.961943           0.960278   \n",
       "oofs_disagreeds_8           0.943892           0.939010           0.955281   \n",
       "\n",
       "                   oofs_disagreeds_6  oofs_disagreeds_7  oofs_disagreeds_8  \n",
       "oofs_disagreeds_0           0.916230           0.912694           0.908535  \n",
       "oofs_disagreeds_1           0.921862           0.920311           0.911443  \n",
       "oofs_disagreeds_2           0.923305           0.926555           0.913171  \n",
       "oofs_disagreeds_3           0.945988           0.954167           0.943892  \n",
       "oofs_disagreeds_4           0.944932           0.961943           0.939010  \n",
       "oofs_disagreeds_5           0.965450           0.960278           0.955281  \n",
       "oofs_disagreeds_6           1.000000           0.963620           0.958197  \n",
       "oofs_disagreeds_7           0.963620           1.000000           0.954127  \n",
       "oofs_disagreeds_8           0.958197           0.954127           1.000000  "
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# same diversity check for the 'disagreed' class — correlations are\n",
    "# noticeably lower here than for 'agreed'/'unrelated'\n",
    "disagreeds.corr()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_unrelated_0</th>\n",
       "      <th>oofs_unrelated_1</th>\n",
       "      <th>oofs_unrelated_2</th>\n",
       "      <th>oofs_unrelated_3</th>\n",
       "      <th>oofs_unrelated_4</th>\n",
       "      <th>oofs_unrelated_5</th>\n",
       "      <th>oofs_unrelated_6</th>\n",
       "      <th>oofs_unrelated_7</th>\n",
       "      <th>oofs_unrelated_8</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.978855</td>\n",
       "      <td>0.972572</td>\n",
       "      <td>0.952930</td>\n",
       "      <td>0.949506</td>\n",
       "      <td>0.954771</td>\n",
       "      <td>0.955016</td>\n",
       "      <td>0.952586</td>\n",
       "      <td>0.955786</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_1</th>\n",
       "      <td>0.978855</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.979417</td>\n",
       "      <td>0.952496</td>\n",
       "      <td>0.950368</td>\n",
       "      <td>0.955304</td>\n",
       "      <td>0.955510</td>\n",
       "      <td>0.955456</td>\n",
       "      <td>0.954800</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_2</th>\n",
       "      <td>0.972572</td>\n",
       "      <td>0.979417</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.948158</td>\n",
       "      <td>0.952290</td>\n",
       "      <td>0.950251</td>\n",
       "      <td>0.954365</td>\n",
       "      <td>0.957542</td>\n",
       "      <td>0.949502</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_3</th>\n",
       "      <td>0.952930</td>\n",
       "      <td>0.952496</td>\n",
       "      <td>0.948158</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.979002</td>\n",
       "      <td>0.973876</td>\n",
       "      <td>0.965568</td>\n",
       "      <td>0.969264</td>\n",
       "      <td>0.969451</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_4</th>\n",
       "      <td>0.949506</td>\n",
       "      <td>0.950368</td>\n",
       "      <td>0.952290</td>\n",
       "      <td>0.979002</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.967039</td>\n",
       "      <td>0.967506</td>\n",
       "      <td>0.975886</td>\n",
       "      <td>0.963249</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_5</th>\n",
       "      <td>0.954771</td>\n",
       "      <td>0.955304</td>\n",
       "      <td>0.950251</td>\n",
       "      <td>0.973876</td>\n",
       "      <td>0.967039</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.978773</td>\n",
       "      <td>0.978637</td>\n",
       "      <td>0.978786</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_6</th>\n",
       "      <td>0.955016</td>\n",
       "      <td>0.955510</td>\n",
       "      <td>0.954365</td>\n",
       "      <td>0.965568</td>\n",
       "      <td>0.967506</td>\n",
       "      <td>0.978773</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981089</td>\n",
       "      <td>0.979624</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_7</th>\n",
       "      <td>0.952586</td>\n",
       "      <td>0.955456</td>\n",
       "      <td>0.957542</td>\n",
       "      <td>0.969264</td>\n",
       "      <td>0.975886</td>\n",
       "      <td>0.978637</td>\n",
       "      <td>0.981089</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.974969</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_8</th>\n",
       "      <td>0.955786</td>\n",
       "      <td>0.954800</td>\n",
       "      <td>0.949502</td>\n",
       "      <td>0.969451</td>\n",
       "      <td>0.963249</td>\n",
       "      <td>0.978786</td>\n",
       "      <td>0.979624</td>\n",
       "      <td>0.974969</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                  oofs_unrelated_0  oofs_unrelated_1  oofs_unrelated_2  \\\n",
       "oofs_unrelated_0          1.000000          0.978855          0.972572   \n",
       "oofs_unrelated_1          0.978855          1.000000          0.979417   \n",
       "oofs_unrelated_2          0.972572          0.979417          1.000000   \n",
       "oofs_unrelated_3          0.952930          0.952496          0.948158   \n",
       "oofs_unrelated_4          0.949506          0.950368          0.952290   \n",
       "oofs_unrelated_5          0.954771          0.955304          0.950251   \n",
       "oofs_unrelated_6          0.955016          0.955510          0.954365   \n",
       "oofs_unrelated_7          0.952586          0.955456          0.957542   \n",
       "oofs_unrelated_8          0.955786          0.954800          0.949502   \n",
       "\n",
       "                  oofs_unrelated_3  oofs_unrelated_4  oofs_unrelated_5  \\\n",
       "oofs_unrelated_0          0.952930          0.949506          0.954771   \n",
       "oofs_unrelated_1          0.952496          0.950368          0.955304   \n",
       "oofs_unrelated_2          0.948158          0.952290          0.950251   \n",
       "oofs_unrelated_3          1.000000          0.979002          0.973876   \n",
       "oofs_unrelated_4          0.979002          1.000000          0.967039   \n",
       "oofs_unrelated_5          0.973876          0.967039          1.000000   \n",
       "oofs_unrelated_6          0.965568          0.967506          0.978773   \n",
       "oofs_unrelated_7          0.969264          0.975886          0.978637   \n",
       "oofs_unrelated_8          0.969451          0.963249          0.978786   \n",
       "\n",
       "                  oofs_unrelated_6  oofs_unrelated_7  oofs_unrelated_8  \n",
       "oofs_unrelated_0          0.955016          0.952586          0.955786  \n",
       "oofs_unrelated_1          0.955510          0.955456          0.954800  \n",
       "oofs_unrelated_2          0.954365          0.957542          0.949502  \n",
       "oofs_unrelated_3          0.965568          0.969264          0.969451  \n",
       "oofs_unrelated_4          0.967506          0.975886          0.963249  \n",
       "oofs_unrelated_5          0.978773          0.978637          0.978786  \n",
       "oofs_unrelated_6          1.000000          0.981089          0.979624  \n",
       "oofs_unrelated_7          0.981089          1.000000          0.974969  \n",
       "oofs_unrelated_8          0.979624          0.974969          1.000000  "
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Pairwise correlation between the models' unrelated-class OOF prediction columns\n",
    "unrelated.corr()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Prepare Different Inputs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Only use oofs: the ensemble's input features are just the stacked\n",
    "# out-of-fold prediction columns (no extra meta-features).\n",
    "# NOTE(review): `trains`/`tests` are DataFrames assembled in an earlier cell.\n",
    "ensemble_trains = trains.values\n",
    "ensemble_tests = tests.values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# use oofs and meta-features\n",
    "#ensemble_trains = np.concatenate((trains.values, trick_trains_features), axis=1)\n",
    "#ensemble_tests = np.concatenate((tests.values, trick_tests_features), axis=1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Ensemble With NN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "########################################\n",
    "## import packages\n",
    "########################################\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import codecs\n",
    "import numpy as np\n",
    "np.random.seed(1337)\n",
    "\n",
    "import pandas as pd\n",
    "import operator\n",
    "import sys\n",
    "\n",
    "from string import punctuation\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "\n",
    "from iwillwin.trainer.supervised_trainer import KerasModelTrainer\n",
    "from iwillwin.data_utils.data_helpers import DataTransformer, DataLoader\n",
    "from iwillwin.model.sim_zoos import *\n",
    "import tensorflow as tf\n",
    "from keras.layers import Dense, Input, MaxPooling1D, CuDNNLSTM, Embedding, Add, Lambda, Dropout, Activation, SpatialDropout1D, Reshape, GlobalAveragePooling1D, merge, Flatten, Bidirectional, CuDNNGRU, add, Conv1D, GlobalMaxPooling1D\n",
    "from keras.layers.merge import concatenate\n",
    "from keras.models import Model\n",
    "from keras import optimizers\n",
    "from keras import initializers\n",
    "from keras.engine import InputSpec, Layer\n",
    "from iwillwin.config import dataset_config, model_config\n",
    "from keras.models import Sequential\n",
    "from keras.layers.embeddings import Embedding\n",
    "from keras.layers.core import Lambda, Dense, Dropout\n",
    "from keras.layers.recurrent import LSTM, GRU\n",
    "from keras.layers.wrappers import Bidirectional\n",
    "from keras.legacy.layers import Highway\n",
    "from keras.layers import TimeDistributed\n",
    "from keras.layers.normalization import BatchNormalization\n",
    "import keras.backend as K\n",
    "\n",
    "from sklearn.metrics import roc_auc_score, log_loss\n",
    "from keras.callbacks import EarlyStopping, ModelCheckpoint\n",
    "from sklearn.metrics import log_loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from keras import regularizers\n",
    "\n",
    "def weighted_accuracy(y_true, y_pred):\n",
    "    \"\"\"Keras metric: accuracy where each sample counts with its true class's weight.\n",
    "\n",
    "    Assumes y_true is one-hot over 3 classes, so weight_mask is zero except at\n",
    "    each sample's true class and the result reduces to\n",
    "    sum(weight[true_class] * correct_at_true_class) / sum(weight[true_class]).\n",
    "    \"\"\"\n",
    "    # Per-class weights; the third class counts roughly 3x the first.\n",
    "    weight = np.array([[1/16, 1/15, 1/5]])\n",
    "    # NOTE(review): `norm` is never used below -- normalisation happens via\n",
    "    # K.sum(weight_mask) instead.\n",
    "    norm = [(1/16) + (1/15) + (1/5)]\n",
    "    weight_mask = weight * y_true\n",
    "    \n",
    "    y_pred = K.cast(y_pred > 0.5, 'int32') # hard 0.5 threshold on the softmax outputs\n",
    "    y_true = K.cast(y_true, 'int32')\n",
    "    \n",
    "    res = K.cast(K.equal(y_pred, y_true), 'float32') * weight_mask / K.sum(weight_mask)\n",
    "    res = K.sum(res)\n",
    "    return res\n",
    "\n",
    "def get_dense_add_net(feature_nums):\n",
    "    \"\"\"Build a DenseNet-style MLP over `feature_nums` meta-features.\n",
    "\n",
    "    Each of 5 stages concatenates a Dense(24)+Dropout block's output onto the\n",
    "    running feature vector; a Highway layer and a 3-way softmax head follow.\n",
    "    \"\"\"\n",
    "    # NOTE(review): `Concatenate` and `Adam` are not in this notebook's import\n",
    "    # cell; presumably provided by the star import from\n",
    "    # iwillwin.model.sim_zoos -- confirm.\n",
    "    features_inputs = Input(shape=(feature_nums,), name='mata-features', dtype=\"float32\")\n",
    "    features = features_inputs\n",
    "    \n",
    "    depth = 5\n",
    "    for i in range(depth):\n",
    "        new_features = Dense(24, activation='relu')(features)\n",
    "        new_features = Dropout(0.1)(new_features)\n",
    "        features = Concatenate()([features, new_features])\n",
    "\n",
    "    h = Highway(activation='relu')(features)\n",
    "    out_ = Dense(3, activation='softmax')(h)\n",
    "    \n",
    "    model = Model(inputs=[features_inputs], outputs=out_)\n",
    "    model.compile(optimizer=Adam(lr=1e-3, decay=1e-6,), loss='categorical_crossentropy',\n",
    "    metrics=['accuracy', weighted_accuracy])\n",
    "    model.summary()\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_logit_net(feature_nums):\n",
    "    \"\"\"Softmax-regression baseline: a single 3-way softmax over the meta-features.\"\"\"\n",
    "    meta_inputs = Input(shape=(feature_nums,), name='mata-features', dtype=\"float32\")    \n",
    "    class_probs = Dense(3, activation='softmax')(meta_inputs)\n",
    "\n",
    "    model = Model(inputs=[meta_inputs], outputs=class_probs)\n",
    "    model.compile(\n",
    "        optimizer=Adam(lr=1e-3, decay=1e-6),\n",
    "        loss='categorical_crossentropy',\n",
    "        metrics=['accuracy', weighted_accuracy],\n",
    "    )\n",
    "    model.summary()\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "# NOTE(review): torch, torch.nn.functional and importlib are imported but not\n",
    "# referenced anywhere in this cell.\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import importlib\n",
    "\n",
    "from sklearn.metrics import roc_auc_score, log_loss\n",
    "from keras.callbacks import EarlyStopping, ModelCheckpoint\n",
    "\n",
    "from iwillwin.config import model_config\n",
    "\n",
    "class ModelTrainer(object):\n",
    "    \"\"\"Base class: k-fold trains models produced by a factory function and\n",
    "    collects per-fold models, best validation scores and OOF predictions.\"\"\"\n",
    "\n",
    "    def __init__(self, model_stamp, epoch_num, learning_rate=1e-3,\n",
    "                 shuffle_inputs=False, verbose_round=40, early_stopping_round=8):\n",
    "        # model_stamp is used as the filename prefix for per-fold checkpoints.\n",
    "        self.models = []\n",
    "        self.model_stamp = model_stamp\n",
    "        self.val_loss = -1\n",
    "        self.auc = -1\n",
    "        self.epoch_num = epoch_num\n",
    "        self.learning_rate = learning_rate\n",
    "        self.eps = 1e-10\n",
    "        self.verbose_round = verbose_round\n",
    "        self.early_stopping_round = early_stopping_round\n",
    "        self.shuffle_inputs = shuffle_inputs\n",
    "        # NOTE(review): only two weights for what is a 3-class problem elsewhere\n",
    "        # in this notebook; this attribute is never used in this cell.\n",
    "        self.class_weight = [0.93, 1.21]\n",
    "\n",
    "    def train_folds(self, features, y, fold_count, batch_size, get_model_func, augments=None, skip_fold=0, patience=10, scale_sample_weight=False,\n",
    "                    class_weight=None, self_aware=False, swap_input=False):\n",
    "        \"\"\"Train `fold_count` models on contiguous k-fold splits of `features`.\n",
    "\n",
    "        Returns (models, mean_best_val_score, fold_predictions).\n",
    "\n",
    "        NOTE(review): `class_weight` is immediately overwritten with None, and\n",
    "        although `weight_val` is derived from `scale_sample_weight`, the call\n",
    "        below passes `weight_val=None` -- both parameters are effectively\n",
    "        ignored. `augments`, `skip_fold`, `self_aware` and `swap_input` are\n",
    "        also unused here.\n",
    "        \"\"\"\n",
    "        weight_val=scale_sample_weight\n",
    "        class_weight=None\n",
    "        fold_size = len(features) // fold_count\n",
    "        models = []\n",
    "        fold_predictions = []\n",
    "        score = 0\n",
    "\n",
    "        for fold_id in range(0, fold_count):\n",
    "            fold_start = fold_size * fold_id\n",
    "            fold_end = fold_start + fold_size\n",
    "\n",
    "            # The last fold absorbs the remainder rows.\n",
    "            if fold_id == fold_count - 1:\n",
    "                fold_end = len(features)\n",
    "\n",
    "            train_features = np.concatenate([features[:fold_start], features[fold_end:]])\n",
    "            train_y = np.concatenate([y[:fold_start], y[fold_end:]])\n",
    "            \n",
    "            val_features = features[fold_start:fold_end]\n",
    "            val_y = y[fold_start:fold_end]\n",
    "            # NOTE(review): fold_pos is computed but never used.\n",
    "            fold_pos = (np.sum(train_y) / len(train_features))\n",
    "\n",
    "            # Dict keys must match the Input layer name used by the model builders.\n",
    "            train_data = {\n",
    "                \"mata-features\": train_features,\n",
    "            }\n",
    "\n",
    "            val_data = {\n",
    "                \"mata-features\": val_features,\n",
    "            }\n",
    "\n",
    "            model, bst_val_score, fold_prediction = self._train_model_by_logloss(\n",
    "                get_model_func(), batch_size, train_data, train_y, val_data, val_y, fold_id, patience, class_weight, weight_val=None)\n",
    "    \n",
    "            score += bst_val_score\n",
    "            models.append(model)\n",
    "            fold_predictions.append(fold_prediction)\n",
    "\n",
    "        self.models = models\n",
    "        self.val_loss = score / fold_count\n",
    "        return models, self.val_loss, fold_predictions\n",
    "\n",
    "    def _train_model_by_logloss(self, model, batch_size, train_x, train_y, val_x, val_y, fold_id, patience):\n",
    "        # return a list which holds [models, val_loss, auc, prediction]\n",
    "        # NOTE(review): this abstract signature lacks the `class_weight` and\n",
    "        # `weight_val` arguments that train_folds passes and that the\n",
    "        # KerasModelTrainer override accepts -- align when refactoring.\n",
    "        raise NotImplementedError\n",
    "\n",
    "class KerasModelTrainer(ModelTrainer):\n",
    "    \"\"\"Keras implementation: early stopping plus best-weights checkpointing.\"\"\"\n",
    "\n",
    "    def __init__(self, *args, **kwargs):\n",
    "        super(KerasModelTrainer, self).__init__(*args, **kwargs)\n",
    "        pass\n",
    "\n",
    "    def _train_model_by_logloss(self, model, batch_size, train_x, train_y, val_x, val_y, fold_id, patience, class_weight, weight_val):\n",
    "        \"\"\"Fit one fold, reload the checkpointed weights, and return\n",
    "        (model, best val_weighted_accuracy, validation predictions).\n",
    "\n",
    "        NOTE(review): `patience` and `class_weight` are ignored -- early\n",
    "        stopping patience is hardcoded to 10 and no class_weight is passed\n",
    "        to model.fit().\n",
    "        \"\"\"\n",
    "        early_stopping = EarlyStopping(monitor='val_loss', patience=10)\n",
    "        bst_model_path = self.model_stamp + str(fold_id) + '.h5'\n",
    "        val_data = (val_x, val_y, weight_val) if weight_val is not None else (val_x, val_y)\n",
    "        # NOTE(review): ModelCheckpoint saves on its default monitor (val_loss)\n",
    "        # while the reported score is the best val_weighted_accuracy, so the\n",
    "        # reloaded weights may not correspond to the returned score -- confirm\n",
    "        # this is intended.\n",
    "        model_checkpoint = ModelCheckpoint(bst_model_path, save_best_only=True, save_weights_only=True)\n",
    "        hist = model.fit(train_x, train_y,\n",
    "                         validation_data=val_data,\n",
    "                         epochs=self.epoch_num, batch_size=batch_size, shuffle=True,\n",
    "                         callbacks=[early_stopping, model_checkpoint],)\n",
    "        bst_val_score = max(hist.history['val_weighted_accuracy'])\n",
    "        model.load_weights(bst_model_path)\n",
    "        predictions = model.predict(val_x)\n",
    "\n",
    "        return model, bst_val_score, predictions"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def _agent_get_model():\n",
    "    \"\"\"Zero-arg model factory for train_folds: builds the dense-add net sized to the ensemble input width (reads module-level `ensemble_trains`).\"\"\"\n",
    "    return get_dense_add_net(ensemble_trains.shape[1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def np_weighted_accuracy(y_true, y_pred):\n",
    "    weight = np.array([[1/16, 1/15, 1/5]])\n",
    "    norm = [(1/16) + (1/15) + (1/5)]\n",
    "    weight_mask = weight * y_true\n",
    "    weight_mask = np.max(weight_mask, axis=-1)\n",
    "    norms = np.sum(weight_mask)\n",
    "    \n",
    "    y_true = np.argmax(y_true, axis=-1)\n",
    "    y_pred = np.argmax(y_pred, axis=-1)\n",
    "    \n",
    "    res = ((y_true == y_pred) * weight_mask).sum() / norms\n",
    "    return res"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def weighted_accuracy(y_true, y_pred):\n",
    "    \"\"\"Keras class-weighted accuracy metric.\n",
    "\n",
    "    NOTE(review): this cell re-defines weighted_accuracy identically to the\n",
    "    earlier definition in this notebook (minus one inline comment); it is\n",
    "    redundant and silently shadows that earlier definition on re-run.\n",
    "    \"\"\"\n",
    "    weight = np.array([[1/16, 1/15, 1/5]])\n",
    "    # NOTE(review): `norm` is unused; normalisation uses K.sum(weight_mask).\n",
    "    norm = [(1/16) + (1/15) + (1/5)]\n",
    "    weight_mask = weight * y_true\n",
    "    \n",
    "    y_pred = K.cast(y_pred > 0.5, 'int32')\n",
    "    y_true = K.cast(y_true, 'int32')\n",
    "    \n",
    "    res = K.cast(K.equal(y_pred, y_true), 'float32') * weight_mask / K.sum(weight_mask)\n",
    "    res = K.sum(res)\n",
    "    return res"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\keras\\legacy\\layers.py:198: UserWarning: The `Highway` layer is deprecated and will be removed after 06/2017.\n",
      "  warnings.warn('The `Highway` layer is deprecated '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_1 (Dropout)             (None, 24)           0           dense_1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_1[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_2 (Dense)                 (None, 24)           1248        concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_2 (Dropout)             (None, 24)           0           dense_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_2 (Concatenate)     (None, 75)           0           concatenate_1[0][0]              \n",
      "                                                                 dropout_2[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_3 (Dense)                 (None, 24)           1824        concatenate_2[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_3 (Dropout)             (None, 24)           0           dense_3[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_3 (Concatenate)     (None, 99)           0           concatenate_2[0][0]              \n",
      "                                                                 dropout_3[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_4 (Dense)                 (None, 24)           2400        concatenate_3[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_4 (Dropout)             (None, 24)           0           dense_4[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_4 (Concatenate)     (None, 123)          0           concatenate_3[0][0]              \n",
      "                                                                 dropout_4[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_5 (Dense)                 (None, 24)           2976        concatenate_4[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_5 (Dropout)             (None, 24)           0           dense_5[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_5 (Concatenate)     (None, 147)          0           concatenate_4[0][0]              \n",
      "                                                                 dropout_5[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "highway_1 (Highway)             (None, 147)          43512       concatenate_5[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_6 (Dense)                 (None, 3)            444         highway_1[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 9:12 - loss: 0.9776 - acc: 0.5059 - weighted_accuracy: 0.050 - ETA: 41s - loss: 0.5093 - acc: 0.8182 - weighted_accuracy: 0.669 - ETA: 21s - loss: 0.4139 - acc: 0.8522 - weighted_accuracy: 0.75 - ETA: 14s - loss: 0.3770 - acc: 0.8616 - weighted_accuracy: 0.78 - ETA: 10s - loss: 0.3581 - acc: 0.8646 - weighted_accuracy: 0.80 - ETA: 8s - loss: 0.3419 - acc: 0.8688 - weighted_accuracy: 0.8151 - ETA: 6s - loss: 0.3321 - acc: 0.8709 - weighted_accuracy: 0.821 - ETA: 5s - loss: 0.3230 - acc: 0.8729 - weighted_accuracy: 0.827 - ETA: 4s - loss: 0.3165 - acc: 0.8744 - weighted_accuracy: 0.831 - ETA: 3s - loss: 0.3108 - acc: 0.8758 - weighted_accuracy: 0.835 - ETA: 3s - loss: 0.3062 - acc: 0.8769 - weighted_accuracy: 0.837 - ETA: 2s - loss: 0.3034 - acc: 0.8770 - weighted_accuracy: 0.839 - ETA: 2s - loss: 0.3012 - acc: 0.8772 - weighted_accuracy: 0.840 - ETA: 2s - loss: 0.2984 - acc: 0.8778 - weighted_accuracy: 0.842 - ETA: 1s - loss: 0.2961 - acc: 0.8784 - weighted_accuracy: 0.844 - ETA: 1s - loss: 0.2945 - acc: 0.8784 - weighted_accuracy: 0.845 - ETA: 1s - loss: 0.2928 - acc: 0.8787 - weighted_accuracy: 0.845 - ETA: 1s - loss: 0.2916 - acc: 0.8790 - weighted_accuracy: 0.846 - ETA: 0s - loss: 0.2905 - acc: 0.8791 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2891 - acc: 0.8794 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2881 - acc: 0.8795 - weighted_accuracy: 0.848 - ETA: 0s - loss: 0.2869 - acc: 0.8797 - weighted_accuracy: 0.849 - ETA: 0s - loss: 0.2857 - acc: 0.8800 - weighted_accuracy: 0.849 - ETA: 0s - loss: 0.2849 - acc: 0.8799 - weighted_accuracy: 0.850 - 3s 11us/step - loss: 0.2846 - acc: 0.8799 - weighted_accuracy: 0.8502 - val_loss: 0.2675 - val_acc: 0.8826 - val_weighted_accuracy: 0.8579\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2688 - acc: 0.8818 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2657 - acc: 0.8855 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2684 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2685 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2703 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2694 - acc: 0.8826 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2687 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2670 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2661 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2657 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2657 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2656 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2658 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2661 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2661 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2659 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2646 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8838 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2650 - acc: 0.8839 - weighted_accuracy: 0.8600 - val_loss: 0.2659 - val_acc: 0.8842 - val_weighted_accuracy: 0.8587\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2817 - acc: 0.8760 - weighted_accuracy: 0.848 - ETA: 1s - loss: 0.2659 - acc: 0.8852 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2608 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2600 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8851 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2643 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8839 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2637 - acc: 0.8839 - weighted_accuracy: 0.8599 - val_loss: 0.2661 - val_acc: 0.8836 - val_weighted_accuracy: 0.8600\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2974 - acc: 0.8604 - weighted_accuracy: 0.829 - ETA: 1s - loss: 0.2662 - acc: 0.8816 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2645 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2655 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2644 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2654 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2649 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2652 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2651 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2650 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2645 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2646 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2649 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2646 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2648 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2648 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2647 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2645 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2632 - acc: 0.8843 - weighted_accuracy: 0.8606 - val_loss: 0.2670 - val_acc: 0.8825 - val_weighted_accuracy: 0.8586\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2640 - acc: 0.8779 - weighted_accuracy: 0.840 - ETA: 1s - loss: 0.2577 - acc: 0.8873 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2583 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2595 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2626 - acc: 0.8842 - weighted_accuracy: 0.8606 - val_loss: 0.2656 - val_acc: 0.8835 - val_weighted_accuracy: 0.8604\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2955 - acc: 0.8711 - weighted_accuracy: 0.845 - ETA: 1s - loss: 0.2708 - acc: 0.8813 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2648 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2637 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2621 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.8603 - val_loss: 0.2658 - val_acc: 0.8839 - val_weighted_accuracy: 0.8621\n",
      "Epoch 7/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2411 - acc: 0.8906 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2657 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2623 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2640 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.8610 - val_loss: 0.2654 - val_acc: 0.8833 - val_weighted_accuracy: 0.8591\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2397 - acc: 0.8896 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2566 - acc: 0.8890 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2580 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2589 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2619 - acc: 0.8847 - weighted_accuracy: 0.8608 - val_loss: 0.2658 - val_acc: 0.8831 - val_weighted_accuracy: 0.8610\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2658 - acc: 0.8721 - weighted_accuracy: 0.847 - ETA: 1s - loss: 0.2556 - acc: 0.8870 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2559 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2570 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2578 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2614 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.8608 - val_loss: 0.2653 - val_acc: 0.8838 - val_weighted_accuracy: 0.8609\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2717 - acc: 0.8838 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2629 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2632 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2613 - acc: 0.8868 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2604 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.8610 - val_loss: 0.2645 - val_acc: 0.8838 - val_weighted_accuracy: 0.8595\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2470 - acc: 0.8877 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2674 - acc: 0.8813 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2639 - acc: 0.8820 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2621 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2603 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2592 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2580 - acc: 0.8865 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2583 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2581 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2583 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2580 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.8611 - val_loss: 0.2655 - val_acc: 0.8830 - val_weighted_accuracy: 0.8578\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2603 - acc: 0.8809 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2584 - acc: 0.8892 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2633 - acc: 0.8856 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2635 - acc: 0.8842 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2637 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.8607 - val_loss: 0.2652 - val_acc: 0.8838 - val_weighted_accuracy: 0.8605\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2917 - acc: 0.8711 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2630 - acc: 0.8811 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2630 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.8615 - val_loss: 0.2649 - val_acc: 0.8834 - val_weighted_accuracy: 0.8605\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2882 - acc: 0.8623 - weighted_accuracy: 0.830 - ETA: 1s - loss: 0.2562 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2563 - acc: 0.8874 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2580 - acc: 0.8866 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2572 - acc: 0.8873 - weighted_accuracy: 0.866 - ETA: 0s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2574 - acc: 0.8870 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2587 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.8613 - val_loss: 0.2658 - val_acc: 0.8832 - val_weighted_accuracy: 0.8599\n",
      "Epoch 15/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2611 - acc: 0.8867 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2594 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2606 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2617 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.8611 - val_loss: 0.2655 - val_acc: 0.8836 - val_weighted_accuracy: 0.8626\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2580 - acc: 0.8799 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2586 - acc: 0.8836 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2601 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2567 - acc: 0.8857 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2571 - acc: 0.8858 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2591 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8852 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.8615 - val_loss: 0.2662 - val_acc: 0.8830 - val_weighted_accuracy: 0.8603\n",
      "Epoch 17/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2462 - acc: 0.8994 - weighted_accuracy: 0.889 - ETA: 1s - loss: 0.2507 - acc: 0.8869 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2555 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2564 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2562 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2576 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2577 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2582 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2587 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.8616 - val_loss: 0.2650 - val_acc: 0.8835 - val_weighted_accuracy: 0.8606\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2528 - acc: 0.8916 - weighted_accuracy: 0.876 - ETA: 1s - loss: 0.2599 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2579 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2584 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.8616 - val_loss: 0.2651 - val_acc: 0.8829 - val_weighted_accuracy: 0.8600\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2736 - acc: 0.8799 - weighted_accuracy: 0.850 - ETA: 1s - loss: 0.2608 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2617 - acc: 0.8825 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2612 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2625 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.8618 - val_loss: 0.2658 - val_acc: 0.8827 - val_weighted_accuracy: 0.8600\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2896 - acc: 0.8691 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2585 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2595 - acc: 0.8868 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2635 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8838 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2637 - acc: 0.8836 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - 1s 4us/step - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.8620 - val_loss: 0.2653 - val_acc: 0.8831 - val_weighted_accuracy: 0.8598\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_7 (Dense)                 (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_6 (Dropout)             (None, 24)           0           dense_7[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_6 (Concatenate)     (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_6[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_8 (Dense)                 (None, 24)           1248        concatenate_6[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_7 (Dropout)             (None, 24)           0           dense_8[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_7 (Concatenate)     (None, 75)           0           concatenate_6[0][0]              \n",
      "                                                                 dropout_7[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_9 (Dense)                 (None, 24)           1824        concatenate_7[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_8 (Dropout)             (None, 24)           0           dense_9[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_8 (Concatenate)     (None, 99)           0           concatenate_7[0][0]              \n",
      "                                                                 dropout_8[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_10 (Dense)                (None, 24)           2400        concatenate_8[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_9 (Dropout)             (None, 24)           0           dense_10[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_9 (Concatenate)     (None, 123)          0           concatenate_8[0][0]              \n",
      "                                                                 dropout_9[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_11 (Dense)                (None, 24)           2976        concatenate_9[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_10 (Dropout)            (None, 24)           0           dense_11[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_10 (Concatenate)    (None, 147)          0           concatenate_9[0][0]              \n",
      "                                                                 dropout_10[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_2 (Highway)             (None, 147)          43512       concatenate_10[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_12 (Dense)                (None, 3)            444         highway_2[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 1:50 - loss: 0.9500 - acc: 0.6924 - weighted_accuracy: 0.001 - ETA: 9s - loss: 0.5377 - acc: 0.8421 - weighted_accuracy: 0.6449  - ETA: 5s - loss: 0.4309 - acc: 0.8580 - weighted_accuracy: 0.736 - ETA: 3s - loss: 0.3872 - acc: 0.8650 - weighted_accuracy: 0.775 - ETA: 2s - loss: 0.3636 - acc: 0.8687 - weighted_accuracy: 0.795 - ETA: 2s - loss: 0.3469 - acc: 0.8710 - weighted_accuracy: 0.807 - ETA: 2s - loss: 0.3360 - acc: 0.8724 - weighted_accuracy: 0.815 - ETA: 1s - loss: 0.3264 - acc: 0.8739 - weighted_accuracy: 0.822 - ETA: 1s - loss: 0.3204 - acc: 0.8747 - weighted_accuracy: 0.826 - ETA: 1s - loss: 0.3144 - acc: 0.8760 - weighted_accuracy: 0.830 - ETA: 1s - loss: 0.3093 - acc: 0.8769 - weighted_accuracy: 0.833 - ETA: 1s - loss: 0.3060 - acc: 0.8773 - weighted_accuracy: 0.835 - ETA: 0s - loss: 0.3033 - acc: 0.8776 - weighted_accuracy: 0.837 - ETA: 0s - loss: 0.3003 - acc: 0.8783 - weighted_accuracy: 0.839 - ETA: 0s - loss: 0.2982 - acc: 0.8782 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2964 - acc: 0.8784 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2950 - acc: 0.8788 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2937 - acc: 0.8789 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2926 - acc: 0.8791 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2916 - acc: 0.8791 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2902 - acc: 0.8793 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2888 - acc: 0.8796 - weighted_accuracy: 0.846 - ETA: 0s - loss: 0.2874 - acc: 0.8800 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2866 - acc: 0.8800 - weighted_accuracy: 0.847 - 2s 6us/step - loss: 0.2863 - acc: 0.8801 - weighted_accuracy: 0.8479 - val_loss: 0.2398 - val_acc: 0.8941 - val_weighted_accuracy: 0.8760\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2557 - acc: 0.8848 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2650 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2679 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2679 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2685 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2684 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2687 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2692 - acc: 0.8820 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2688 - acc: 0.8820 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2683 - acc: 0.8820 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2688 - acc: 0.8814 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2686 - acc: 0.8816 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2684 - acc: 0.8817 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2679 - acc: 0.8820 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2684 - acc: 0.8818 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2680 - acc: 0.8817 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2678 - acc: 0.8820 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2677 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2677 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2673 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2676 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2678 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2677 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2671 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2675 - acc: 0.8826 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2675 - acc: 0.8825 - weighted_accuracy: 0.8581 - val_loss: 0.2384 - val_acc: 0.8948 - val_weighted_accuracy: 0.8778\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2729 - acc: 0.8779 - weighted_accuracy: 0.850 - ETA: 1s - loss: 0.2728 - acc: 0.8789 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2686 - acc: 0.8805 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2696 - acc: 0.8815 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2682 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2668 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2662 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2661 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2668 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2665 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2673 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2676 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2678 - acc: 0.8820 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2672 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2664 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2669 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2667 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2666 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2668 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2665 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2665 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2667 - acc: 0.8826 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2665 - acc: 0.8828 - weighted_accuracy: 0.8584 - val_loss: 0.2374 - val_acc: 0.8952 - val_weighted_accuracy: 0.8769\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2637 - acc: 0.8721 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2726 - acc: 0.8789 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2702 - acc: 0.8811 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2680 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2685 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2695 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2682 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2675 - acc: 0.8823 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2675 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2684 - acc: 0.8821 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2673 - acc: 0.8822 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2678 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2678 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2674 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2667 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2657 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2664 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2663 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2662 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8826 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2659 - acc: 0.8827 - weighted_accuracy: 0.8583 - val_loss: 0.2385 - val_acc: 0.8942 - val_weighted_accuracy: 0.8762\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2621 - acc: 0.8740 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2602 - acc: 0.8827 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2660 - acc: 0.8807 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2659 - acc: 0.8813 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2642 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2657 - acc: 0.8820 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2660 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2659 - acc: 0.8823 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2660 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2659 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2660 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2658 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2659 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2656 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2656 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2657 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8832 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2655 - acc: 0.8831 - weighted_accuracy: 0.8588 - val_loss: 0.2378 - val_acc: 0.8953 - val_weighted_accuracy: 0.8777\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2829 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2751 - acc: 0.8764 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2715 - acc: 0.8796 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2708 - acc: 0.8804 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2691 - acc: 0.8815 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2681 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2682 - acc: 0.8818 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2677 - acc: 0.8816 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2684 - acc: 0.8812 - weighted_accuracy: 0.855 - ETA: 0s - loss: 0.2680 - acc: 0.8817 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2677 - acc: 0.8817 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2672 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2675 - acc: 0.8819 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2676 - acc: 0.8819 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2673 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2667 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2670 - acc: 0.8820 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2666 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2665 - acc: 0.8826 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2660 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2655 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8832 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2651 - acc: 0.8833 - weighted_accuracy: 0.8587 - val_loss: 0.2378 - val_acc: 0.8953 - val_weighted_accuracy: 0.8780\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2467 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2682 - acc: 0.8821 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2646 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2625 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2647 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2653 - acc: 0.8826 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2658 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2662 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2659 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2667 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2666 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2666 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2665 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2662 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2658 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2658 - acc: 0.8826 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2656 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8829 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2651 - acc: 0.8830 - weighted_accuracy: 0.8586 - val_loss: 0.2386 - val_acc: 0.8937 - val_weighted_accuracy: 0.8781\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2775 - acc: 0.8799 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2686 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2690 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2660 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2669 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2667 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2671 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2667 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2660 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2656 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2653 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2651 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2648 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2646 - acc: 0.8833 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2645 - acc: 0.8833 - weighted_accuracy: 0.8591 - val_loss: 0.2375 - val_acc: 0.8956 - val_weighted_accuracy: 0.8769\n",
      "Epoch 9/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2996 - acc: 0.8672 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.2678 - acc: 0.8816 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2650 - acc: 0.8827 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2634 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2645 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2646 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2651 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2646 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2643 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2646 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2645 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2646 - acc: 0.8831 - weighted_accuracy: 0.858 - 1s 4us/step - loss: 0.2645 - acc: 0.8832 - weighted_accuracy: 0.8586 - val_loss: 0.2380 - val_acc: 0.8949 - val_weighted_accuracy: 0.8774\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2866 - acc: 0.8828 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2580 - acc: 0.8895 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2592 - acc: 0.8880 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2607 - acc: 0.8865 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2614 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2636 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8833 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2644 - acc: 0.8833 - weighted_accuracy: 0.8593 - val_loss: 0.2376 - val_acc: 0.8949 - val_weighted_accuracy: 0.8784\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2408 - acc: 0.8965 - weighted_accuracy: 0.877 - ETA: 1s - loss: 0.2585 - acc: 0.8887 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2591 - acc: 0.8879 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2599 - acc: 0.8870 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8858 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8857 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8851 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8842 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8844 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8842 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2645 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.858 - 1s 4us/step - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.8589 - val_loss: 0.2380 - val_acc: 0.8940 - val_weighted_accuracy: 0.8760\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2632 - acc: 0.8916 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2680 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2662 - acc: 0.8830 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2676 - acc: 0.8823 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2665 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2657 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2628 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2635 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2644 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2637 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2634 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2642 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8836 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2642 - acc: 0.8835 - weighted_accuracy: 0.8595 - val_loss: 0.2373 - val_acc: 0.8954 - val_weighted_accuracy: 0.8765\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2872 - acc: 0.8740 - weighted_accuracy: 0.850 - ETA: 1s - loss: 0.2745 - acc: 0.8800 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2677 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2654 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2663 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2662 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2649 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8834 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2641 - acc: 0.8833 - weighted_accuracy: 0.8592 - val_loss: 0.2383 - val_acc: 0.8939 - val_weighted_accuracy: 0.8770\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2816 - acc: 0.8760 - weighted_accuracy: 0.840 - ETA: 1s - loss: 0.2644 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2642 - acc: 0.8829 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2627 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2633 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2640 - acc: 0.8835 - weighted_accuracy: 0.8593 - val_loss: 0.2369 - val_acc: 0.8957 - val_weighted_accuracy: 0.8779\n",
      "Epoch 15/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2572 - acc: 0.8857 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2599 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2614 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2624 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8833 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2639 - acc: 0.8834 - weighted_accuracy: 0.8593 - val_loss: 0.2373 - val_acc: 0.8952 - val_weighted_accuracy: 0.8782\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2470 - acc: 0.8945 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2619 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2659 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2658 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2663 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2665 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2657 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2645 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2646 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.8599 - val_loss: 0.2385 - val_acc: 0.8933 - val_weighted_accuracy: 0.8771\n",
      "Epoch 17/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2640 - acc: 0.8809 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2652 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2641 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2646 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2626 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2618 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8835 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2637 - acc: 0.8835 - weighted_accuracy: 0.8597 - val_loss: 0.2388 - val_acc: 0.8946 - val_weighted_accuracy: 0.8777\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2921 - acc: 0.8750 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2653 - acc: 0.8827 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2653 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2661 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2666 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8827 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2651 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2651 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2646 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2636 - acc: 0.8838 - weighted_accuracy: 0.8599 - val_loss: 0.2378 - val_acc: 0.8944 - val_weighted_accuracy: 0.8770\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2875 - acc: 0.8594 - weighted_accuracy: 0.836 - ETA: 1s - loss: 0.2658 - acc: 0.8792 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2640 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2642 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2642 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2640 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2644 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8834 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.8595 - val_loss: 0.2371 - val_acc: 0.8951 - val_weighted_accuracy: 0.8769\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2866 - acc: 0.8691 - weighted_accuracy: 0.836 - ETA: 1s - loss: 0.2662 - acc: 0.8809 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2664 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2663 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2660 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2663 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2644 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2638 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.8600 - val_loss: 0.2369 - val_acc: 0.8955 - val_weighted_accuracy: 0.8777\n",
      "Epoch 21/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2618 - acc: 0.8877 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2726 - acc: 0.8797 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2650 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2644 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2645 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2640 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2638 - acc: 0.8838 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2635 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2642 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.8599 - val_loss: 0.2370 - val_acc: 0.8957 - val_weighted_accuracy: 0.8781\n",
      "Epoch 22/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2554 - acc: 0.8760 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2638 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2636 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2632 - acc: 0.8838 - weighted_accuracy: 0.8601 - val_loss: 0.2378 - val_acc: 0.8944 - val_weighted_accuracy: 0.8781\n",
      "Epoch 23/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2389 - acc: 0.8984 - weighted_accuracy: 0.879 - ETA: 1s - loss: 0.2628 - acc: 0.8828 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2598 - acc: 0.8834 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2626 - acc: 0.8829 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2618 - acc: 0.8838 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2627 - acc: 0.8838 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2634 - acc: 0.8834 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2639 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2631 - acc: 0.8840 - weighted_accuracy: 0.8606 - val_loss: 0.2382 - val_acc: 0.8937 - val_weighted_accuracy: 0.8749\n",
      "Epoch 24/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2687 - acc: 0.8828 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.8603 - val_loss: 0.2370 - val_acc: 0.8955 - val_weighted_accuracy: 0.8780\n",
      "Epoch 25/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2532 - acc: 0.8916 - weighted_accuracy: 0.876 - ETA: 1s - loss: 0.2567 - acc: 0.8887 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2603 - acc: 0.8859 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2640 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.8605 - val_loss: 0.2376 - val_acc: 0.8944 - val_weighted_accuracy: 0.8777\n",
      "Epoch 26/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2408 - acc: 0.8926 - weighted_accuracy: 0.871 - ETA: 1s - loss: 0.2590 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2661 - acc: 0.8802 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2639 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2625 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.8603 - val_loss: 0.2372 - val_acc: 0.8951 - val_weighted_accuracy: 0.8782\n",
      "Epoch 27/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2810 - acc: 0.8730 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2586 - acc: 0.8865 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2604 - acc: 0.8844 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2632 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2625 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2628 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2645 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.8606 - val_loss: 0.2375 - val_acc: 0.8953 - val_weighted_accuracy: 0.8792\n",
      "Epoch 28/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2637 - acc: 0.8828 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2583 - acc: 0.8834 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2614 - acc: 0.8831 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2628 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2637 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.8605 - val_loss: 0.2373 - val_acc: 0.8952 - val_weighted_accuracy: 0.8775\n",
      "Epoch 29/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2391 - acc: 0.8984 - weighted_accuracy: 0.887 - ETA: 1s - loss: 0.2628 - acc: 0.8831 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2622 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2606 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.8607 - val_loss: 0.2378 - val_acc: 0.8938 - val_weighted_accuracy: 0.8752\n",
      "Epoch 30/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2555 - acc: 0.8896 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2597 - acc: 0.8868 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2627 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2623 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2640 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2626 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.8604 - val_loss: 0.2377 - val_acc: 0.8953 - val_weighted_accuracy: 0.8780\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_13 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_11 (Dropout)            (None, 24)           0           dense_13[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_11 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_11[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_14 (Dense)                (None, 24)           1248        concatenate_11[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_12 (Dropout)            (None, 24)           0           dense_14[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_12 (Concatenate)    (None, 75)           0           concatenate_11[0][0]             \n",
      "                                                                 dropout_12[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_15 (Dense)                (None, 24)           1824        concatenate_12[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_13 (Dropout)            (None, 24)           0           dense_15[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_13 (Concatenate)    (None, 99)           0           concatenate_12[0][0]             \n",
      "                                                                 dropout_13[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_16 (Dense)                (None, 24)           2400        concatenate_13[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_14 (Dropout)            (None, 24)           0           dense_16[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_14 (Concatenate)    (None, 123)          0           concatenate_13[0][0]             \n",
      "                                                                 dropout_14[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_17 (Dense)                (None, 24)           2976        concatenate_14[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_15 (Dropout)            (None, 24)           0           dense_17[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_15 (Concatenate)    (None, 147)          0           concatenate_14[0][0]             \n",
      "                                                                 dropout_15[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_3 (Highway)             (None, 147)          43512       concatenate_15[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_18 (Dense)                (None, 3)            444         highway_3[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2:07 - loss: 1.1454 - acc: 0.2910 - weighted_accuracy: 0.070 - ETA: 12s - loss: 0.6891 - acc: 0.6956 - weighted_accuracy: 0.544 - ETA: 6s - loss: 0.5073 - acc: 0.7878 - weighted_accuracy: 0.6881 - ETA: 4s - loss: 0.4397 - acc: 0.8185 - weighted_accuracy: 0.742 - ETA: 3s - loss: 0.4033 - acc: 0.8336 - weighted_accuracy: 0.771 - ETA: 3s - loss: 0.3803 - acc: 0.8426 - weighted_accuracy: 0.787 - ETA: 2s - loss: 0.3615 - acc: 0.8500 - weighted_accuracy: 0.800 - ETA: 2s - loss: 0.3481 - acc: 0.8552 - weighted_accuracy: 0.809 - ETA: 1s - loss: 0.3382 - acc: 0.8591 - weighted_accuracy: 0.816 - ETA: 1s - loss: 0.3309 - acc: 0.8620 - weighted_accuracy: 0.821 - ETA: 1s - loss: 0.3253 - acc: 0.8638 - weighted_accuracy: 0.825 - ETA: 1s - loss: 0.3203 - acc: 0.8654 - weighted_accuracy: 0.828 - ETA: 1s - loss: 0.3163 - acc: 0.8667 - weighted_accuracy: 0.830 - ETA: 1s - loss: 0.3128 - acc: 0.8678 - weighted_accuracy: 0.833 - ETA: 0s - loss: 0.3093 - acc: 0.8689 - weighted_accuracy: 0.835 - ETA: 0s - loss: 0.3064 - acc: 0.8701 - weighted_accuracy: 0.837 - ETA: 0s - loss: 0.3037 - acc: 0.8712 - weighted_accuracy: 0.838 - ETA: 0s - loss: 0.3009 - acc: 0.8722 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2991 - acc: 0.8728 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2975 - acc: 0.8732 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2962 - acc: 0.8735 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2947 - acc: 0.8740 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2940 - acc: 0.8742 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2925 - acc: 0.8747 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2913 - acc: 0.8752 - weighted_accuracy: 0.845 - 2s 6us/step - loss: 0.2904 - acc: 0.8755 - weighted_accuracy: 0.8459 - val_loss: 0.2627 - val_acc: 0.8863 - val_weighted_accuracy: 0.8644\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2720 - acc: 0.8799 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2614 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2614 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2652 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2678 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2671 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2665 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2673 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2668 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2657 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2659 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2661 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2656 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2664 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2664 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2662 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2660 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2662 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2662 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2662 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2658 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2659 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2657 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2656 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8838 - weighted_accuracy: 0.860 - 2s 5us/step - loss: 0.2651 - acc: 0.8839 - weighted_accuracy: 0.8600 - val_loss: 0.2618 - val_acc: 0.8861 - val_weighted_accuracy: 0.8631\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2779 - acc: 0.8818 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2699 - acc: 0.8807 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2688 - acc: 0.8823 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2679 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2681 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2678 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2663 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2674 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2673 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2664 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2657 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2654 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2657 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2649 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.8600 - val_loss: 0.2616 - val_acc: 0.8860 - val_weighted_accuracy: 0.8652\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2506 - acc: 0.8828 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2690 - acc: 0.8779 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2669 - acc: 0.8796 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2638 - acc: 0.8806 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2611 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8823 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8840 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2632 - acc: 0.8840 - weighted_accuracy: 0.8601 - val_loss: 0.2613 - val_acc: 0.8852 - val_weighted_accuracy: 0.8644\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2426 - acc: 0.8984 - weighted_accuracy: 0.888 - ETA: 1s - loss: 0.2665 - acc: 0.8818 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2640 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 4us/step - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.8599 - val_loss: 0.2611 - val_acc: 0.8860 - val_weighted_accuracy: 0.8627\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2605 - acc: 0.8818 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2651 - acc: 0.8824 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2655 - acc: 0.8822 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2641 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2638 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2647 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2646 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.8602 - val_loss: 0.2610 - val_acc: 0.8861 - val_weighted_accuracy: 0.8626\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2663 - acc: 0.8877 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2572 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2571 - acc: 0.8871 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2594 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2583 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2579 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2595 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2599 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2625 - acc: 0.8842 - weighted_accuracy: 0.8601 - val_loss: 0.2624 - val_acc: 0.8870 - val_weighted_accuracy: 0.8639\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2564 - acc: 0.8857 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2647 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2658 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2636 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2626 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2636 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.8601 - val_loss: 0.2609 - val_acc: 0.8861 - val_weighted_accuracy: 0.8619\n",
      "Epoch 9/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2785 - acc: 0.8760 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2665 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2632 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2647 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2650 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2641 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.8603 - val_loss: 0.2611 - val_acc: 0.8867 - val_weighted_accuracy: 0.8653\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2565 - acc: 0.8887 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2575 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2593 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2616 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.8606 - val_loss: 0.2607 - val_acc: 0.8869 - val_weighted_accuracy: 0.8639\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2352 - acc: 0.9033 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2578 - acc: 0.8913 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2587 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2590 - acc: 0.8870 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2624 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2629 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2642 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2652 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2643 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.8605 - val_loss: 0.2612 - val_acc: 0.8871 - val_weighted_accuracy: 0.8646\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2658 - acc: 0.8828 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2641 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2625 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2616 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.8607 - val_loss: 0.2608 - val_acc: 0.8867 - val_weighted_accuracy: 0.8648\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2566 - acc: 0.8945 - weighted_accuracy: 0.872 - ETA: 1s - loss: 0.2539 - acc: 0.8890 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2533 - acc: 0.8905 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2563 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2563 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2571 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2571 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2578 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2615 - acc: 0.8843 - weighted_accuracy: 0.8604 - val_loss: 0.2609 - val_acc: 0.8864 - val_weighted_accuracy: 0.8641\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2743 - acc: 0.8730 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2578 - acc: 0.8890 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2589 - acc: 0.8870 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2587 - acc: 0.8859 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2585 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2587 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.8608 - val_loss: 0.2604 - val_acc: 0.8861 - val_weighted_accuracy: 0.8639\n",
      "Epoch 15/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2343 - acc: 0.9014 - weighted_accuracy: 0.888 - ETA: 1s - loss: 0.2601 - acc: 0.8849 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2555 - acc: 0.8887 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2572 - acc: 0.8872 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2569 - acc: 0.8871 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2588 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.8608 - val_loss: 0.2605 - val_acc: 0.8864 - val_weighted_accuracy: 0.8644\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2663 - acc: 0.8799 - weighted_accuracy: 0.844 - ETA: 1s - loss: 0.2641 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2653 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2611 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2627 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2641 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.8611 - val_loss: 0.2609 - val_acc: 0.8864 - val_weighted_accuracy: 0.8625\n",
      "Epoch 17/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2780 - acc: 0.8838 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2642 - acc: 0.8829 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2628 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2643 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2631 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2623 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.8607 - val_loss: 0.2609 - val_acc: 0.8859 - val_weighted_accuracy: 0.8619\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2662 - acc: 0.8838 - weighted_accuracy: 0.850 - ETA: 1s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2548 - acc: 0.8886 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2580 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2560 - acc: 0.8869 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2574 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2573 - acc: 0.8862 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2583 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 4us/step - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.8609 - val_loss: 0.2605 - val_acc: 0.8858 - val_weighted_accuracy: 0.8612\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2573 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2567 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2584 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2590 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.8611 - val_loss: 0.2606 - val_acc: 0.8869 - val_weighted_accuracy: 0.8646\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2434 - acc: 0.8936 - weighted_accuracy: 0.872 - ETA: 1s - loss: 0.2595 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2589 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2603 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2587 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.8613 - val_loss: 0.2607 - val_acc: 0.8867 - val_weighted_accuracy: 0.8646\n",
      "Epoch 21/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2408 - acc: 0.8906 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2634 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2620 - acc: 0.8834 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2611 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2615 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2604 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2593 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.8612 - val_loss: 0.2605 - val_acc: 0.8856 - val_weighted_accuracy: 0.8637\n",
      "Epoch 22/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2599 - acc: 0.8984 - weighted_accuracy: 0.880 - ETA: 1s - loss: 0.2653 - acc: 0.8818 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2625 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2615 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.8615 - val_loss: 0.2604 - val_acc: 0.8867 - val_weighted_accuracy: 0.8632\n",
      "Epoch 23/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2498 - acc: 0.8896 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2639 - acc: 0.8803 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2636 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2645 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2623 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2607 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2594 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2589 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2594 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 4us/step - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.8614 - val_loss: 0.2606 - val_acc: 0.8858 - val_weighted_accuracy: 0.8639\n",
      "Epoch 24/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2536 - acc: 0.8936 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2536 - acc: 0.8884 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2562 - acc: 0.8884 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2569 - acc: 0.8882 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2565 - acc: 0.8877 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2584 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2577 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2586 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2607 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2607 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.8617 - val_loss: 0.2609 - val_acc: 0.8863 - val_weighted_accuracy: 0.8631\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_19 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_16 (Dropout)            (None, 24)           0           dense_19[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_16 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "                                                                 dropout_16[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_20 (Dense)                (None, 24)           1248        concatenate_16[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_17 (Dropout)            (None, 24)           0           dense_20[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_17 (Concatenate)    (None, 75)           0           concatenate_16[0][0]             \n",
      "                                                                 dropout_17[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_21 (Dense)                (None, 24)           1824        concatenate_17[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_18 (Dropout)            (None, 24)           0           dense_21[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_18 (Concatenate)    (None, 99)           0           concatenate_17[0][0]             \n",
      "                                                                 dropout_18[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_22 (Dense)                (None, 24)           2400        concatenate_18[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_19 (Dropout)            (None, 24)           0           dense_22[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_19 (Concatenate)    (None, 123)          0           concatenate_18[0][0]             \n",
      "                                                                 dropout_19[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_23 (Dense)                (None, 24)           2976        concatenate_19[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_20 (Dropout)            (None, 24)           0           dense_23[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_20 (Concatenate)    (None, 147)          0           concatenate_19[0][0]             \n",
      "                                                                 dropout_20[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_4 (Highway)             (None, 147)          43512       concatenate_20[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_24 (Dense)                (None, 3)            444         highway_4[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 2:43 - loss: 1.1970 - acc: 0.2900 - weighted_accuracy: 0.251 - ETA: 14s - loss: 0.6177 - acc: 0.7249 - weighted_accuracy: 0.617 - ETA: 7s - loss: 0.4719 - acc: 0.7996 - weighted_accuracy: 0.7231 - ETA: 5s - loss: 0.4152 - acc: 0.8253 - weighted_accuracy: 0.766 - ETA: 4s - loss: 0.3804 - acc: 0.8403 - weighted_accuracy: 0.791 - ETA: 3s - loss: 0.3602 - acc: 0.8485 - weighted_accuracy: 0.805 - ETA: 2s - loss: 0.3459 - acc: 0.8543 - weighted_accuracy: 0.814 - ETA: 2s - loss: 0.3352 - acc: 0.8587 - weighted_accuracy: 0.821 - ETA: 2s - loss: 0.3280 - acc: 0.8612 - weighted_accuracy: 0.826 - ETA: 1s - loss: 0.3218 - acc: 0.8634 - weighted_accuracy: 0.829 - ETA: 1s - loss: 0.3168 - acc: 0.8656 - weighted_accuracy: 0.832 - ETA: 1s - loss: 0.3133 - acc: 0.8668 - weighted_accuracy: 0.834 - ETA: 1s - loss: 0.3102 - acc: 0.8678 - weighted_accuracy: 0.836 - ETA: 1s - loss: 0.3069 - acc: 0.8690 - weighted_accuracy: 0.838 - ETA: 1s - loss: 0.3038 - acc: 0.8702 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.3016 - acc: 0.8711 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2998 - acc: 0.8718 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2972 - acc: 0.8729 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2953 - acc: 0.8736 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2933 - acc: 0.8742 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2921 - acc: 0.8747 - weighted_accuracy: 0.846 - ETA: 0s - loss: 0.2905 - acc: 0.8753 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2889 - acc: 0.8760 - weighted_accuracy: 0.848 - ETA: 0s - loss: 0.2878 - acc: 0.8764 - weighted_accuracy: 0.849 - ETA: 0s - loss: 0.2868 - acc: 0.8768 - weighted_accuracy: 0.849 - 2s 7us/step - loss: 0.2861 - acc: 0.8771 - weighted_accuracy: 0.8501 - val_loss: 0.2775 - val_acc: 0.8776 - val_weighted_accuracy: 0.8531\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2789 - acc: 0.8828 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2626 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2606 - acc: 0.8862 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2617 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2628 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2640 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2634 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2632 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2632 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2632 - acc: 0.8847 - weighted_accuracy: 0.8611 - val_loss: 0.2761 - val_acc: 0.8788 - val_weighted_accuracy: 0.8554\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2565 - acc: 0.8906 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2630 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2596 - acc: 0.8867 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8865 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2609 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2631 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2624 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2624 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2623 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2617 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.8612 - val_loss: 0.2754 - val_acc: 0.8784 - val_weighted_accuracy: 0.8495\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2808 - acc: 0.8721 - weighted_accuracy: 0.843 - ETA: 1s - loss: 0.2602 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2585 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2618 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.8616 - val_loss: 0.2759 - val_acc: 0.8775 - val_weighted_accuracy: 0.8476\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2944 - acc: 0.8643 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.2666 - acc: 0.8808 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2658 - acc: 0.8819 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2661 - acc: 0.8818 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2654 - acc: 0.8819 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2644 - acc: 0.8827 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.8613 - val_loss: 0.2751 - val_acc: 0.8786 - val_weighted_accuracy: 0.8517\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2910 - acc: 0.8721 - weighted_accuracy: 0.836 - ETA: 1s - loss: 0.2593 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2601 - acc: 0.8858 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2609 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2598 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2597 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8853 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2612 - acc: 0.8852 - weighted_accuracy: 0.8619 - val_loss: 0.2754 - val_acc: 0.8771 - val_weighted_accuracy: 0.8510\n",
      "Epoch 7/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2584 - acc: 0.8906 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2552 - acc: 0.8904 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2549 - acc: 0.8888 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2583 - acc: 0.8871 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2617 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2618 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.8613 - val_loss: 0.2750 - val_acc: 0.8774 - val_weighted_accuracy: 0.8490\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2728 - acc: 0.8760 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2666 - acc: 0.8817 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2636 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2578 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2578 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.8615 - val_loss: 0.2749 - val_acc: 0.8778 - val_weighted_accuracy: 0.8516\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2323 - acc: 0.9033 - weighted_accuracy: 0.890 - ETA: 1s - loss: 0.2618 - acc: 0.8881 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2621 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2618 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2617 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2617 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.8613 - val_loss: 0.2748 - val_acc: 0.8771 - val_weighted_accuracy: 0.8496\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2487 - acc: 0.8936 - weighted_accuracy: 0.874 - ETA: 1s - loss: 0.2563 - acc: 0.8890 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2575 - acc: 0.8886 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2592 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.8616 - val_loss: 0.2748 - val_acc: 0.8782 - val_weighted_accuracy: 0.8493\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2296 - acc: 0.9082 - weighted_accuracy: 0.881 - ETA: 1s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2632 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2588 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2592 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.8617 - val_loss: 0.2755 - val_acc: 0.8782 - val_weighted_accuracy: 0.8488\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2664 - acc: 0.8936 - weighted_accuracy: 0.869 - ETA: 1s - loss: 0.2634 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2587 - acc: 0.8872 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2607 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2620 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2596 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2597 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.8619 - val_loss: 0.2755 - val_acc: 0.8773 - val_weighted_accuracy: 0.8450\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2479 - acc: 0.8877 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2605 - acc: 0.8865 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2640 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2630 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.8622 - val_loss: 0.2748 - val_acc: 0.8775 - val_weighted_accuracy: 0.8501\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2471 - acc: 0.8926 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2549 - acc: 0.8884 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2561 - acc: 0.8877 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2576 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2584 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2577 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.8622 - val_loss: 0.2755 - val_acc: 0.8770 - val_weighted_accuracy: 0.8487\n",
      "Epoch 15/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2393 - acc: 0.8984 - weighted_accuracy: 0.877 - ETA: 1s - loss: 0.2619 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2612 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2582 - acc: 0.8872 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2583 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2576 - acc: 0.8871 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2578 - acc: 0.8874 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2580 - acc: 0.8875 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2571 - acc: 0.8878 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2575 - acc: 0.8875 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2576 - acc: 0.8872 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2577 - acc: 0.8871 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2576 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2581 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2584 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2582 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.8622 - val_loss: 0.2744 - val_acc: 0.8774 - val_weighted_accuracy: 0.8509\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2772 - acc: 0.8809 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2646 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2619 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2592 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2599 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.8626 - val_loss: 0.2749 - val_acc: 0.8767 - val_weighted_accuracy: 0.8499\n",
      "Epoch 17/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2759 - acc: 0.8828 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2631 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2632 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2633 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2633 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8857 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.8625 - val_loss: 0.2755 - val_acc: 0.8774 - val_weighted_accuracy: 0.8456\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2484 - acc: 0.8906 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2610 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2626 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2598 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2587 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2587 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2587 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.8625 - val_loss: 0.2753 - val_acc: 0.8769 - val_weighted_accuracy: 0.8458\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2745 - acc: 0.8906 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2635 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2592 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2584 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2567 - acc: 0.8878 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2587 - acc: 0.8865 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2572 - acc: 0.8872 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2583 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2583 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2586 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2586 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2583 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.8627 - val_loss: 0.2747 - val_acc: 0.8769 - val_weighted_accuracy: 0.8505\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2544 - acc: 0.8906 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2634 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2607 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2598 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2582 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2585 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2577 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2587 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2592 - acc: 0.8858 - weighted_accuracy: 0.8629 - val_loss: 0.2753 - val_acc: 0.8770 - val_weighted_accuracy: 0.8521\n",
      "Epoch 21/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2702 - acc: 0.8965 - weighted_accuracy: 0.875 - ETA: 1s - loss: 0.2573 - acc: 0.8892 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2556 - acc: 0.8878 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2580 - acc: 0.8878 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2588 - acc: 0.8875 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2582 - acc: 0.8874 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2587 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2587 - acc: 0.8869 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8859 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.8633 - val_loss: 0.2754 - val_acc: 0.8776 - val_weighted_accuracy: 0.8481\n",
      "Epoch 22/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2815 - acc: 0.8789 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2584 - acc: 0.8882 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2568 - acc: 0.8889 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2584 - acc: 0.8885 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2578 - acc: 0.8877 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2583 - acc: 0.8874 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2578 - acc: 0.8872 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2580 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2573 - acc: 0.8871 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2580 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2573 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2577 - acc: 0.8868 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2582 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2580 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2582 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2584 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2584 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2586 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.8630 - val_loss: 0.2749 - val_acc: 0.8771 - val_weighted_accuracy: 0.8500\n",
      "Epoch 23/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2557 - acc: 0.8906 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2595 - acc: 0.8875 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2581 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2569 - acc: 0.8887 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2592 - acc: 0.8873 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2583 - acc: 0.8876 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2579 - acc: 0.8876 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2575 - acc: 0.8878 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2576 - acc: 0.8874 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2579 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2581 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2580 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2579 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8857 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2589 - acc: 0.8858 - weighted_accuracy: 0.8629 - val_loss: 0.2749 - val_acc: 0.8770 - val_weighted_accuracy: 0.8492\n",
      "Epoch 24/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2457 - acc: 0.8896 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2631 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2660 - acc: 0.8824 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8825 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2601 - acc: 0.8840 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2609 - acc: 0.8837 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2587 - acc: 0.8847 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2580 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2581 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2584 - acc: 0.8861 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2583 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2580 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2584 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2583 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2588 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2588 - acc: 0.8861 - weighted_accuracy: 0.8636 - val_loss: 0.2753 - val_acc: 0.8766 - val_weighted_accuracy: 0.8487\n",
      "Epoch 25/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2522 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2579 - acc: 0.8871 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2641 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.8630 - val_loss: 0.2753 - val_acc: 0.8770 - val_weighted_accuracy: 0.8490\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_25 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_21 (Dropout)            (None, 24)           0           dense_25[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_21 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_21[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_26 (Dense)                (None, 24)           1248        concatenate_21[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_22 (Dropout)            (None, 24)           0           dense_26[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_22 (Concatenate)    (None, 75)           0           concatenate_21[0][0]             \n",
      "                                                                 dropout_22[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_27 (Dense)                (None, 24)           1824        concatenate_22[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_23 (Dropout)            (None, 24)           0           dense_27[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_23 (Concatenate)    (None, 99)           0           concatenate_22[0][0]             \n",
      "                                                                 dropout_23[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_28 (Dense)                (None, 24)           2400        concatenate_23[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_24 (Dropout)            (None, 24)           0           dense_28[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_24 (Concatenate)    (None, 123)          0           concatenate_23[0][0]             \n",
      "                                                                 dropout_24[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_29 (Dense)                (None, 24)           2976        concatenate_24[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_25 (Dropout)            (None, 24)           0           dense_29[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_25 (Concatenate)    (None, 147)          0           concatenate_24[0][0]             \n",
      "                                                                 dropout_25[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_5 (Highway)             (None, 147)          43512       concatenate_25[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_30 (Dense)                (None, 3)            444         highway_5[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 2:25 - loss: 0.8853 - acc: 0.6768 - weighted_accuracy: 0.284 - ETA: 12s - loss: 0.5173 - acc: 0.8375 - weighted_accuracy: 0.678 - ETA: 6s - loss: 0.4306 - acc: 0.8536 - weighted_accuracy: 0.7507 - ETA: 4s - loss: 0.3856 - acc: 0.8630 - weighted_accuracy: 0.784 - ETA: 3s - loss: 0.3616 - acc: 0.8682 - weighted_accuracy: 0.803 - ETA: 3s - loss: 0.3462 - acc: 0.8706 - weighted_accuracy: 0.814 - ETA: 2s - loss: 0.3346 - acc: 0.8726 - weighted_accuracy: 0.821 - ETA: 2s - loss: 0.3265 - acc: 0.8735 - weighted_accuracy: 0.826 - ETA: 1s - loss: 0.3198 - acc: 0.8748 - weighted_accuracy: 0.830 - ETA: 1s - loss: 0.3162 - acc: 0.8746 - weighted_accuracy: 0.832 - ETA: 1s - loss: 0.3118 - acc: 0.8754 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.3082 - acc: 0.8759 - weighted_accuracy: 0.837 - ETA: 1s - loss: 0.3050 - acc: 0.8764 - weighted_accuracy: 0.839 - ETA: 1s - loss: 0.3023 - acc: 0.8768 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2993 - acc: 0.8776 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2974 - acc: 0.8778 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2962 - acc: 0.8778 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2945 - acc: 0.8783 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2933 - acc: 0.8782 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2917 - acc: 0.8786 - weighted_accuracy: 0.846 - ETA: 0s - loss: 0.2908 - acc: 0.8787 - weighted_accuracy: 0.846 - ETA: 0s - loss: 0.2898 - acc: 0.8788 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2891 - acc: 0.8789 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2878 - acc: 0.8793 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2865 - acc: 0.8797 - weighted_accuracy: 0.848 - ETA: 0s - loss: 0.2859 - acc: 0.8797 - weighted_accuracy: 0.848 - 2s 7us/step - loss: 0.2858 - acc: 0.8797 - weighted_accuracy: 0.8489 - val_loss: 0.2502 - val_acc: 0.8946 - val_weighted_accuracy: 0.8777\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2703 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2656 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2667 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2669 - acc: 0.8822 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2669 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2660 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2656 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2649 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2657 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2655 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2654 - acc: 0.8833 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2649 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2646 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2663 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2665 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2665 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2666 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2663 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2662 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2662 - acc: 0.8829 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2661 - acc: 0.8830 - weighted_accuracy: 0.8580 - val_loss: 0.2489 - val_acc: 0.8944 - val_weighted_accuracy: 0.8789\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2626 - acc: 0.8975 - weighted_accuracy: 0.877 - ETA: 1s - loss: 0.2641 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2650 - acc: 0.8821 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2647 - acc: 0.8821 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2647 - acc: 0.8827 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2642 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2651 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2648 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2649 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2643 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2643 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2647 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2651 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2649 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8833 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2653 - acc: 0.8833 - weighted_accuracy: 0.8589 - val_loss: 0.2504 - val_acc: 0.8924 - val_weighted_accuracy: 0.8754\n",
      "Epoch 4/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2750 - acc: 0.8740 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.2664 - acc: 0.8796 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2626 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2617 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2657 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2661 - acc: 0.8817 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2660 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2666 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2664 - acc: 0.8822 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2656 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2648 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2651 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2647 - acc: 0.8831 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2647 - acc: 0.8831 - weighted_accuracy: 0.8586 - val_loss: 0.2504 - val_acc: 0.8917 - val_weighted_accuracy: 0.8744\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2582 - acc: 0.8965 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2638 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2584 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8832 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2643 - acc: 0.8832 - weighted_accuracy: 0.8586 - val_loss: 0.2490 - val_acc: 0.8943 - val_weighted_accuracy: 0.8796\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2615 - acc: 0.8906 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2617 - acc: 0.8856 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2644 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2647 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2646 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2645 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8832 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2640 - acc: 0.8831 - weighted_accuracy: 0.8587 - val_loss: 0.2485 - val_acc: 0.8941 - val_weighted_accuracy: 0.8767\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2343 - acc: 0.8916 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2677 - acc: 0.8796 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2657 - acc: 0.8813 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2644 - acc: 0.8819 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2651 - acc: 0.8815 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2651 - acc: 0.8814 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2661 - acc: 0.8812 - weighted_accuracy: 0.855 - ETA: 0s - loss: 0.2650 - acc: 0.8823 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2659 - acc: 0.8818 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2650 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2645 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2644 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2646 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2638 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2642 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2641 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2648 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2647 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2645 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2643 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2644 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2649 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2644 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8830 - weighted_accuracy: 0.858 - 2s 5us/step - loss: 0.2638 - acc: 0.8831 - weighted_accuracy: 0.8585 - val_loss: 0.2480 - 
val_acc: 0.8939 - val_weighted_accuracy: 0.8774\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2752 - acc: 0.8682 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2591 - acc: 0.8820 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2565 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2591 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2581 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 
0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8833 - weighted_accuracy: 0.858 - 2s 6us/step - loss: 0.2635 - acc: 0.8832 - weighted_accuracy: 0.8586 - val_loss: 0.2473 - val_acc: 0.8942 - val_weighted_accuracy: 0.8775\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2725 - acc: 0.8721 - weighted_accuracy: 0.854 - ETA: 2s - loss: 0.2639 - acc: 0.8809 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2661 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2632 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2640 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2633 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2635 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2638 - acc: 0.8825 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2629 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2629 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2644 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2638 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2634 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2640 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2638 - acc: 0.8831 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2640 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 
0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.858 - 2s 6us/step - loss: 0.2632 - acc: 0.8836 - weighted_accuracy: 0.8588 - val_loss: 0.2486 - val_acc: 0.8937 - val_weighted_accuracy: 0.8779\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2812 - acc: 0.8789 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2723 - acc: 0.8812 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2683 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2661 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2639 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2638 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2629 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 
0.8836 - weighted_accuracy: 0.859 - 2s 6us/step - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.8590 - val_loss: 0.2481 - val_acc: 0.8937 - val_weighted_accuracy: 0.8776\n",
      "Epoch 11/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2594 - acc: 0.8965 - weighted_accuracy: 0.876 - ETA: 1s - loss: 0.2597 - acc: 0.8897 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2585 - acc: 0.8891 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2596 - acc: 0.8872 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2629 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2632 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8832 - weighted_accuracy: 0.858 - 2s 6us/step - loss: 0.2633 - 
acc: 0.8833 - weighted_accuracy: 0.8587 - val_loss: 0.2488 - val_acc: 0.8941 - val_weighted_accuracy: 0.8773\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2651 - acc: 0.8789 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2548 - acc: 0.8892 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2615 - acc: 0.8851 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2616 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2613 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2591 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2598 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2598 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2603 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8837 - weighted_accuracy: 0.859 - 2s 5us/step - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.8591 - val_loss: 0.2482 - 
val_acc: 0.8944 - val_weighted_accuracy: 0.8802\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2586 - acc: 0.8877 - weighted_accuracy: 0.877 - ETA: 1s - loss: 0.2686 - acc: 0.8804 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2652 - acc: 0.8824 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2656 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2660 - acc: 0.8804 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2662 - acc: 0.8809 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2658 - acc: 0.8813 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2665 - acc: 0.8808 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2657 - acc: 0.8817 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2651 - acc: 0.8817 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2644 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2640 - acc: 0.8821 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2631 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2626 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2626 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2629 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.858 - 2s 6us/step - loss: 0.2630 - 
acc: 0.8833 - weighted_accuracy: 0.8587 - val_loss: 0.2474 - val_acc: 0.8943 - val_weighted_accuracy: 0.8788\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2735 - acc: 0.8848 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2561 - acc: 0.8882 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2600 - acc: 0.8852 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2587 - acc: 0.8858 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2599 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2599 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2622 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2626 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2625 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8836 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2626 - acc: 0.8836 - weighted_accuracy: 0.8589 - val_loss: 0.2477 - val_acc: 0.8941 - val_weighted_accuracy: 0.8783\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 15/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2913 - acc: 0.8691 - weighted_accuracy: 0.843 - ETA: 1s - loss: 0.2610 - acc: 0.8869 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2618 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2618 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2617 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8840 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2627 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2627 - acc: 0.8838 - weighted_accuracy: 0.8592 - val_loss: 0.2473 - val_acc: 0.8941 - val_weighted_accuracy: 0.8781\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2489 - acc: 0.8955 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2529 - acc: 0.8861 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2600 - acc: 0.8826 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2625 - acc: 0.8816 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2607 - acc: 0.8829 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2597 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2590 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.8597 - val_loss: 0.2476 - val_acc: 0.8940 - val_weighted_accuracy: 0.8784\n",
      "Epoch 17/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2549 - acc: 0.8750 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2640 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2642 - acc: 0.8821 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2654 - acc: 0.8814 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2651 - acc: 0.8815 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2648 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2649 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2649 - acc: 0.8816 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2644 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2633 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8835 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2624 - acc: 0.8836 - weighted_accuracy: 0.8593 - val_loss: 0.2481 - val_acc: 0.8939 - val_weighted_accuracy: 0.8782\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2344 - acc: 0.9023 - weighted_accuracy: 0.876 - ETA: 1s - loss: 0.2675 - acc: 0.8824 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2688 - acc: 0.8808 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2688 - acc: 0.8801 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2661 - acc: 0.8818 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2647 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2634 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2632 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.859 - 2s 5us/step - loss: 0.2624 - 
acc: 0.8838 - weighted_accuracy: 0.8596 - val_loss: 0.2489 - val_acc: 0.8921 - val_weighted_accuracy: 0.8769\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2735 - acc: 0.8799 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2680 - acc: 0.8820 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2614 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2620 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2602 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2608 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2604 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2602 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.860 - 2s 5us/step - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.8599 - val_loss: 0.2487 - 
val_acc: 0.8938 - val_weighted_accuracy: 0.8782\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2535 - acc: 0.8896 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2622 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2626 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2604 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2599 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2613 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2594 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8837 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.8597 - val_loss: 0.2484 - val_acc: 0.8938 - val_weighted_accuracy: 0.8781\n",
      "Epoch 21/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2761 - acc: 0.8867 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2606 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2574 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2584 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2583 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2603 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2600 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2598 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2596 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2619 - acc: 0.8841 - weighted_accuracy: 0.8602 - val_loss: 0.2488 - 
val_acc: 0.8940 - val_weighted_accuracy: 0.8776\n",
      "Epoch 22/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2470 - acc: 0.9053 - weighted_accuracy: 0.887 - ETA: 1s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2614 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.8601 - val_loss: 0.2474 - val_acc: 0.8938 - val_weighted_accuracy: 0.8783\n",
      "Epoch 23/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2861 - acc: 0.8711 - weighted_accuracy: 0.843 - ETA: 1s - loss: 0.2622 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2645 - acc: 0.8826 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2658 - acc: 0.8825 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2642 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2634 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8842 - weighted_accuracy: 0.859 - 2s 5us/step - loss: 0.2619 - 
acc: 0.8842 - weighted_accuracy: 0.8599 - val_loss: 0.2486 - val_acc: 0.8937 - val_weighted_accuracy: 0.8788\n",
      "Epoch 24/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2636 - acc: 0.8809 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2621 - acc: 0.8867 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2597 - acc: 0.8882 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2596 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2599 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2602 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2607 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8839 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.8599 - val_loss: 0.2478 - val_acc: 0.8935 - val_weighted_accuracy: 0.8779\n",
      "Epoch 25/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2975 - acc: 0.8711 - weighted_accuracy: 0.836 - ETA: 1s - loss: 0.2679 - acc: 0.8822 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2611 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.8607 - val_loss: 0.2479 - 
val_acc: 0.8932 - val_weighted_accuracy: 0.8780\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_31 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_26 (Dropout)            (None, 24)           0           dense_31[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_26 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_26[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_32 (Dense)                (None, 24)           1248        concatenate_26[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_27 (Dropout)            (None, 24)           0           dense_32[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_27 (Concatenate)    (None, 75)           0           concatenate_26[0][0]             \n",
      "                                                                 dropout_27[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_33 (Dense)                (None, 24)           1824        concatenate_27[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_28 (Dropout)            (None, 24)           0           dense_33[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_28 (Concatenate)    (None, 99)           0           concatenate_27[0][0]             \n",
      "                                                                 dropout_28[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_34 (Dense)                (None, 24)           2400        concatenate_28[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_29 (Dropout)            (None, 24)           0           dense_34[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_29 (Concatenate)    (None, 123)          0           concatenate_28[0][0]             \n",
      "                                                                 dropout_29[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_35 (Dense)                (None, 24)           2976        concatenate_29[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_30 (Dropout)            (None, 24)           0           dense_35[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_30 (Concatenate)    (None, 147)          0           concatenate_29[0][0]             \n",
      "                                                                 dropout_30[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_6 (Highway)             (None, 147)          43512       concatenate_30[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_36 (Dense)                (None, 3)            444         highway_6[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 2:46 - loss: 1.0339 - acc: 0.4619 - weighted_accuracy: 0.0000e+0 - ETA: 14s - loss: 0.5625 - acc: 0.8228 - weighted_accuracy: 0.6116    - ETA: 7s - loss: 0.4486 - acc: 0.8502 - weighted_accuracy: 0.7175 - ETA: 5s - loss: 0.3963 - acc: 0.8616 - weighted_accuracy: 0.761 - ETA: 4s - loss: 0.3651 - acc: 0.8685 - weighted_accuracy: 0.789 - ETA: 3s - loss: 0.3491 - acc: 0.8707 - weighted_accuracy: 0.803 - ETA: 2s - loss: 0.3387 - acc: 0.8720 - weighted_accuracy: 0.812 - ETA: 2s - loss: 0.3306 - acc: 0.8728 - weighted_accuracy: 0.818 - ETA: 2s - loss: 0.3242 - acc: 0.8738 - weighted_accuracy: 0.822 - ETA: 1s - loss: 0.3185 - acc: 0.8749 - weighted_accuracy: 0.827 - ETA: 1s - loss: 0.3145 - acc: 0.8754 - weighted_accuracy: 0.830 - ETA: 1s - loss: 0.3110 - acc: 0.8757 - weighted_accuracy: 0.832 - ETA: 1s - loss: 0.3077 - acc: 0.8762 - weighted_accuracy: 0.834 - ETA: 1s - loss: 0.3038 - acc: 0.8770 - weighted_accuracy: 0.836 - ETA: 1s - loss: 0.3015 - acc: 0.8775 - weighted_accuracy: 0.838 - ETA: 0s - loss: 0.2991 - acc: 0.8781 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2975 - acc: 0.8783 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2959 - acc: 0.8785 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2941 - acc: 0.8789 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2921 - acc: 0.8795 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2904 - acc: 0.8798 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2893 - acc: 0.8799 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2881 - acc: 0.8802 - weighted_accuracy: 0.846 - ETA: 0s - loss: 0.2869 - acc: 0.8805 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2861 - acc: 0.8806 - weighted_accuracy: 0.847 - ETA: 0s - loss: 0.2856 - acc: 0.8805 - weighted_accuracy: 0.847 - 2s 7us/step - loss: 0.2854 - acc: 0.8806 - weighted_accuracy: 0.8479 - val_loss: 0.2738 - val_acc: 0.8767 - val_weighted_accuracy: 0.8575\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2644 - acc: 0.8848 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2659 - acc: 0.8822 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2723 - acc: 0.8797 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2702 - acc: 0.8809 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2654 - acc: 0.8831 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2652 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2661 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2655 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2657 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2654 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2651 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2651 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8843 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2642 - acc: 0.8843 - weighted_accuracy: 0.8597 - val_loss: 0.2700 - val_acc: 0.8785 - val_weighted_accuracy: 0.8595\n",
      "Epoch 3/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2501 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2589 - acc: 0.8893 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2599 - acc: 0.8876 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2596 - acc: 0.8868 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2625 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2604 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2613 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2631 - acc: 0.8844 - weighted_accuracy: 0.8600 - val_loss: 0.2697 - val_acc: 0.8782 - val_weighted_accuracy: 0.8587\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2638 - acc: 0.8799 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2631 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2638 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2622 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2619 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2628 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2630 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8847 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.8601 - val_loss: 0.2710 - val_acc: 0.8785 - val_weighted_accuracy: 0.8623\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2740 - acc: 0.8779 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2626 - acc: 0.8816 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2652 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2631 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2631 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2615 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8862 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8853 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2618 - acc: 0.8852 - weighted_accuracy: 0.8610 - val_loss: 0.2701 - val_acc: 0.8776 - val_weighted_accuracy: 0.8565\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2530 - acc: 0.8887 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2571 - acc: 0.8870 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2635 - acc: 0.8841 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2619 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2629 - acc: 0.8845 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2632 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2625 - acc: 0.8854 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8848 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.8602 - val_loss: 0.2708 - val_acc: 0.8783 - val_weighted_accuracy: 0.8569\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2557 - acc: 0.8818 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2644 - acc: 0.8833 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2609 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2601 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2590 - acc: 0.8867 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2593 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8858 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2600 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8848 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.8604 - val_loss: 0.2700 - val_acc: 0.8783 - val_weighted_accuracy: 0.8578\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2295 - acc: 0.8975 - weighted_accuracy: 0.872 - ETA: 1s - loss: 0.2545 - acc: 0.8875 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2598 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2559 - acc: 0.8876 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2594 - acc: 0.8865 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2613 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2605 - acc: 0.8858 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2610 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2612 - acc: 0.8851 - weighted_accuracy: 0.8608 - val_loss: 0.2704 - val_acc: 0.8786 - val_weighted_accuracy: 0.8608\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2447 - acc: 0.8906 - weighted_accuracy: 0.877 - ETA: 1s - loss: 0.2583 - acc: 0.8833 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2613 - acc: 0.8822 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2617 - acc: 0.8832 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2589 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2589 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2595 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2593 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2595 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.860 - 2s 5us/step - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.8605 - val_loss: 0.2690 - 
val_acc: 0.8791 - val_weighted_accuracy: 0.8606\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2685 - acc: 0.8770 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2552 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2602 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2567 - acc: 0.8877 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2550 - acc: 0.8886 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2565 - acc: 0.8878 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2577 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2580 - acc: 0.8866 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2593 - acc: 0.8861 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2600 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.860 - 2s 5us/step - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.8607 - val_loss: 0.2709 - 
val_acc: 0.8779 - val_weighted_accuracy: 0.8621\n",
      "Epoch 11/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2491 - acc: 0.8955 - weighted_accuracy: 0.877 - ETA: 1s - loss: 0.2540 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2547 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2555 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2568 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2558 - acc: 0.8876 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2574 - acc: 0.8867 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2583 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2581 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2592 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.8609 - val_loss: 0.2706 - val_acc: 0.8782 - val_weighted_accuracy: 0.8586\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2551 - acc: 0.8809 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2667 - acc: 0.8804 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2611 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2612 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2630 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.8609 - val_loss: 0.2693 - val_acc: 0.8778 - val_weighted_accuracy: 0.8578\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2591 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2536 - acc: 0.8899 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2580 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2573 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2593 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2589 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.8610 - val_loss: 0.2694 - val_acc: 0.8785 - val_weighted_accuracy: 0.8565\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2486 - acc: 0.8926 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2612 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2646 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2643 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2650 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2620 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2613 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2602 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.861 - 2s 5us/step - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.8611 - val_loss: 0.2695 - 
val_acc: 0.8783 - val_weighted_accuracy: 0.8571\n",
      "Epoch 15/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2492 - acc: 0.8896 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2504 - acc: 0.8885 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2594 - acc: 0.8848 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2606 - acc: 0.8840 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2590 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2599 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8855 - weighted_accuracy: 0.861 - 2s 5us/step - loss: 0.2602 - acc: 0.8855 - weighted_accuracy: 0.8614 - val_loss: 0.2701 - 
val_acc: 0.8785 - val_weighted_accuracy: 0.8590\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2557 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2580 - acc: 0.8858 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2598 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2594 - acc: 0.8861 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2587 - acc: 0.8862 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2578 - acc: 0.8867 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2573 - acc: 0.8870 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2580 - acc: 0.8868 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2577 - acc: 0.8873 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8869 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2585 - acc: 0.8865 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2591 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2590 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8867 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8862 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.861 - 2s 5us/step - loss: 0.2601 - 
acc: 0.8854 - weighted_accuracy: 0.8612 - val_loss: 0.2696 - val_acc: 0.8781 - val_weighted_accuracy: 0.8557\n",
      "Epoch 17/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2455 - acc: 0.8799 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2603 - acc: 0.8817 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2647 - acc: 0.8812 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2626 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2621 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2604 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2608 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2592 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2587 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2585 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 
0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.861 - 2s 6us/step - loss: 0.2599 - acc: 0.8852 - weighted_accuracy: 0.8614 - val_loss: 0.2698 - val_acc: 0.8782 - val_weighted_accuracy: 0.8542\n",
      "Epoch 18/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2320 - acc: 0.9014 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2654 - acc: 0.8835 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2609 - acc: 0.8856 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2596 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.861 - 2s 5us/step - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.8611 - val_loss: 0.2697 - 
val_acc: 0.8782 - val_weighted_accuracy: 0.8604\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2389 - acc: 0.9023 - weighted_accuracy: 0.878 - ETA: 1s - loss: 0.2554 - acc: 0.8865 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2585 - acc: 0.8870 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2586 - acc: 0.8875 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2615 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2616 - acc: 0.8842 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2613 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2589 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.8615 - val_loss: 0.2693 - val_acc: 0.8780 - val_weighted_accuracy: 0.8559\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_37 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_31 (Dropout)            (None, 24)           0           dense_37[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_31 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_31[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_38 (Dense)                (None, 24)           1248        concatenate_31[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_32 (Dropout)            (None, 24)           0           dense_38[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_32 (Concatenate)    (None, 75)           0           concatenate_31[0][0]             \n",
      "                                                                 dropout_32[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_39 (Dense)                (None, 24)           1824        concatenate_32[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_33 (Dropout)            (None, 24)           0           dense_39[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_33 (Concatenate)    (None, 99)           0           concatenate_32[0][0]             \n",
      "                                                                 dropout_33[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_40 (Dense)                (None, 24)           2400        concatenate_33[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_34 (Dropout)            (None, 24)           0           dense_40[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_34 (Concatenate)    (None, 123)          0           concatenate_33[0][0]             \n",
      "                                                                 dropout_34[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_41 (Dense)                (None, 24)           2976        concatenate_34[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_35 (Dropout)            (None, 24)           0           dense_41[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_35 (Concatenate)    (None, 147)          0           concatenate_34[0][0]             \n",
      "                                                                 dropout_35[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_7 (Highway)             (None, 147)          43512       concatenate_35[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_42 (Dense)                (None, 3)            444         highway_7[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 3:36 - loss: 1.2582 - acc: 0.1035 - weighted_accuracy: 0.003 - ETA: 22s - loss: 0.7314 - acc: 0.6446 - weighted_accuracy: 0.446 - ETA: 11s - loss: 0.5273 - acc: 0.7603 - weighted_accuracy: 0.63 - ETA: 8s - loss: 0.4590 - acc: 0.7996 - weighted_accuracy: 0.7036 - ETA: 6s - loss: 0.4179 - acc: 0.8202 - weighted_accuracy: 0.743 - ETA: 4s - loss: 0.3922 - acc: 0.8319 - weighted_accuracy: 0.766 - ETA: 4s - loss: 0.3741 - acc: 0.8401 - weighted_accuracy: 0.781 - ETA: 3s - loss: 0.3606 - acc: 0.8462 - weighted_accuracy: 0.792 - ETA: 3s - loss: 0.3500 - acc: 0.8504 - weighted_accuracy: 0.801 - ETA: 2s - loss: 0.3419 - acc: 0.8540 - weighted_accuracy: 0.807 - ETA: 2s - loss: 0.3348 - acc: 0.8572 - weighted_accuracy: 0.813 - ETA: 2s - loss: 0.3285 - acc: 0.8596 - weighted_accuracy: 0.817 - ETA: 2s - loss: 0.3232 - acc: 0.8616 - weighted_accuracy: 0.821 - ETA: 1s - loss: 0.3185 - acc: 0.8638 - weighted_accuracy: 0.825 - ETA: 1s - loss: 0.3151 - acc: 0.8652 - weighted_accuracy: 0.828 - ETA: 1s - loss: 0.3122 - acc: 0.8662 - weighted_accuracy: 0.829 - ETA: 1s - loss: 0.3100 - acc: 0.8669 - weighted_accuracy: 0.831 - ETA: 1s - loss: 0.3074 - acc: 0.8682 - weighted_accuracy: 0.833 - ETA: 1s - loss: 0.3050 - acc: 0.8692 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.3030 - acc: 0.8701 - weighted_accuracy: 0.836 - ETA: 0s - loss: 0.3012 - acc: 0.8709 - weighted_accuracy: 0.837 - ETA: 0s - loss: 0.2992 - acc: 0.8716 - weighted_accuracy: 0.839 - ETA: 0s - loss: 0.2978 - acc: 0.8722 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2965 - acc: 0.8727 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2957 - acc: 0.8730 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2945 - acc: 0.8734 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2934 - acc: 0.8738 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2921 - acc: 0.8743 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2908 - acc: 0.8748 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2900 - 
acc: 0.8751 - weighted_accuracy: 0.845 - ETA: 0s - loss: 0.2893 - acc: 0.8754 - weighted_accuracy: 0.845 - 3s 9us/step - loss: 0.2885 - acc: 0.8757 - weighted_accuracy: 0.8461 - val_loss: 0.2921 - val_acc: 0.8678 - val_weighted_accuracy: 0.8318\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2755 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2728 - acc: 0.8812 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2679 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2659 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2649 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2662 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2663 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2665 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2649 - acc: 0.8855 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2638 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2628 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2628 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2623 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2621 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2624 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2630 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2626 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2618 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2620 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2620 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2620 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2617 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2616 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2620 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2622 - acc: 0.8856 - weighted_accuracy: 0.862 - 2s 5us/step - loss: 0.2622 - acc: 0.8856 - weighted_accuracy: 0.8623 - val_loss: 0.2934 - 
val_acc: 0.8671 - val_weighted_accuracy: 0.8284\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2247 - acc: 0.9004 - weighted_accuracy: 0.879 - ETA: 1s - loss: 0.2486 - acc: 0.8924 - weighted_accuracy: 0.869 - ETA: 1s - loss: 0.2520 - acc: 0.8905 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2566 - acc: 0.8883 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2587 - acc: 0.8876 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2581 - acc: 0.8884 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2589 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2579 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2574 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 0s - loss: 0.2586 - acc: 0.8874 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2589 - acc: 0.8871 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2599 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2592 - acc: 0.8871 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2598 - acc: 0.8868 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2604 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2603 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2604 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2608 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8857 - weighted_accuracy: 0.862 - 2s 5us/step - loss: 0.2611 - acc: 0.8857 - weighted_accuracy: 0.8627 - val_loss: 0.2879 - 
val_acc: 0.8686 - val_weighted_accuracy: 0.8405\n",
      "Epoch 4/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 4s - loss: 0.2499 - acc: 0.8965 - weighted_accuracy: 0.882 - ETA: 1s - loss: 0.2608 - acc: 0.8869 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2672 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2645 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2643 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2627 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2609 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2608 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2613 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2613 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2608 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2604 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2603 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2604 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2599 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2607 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 
0.8859 - weighted_accuracy: 0.863 - 2s 6us/step - loss: 0.2604 - acc: 0.8859 - weighted_accuracy: 0.8632 - val_loss: 0.2891 - val_acc: 0.8679 - val_weighted_accuracy: 0.8340\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2677 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2624 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2614 - acc: 0.8852 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2608 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2623 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2618 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2625 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2621 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2620 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2631 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2628 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2636 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2623 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2607 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2599 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8861 - weighted_accuracy: 0.863 - 2s 5us/step - loss: 0.2599 - 
acc: 0.8862 - weighted_accuracy: 0.8637 - val_loss: 0.2903 - val_acc: 0.8676 - val_weighted_accuracy: 0.8308\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2403 - acc: 0.9014 - weighted_accuracy: 0.874 - ETA: 1s - loss: 0.2639 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2635 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2607 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2603 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2599 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2594 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2585 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8861 - weighted_accuracy: 0.862 - 2s 5us/step - loss: 0.2595 - 
acc: 0.8862 - weighted_accuracy: 0.8630 - val_loss: 0.2900 - val_acc: 0.8680 - val_weighted_accuracy: 0.8322\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2658 - acc: 0.8887 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2588 - acc: 0.8865 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2607 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2596 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2573 - acc: 0.8872 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2561 - acc: 0.8874 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2564 - acc: 0.8874 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2572 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2582 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2599 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8860 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2596 - acc: 0.8861 - weighted_accuracy: 0.8633 - val_loss: 0.2887 - val_acc: 0.8678 - val_weighted_accuracy: 0.8338\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2586 - acc: 0.8926 - weighted_accuracy: 0.873 - ETA: 1s - loss: 0.2673 - acc: 0.8789 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2555 - acc: 0.8865 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2580 - acc: 0.8871 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2587 - acc: 0.8868 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2594 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2582 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2577 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8860 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2593 - acc: 0.8860 - weighted_accuracy: 0.8632 - val_loss: 0.2902 - val_acc: 0.8676 - val_weighted_accuracy: 0.8337\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2770 - acc: 0.8730 - weighted_accuracy: 0.847 - ETA: 1s - loss: 0.2625 - acc: 0.8822 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2612 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2641 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2627 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2620 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8860 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2591 - acc: 0.8860 - weighted_accuracy: 0.8631 - val_loss: 0.2885 - 
val_acc: 0.8672 - val_weighted_accuracy: 0.8343\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2603 - acc: 0.8770 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2559 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2586 - acc: 0.8844 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2600 - acc: 0.8841 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2597 - acc: 0.8843 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2595 - acc: 0.8844 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2599 - acc: 0.8841 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2598 - acc: 0.8843 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2595 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2588 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2584 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2583 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.8632 - val_loss: 0.2892 - val_acc: 0.8682 - val_weighted_accuracy: 0.8336\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2402 - acc: 0.8926 - weighted_accuracy: 0.872 - ETA: 1s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2578 - acc: 0.8873 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2584 - acc: 0.8868 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2580 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2581 - acc: 0.8865 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2578 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2570 - acc: 0.8865 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2574 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2576 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2581 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2579 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2578 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2578 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2580 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2577 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2579 - acc: 0.8868 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2576 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2577 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2583 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2587 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2580 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2583 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8862 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.8634 - val_loss: 0.2890 - val_acc: 0.8679 - val_weighted_accuracy: 0.8342\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2718 - acc: 0.8984 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2618 - acc: 0.8883 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2554 - acc: 0.8891 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2593 - acc: 0.8865 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2594 - acc: 0.8861 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2581 - acc: 0.8866 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2585 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2585 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2592 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2584 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2584 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2588 - acc: 0.8861 - weighted_accuracy: 0.8636 - val_loss: 0.2907 - val_acc: 0.8671 - val_weighted_accuracy: 0.8272\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2703 - acc: 0.8750 - weighted_accuracy: 0.850 - ETA: 1s - loss: 0.2649 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2613 - acc: 0.8860 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2624 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2606 - acc: 0.8867 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2580 - acc: 0.8880 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2591 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2582 - acc: 0.8876 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2590 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2594 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2583 - acc: 0.8871 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2578 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2581 - acc: 0.8870 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2577 - acc: 0.8872 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2580 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2581 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2580 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2588 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8862 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2588 - acc: 0.8861 - weighted_accuracy: 0.8634 - val_loss: 0.2890 - val_acc: 0.8678 - val_weighted_accuracy: 0.8338\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_43 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_36 (Dropout)            (None, 24)           0           dense_43[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_36 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_36[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_44 (Dense)                (None, 24)           1248        concatenate_36[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_37 (Dropout)            (None, 24)           0           dense_44[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_37 (Concatenate)    (None, 75)           0           concatenate_36[0][0]             \n",
      "                                                                 dropout_37[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_45 (Dense)                (None, 24)           1824        concatenate_37[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_38 (Dropout)            (None, 24)           0           dense_45[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_38 (Concatenate)    (None, 99)           0           concatenate_37[0][0]             \n",
      "                                                                 dropout_38[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_46 (Dense)                (None, 24)           2400        concatenate_38[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_39 (Dropout)            (None, 24)           0           dense_46[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_39 (Concatenate)    (None, 123)          0           concatenate_38[0][0]             \n",
      "                                                                 dropout_39[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_47 (Dense)                (None, 24)           2976        concatenate_39[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_40 (Dropout)            (None, 24)           0           dense_47[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_40 (Concatenate)    (None, 147)          0           concatenate_39[0][0]             \n",
      "                                                                 dropout_40[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_8 (Highway)             (None, 147)          43512       concatenate_40[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_48 (Dense)                (None, 3)            444         highway_8[0][0]                  \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 3:47 - loss: 1.1595 - acc: 0.1865 - weighted_accuracy: 0.0000e+0 - ETA: 19s - loss: 0.6417 - acc: 0.7656 - weighted_accuracy: 0.5321    - ETA: 10s - loss: 0.4884 - acc: 0.8210 - weighted_accuracy: 0.67 - ETA: 7s - loss: 0.4313 - acc: 0.8390 - weighted_accuracy: 0.7350 - ETA: 5s - loss: 0.3940 - acc: 0.8508 - weighted_accuracy: 0.767 - ETA: 4s - loss: 0.3725 - acc: 0.8569 - weighted_accuracy: 0.785 - ETA: 3s - loss: 0.3563 - acc: 0.8614 - weighted_accuracy: 0.798 - ETA: 3s - loss: 0.3442 - acc: 0.8648 - weighted_accuracy: 0.807 - ETA: 2s - loss: 0.3364 - acc: 0.8665 - weighted_accuracy: 0.813 - ETA: 2s - loss: 0.3302 - acc: 0.8679 - weighted_accuracy: 0.818 - ETA: 2s - loss: 0.3238 - acc: 0.8700 - weighted_accuracy: 0.822 - ETA: 1s - loss: 0.3200 - acc: 0.8710 - weighted_accuracy: 0.825 - ETA: 1s - loss: 0.3157 - acc: 0.8719 - weighted_accuracy: 0.828 - ETA: 1s - loss: 0.3116 - acc: 0.8732 - weighted_accuracy: 0.831 - ETA: 1s - loss: 0.3087 - acc: 0.8739 - weighted_accuracy: 0.833 - ETA: 1s - loss: 0.3060 - acc: 0.8745 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.3035 - acc: 0.8750 - weighted_accuracy: 0.836 - ETA: 0s - loss: 0.3015 - acc: 0.8755 - weighted_accuracy: 0.838 - ETA: 0s - loss: 0.3000 - acc: 0.8757 - weighted_accuracy: 0.839 - ETA: 0s - loss: 0.2988 - acc: 0.8760 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2974 - acc: 0.8764 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2957 - acc: 0.8767 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2946 - acc: 0.8769 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2937 - acc: 0.8770 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2925 - acc: 0.8773 - weighted_accuracy: 0.843 - ETA: 0s - loss: 0.2914 - acc: 0.8778 - weighted_accuracy: 0.844 - ETA: 0s - loss: 0.2905 - acc: 0.8779 - weighted_accuracy: 0.845 - 2s 8us/step - loss: 0.2897 - acc: 0.8781 - weighted_accuracy: 0.8456 - val_loss: 0.2656 - val_acc: 0.8820 - val_weighted_accuracy: 0.8596\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2555 - acc: 0.8848 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2713 - acc: 0.8809 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2707 - acc: 0.8798 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2683 - acc: 0.8820 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2689 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2673 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2670 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2651 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2644 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2650 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2659 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2661 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2654 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2644 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2648 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2652 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2654 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2658 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2658 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2656 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8839 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2652 - acc: 0.8840 - weighted_accuracy: 0.8599 - val_loss: 0.2644 - val_acc: 0.8827 - val_weighted_accuracy: 0.8602\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2422 - acc: 0.8984 - weighted_accuracy: 0.888 - ETA: 1s - loss: 0.2602 - acc: 0.8889 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2649 - acc: 0.8846 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2645 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2646 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2659 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2655 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2648 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2663 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2662 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2661 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2661 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2654 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2660 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2652 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2654 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2653 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2646 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2646 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2640 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2641 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2636 - acc: 0.8844 - weighted_accuracy: 0.8608 - val_loss: 0.2650 - val_acc: 0.8808 - val_weighted_accuracy: 0.8601\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.3128 - acc: 0.8662 - weighted_accuracy: 0.848 - ETA: 1s - loss: 0.2646 - acc: 0.8822 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2642 - acc: 0.8834 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2635 - acc: 0.8836 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2644 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2634 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2632 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2633 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2635 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8842 - weighted_accuracy: 0.860 - 2s 5us/step - loss: 0.2633 - acc: 0.8843 - weighted_accuracy: 0.8607 - val_loss: 0.2651 - 
val_acc: 0.8810 - val_weighted_accuracy: 0.8602\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2428 - acc: 0.9014 - weighted_accuracy: 0.882 - ETA: 1s - loss: 0.2622 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2636 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2635 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2606 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2609 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2615 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2619 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2623 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2622 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2625 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2639 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.8610 - val_loss: 0.2633 - val_acc: 0.8815 - val_weighted_accuracy: 0.8603\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2280 - acc: 0.9014 - weighted_accuracy: 0.891 - ETA: 1s - loss: 0.2580 - acc: 0.8854 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2565 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2579 - acc: 0.8861 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2576 - acc: 0.8859 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2576 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2578 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2574 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2577 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8846 - weighted_accuracy: 0.861 - 2s 5us/step - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.8612 - val_loss: 0.2632 - 
val_acc: 0.8821 - val_weighted_accuracy: 0.8597\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2709 - acc: 0.8770 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2598 - acc: 0.8879 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2640 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2633 - acc: 0.8869 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2618 - acc: 0.8869 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2620 - acc: 0.8869 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2620 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2614 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2615 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2620 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2616 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2614 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2616 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2622 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.8614 - val_loss: 0.2633 - 
val_acc: 0.8825 - val_weighted_accuracy: 0.8594\n",
      "Epoch 8/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2478 - acc: 0.8926 - weighted_accuracy: 0.876 - ETA: 1s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2620 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2583 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2592 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8872 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2599 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2622 - acc: 0.8847 - weighted_accuracy: 0.8612 - val_loss: 0.2642 - val_acc: 0.8822 - val_weighted_accuracy: 0.8602\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2591 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2606 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2599 - acc: 0.8836 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2627 - acc: 0.8819 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2644 - acc: 0.8813 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2644 - acc: 0.8821 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8830 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2632 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.8612 - val_loss: 0.2640 - val_acc: 0.8816 - val_weighted_accuracy: 0.8596\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2761 - acc: 0.8799 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2636 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2635 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2639 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2612 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2599 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2590 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2582 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2585 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.8614 - val_loss: 0.2628 - val_acc: 0.8829 - val_weighted_accuracy: 0.8591\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2623 - acc: 0.8906 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2601 - acc: 0.8875 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2614 - acc: 0.8874 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2626 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2596 - acc: 0.8865 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2592 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.8611 - val_loss: 0.2628 - val_acc: 0.8821 - val_weighted_accuracy: 0.8581\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2357 - acc: 0.8975 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2545 - acc: 0.8879 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2555 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2568 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2594 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2602 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.8617 - val_loss: 0.2627 - val_acc: 0.8825 - val_weighted_accuracy: 0.8593\n",
      "Epoch 13/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2484 - acc: 0.8936 - weighted_accuracy: 0.869 - ETA: 1s - loss: 0.2647 - acc: 0.8831 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2614 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2617 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2619 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2626 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2614 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2616 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2614 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.8618 - val_loss: 0.2640 - val_acc: 0.8812 - val_weighted_accuracy: 0.8597\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2646 - acc: 0.8750 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2648 - acc: 0.8811 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2632 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2613 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2621 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2610 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2596 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2598 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.8621 - val_loss: 0.2634 - val_acc: 0.8814 - val_weighted_accuracy: 0.8588\n",
      "Epoch 15/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2925 - acc: 0.8672 - weighted_accuracy: 0.839 - ETA: 1s - loss: 0.2556 - acc: 0.8880 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2618 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2629 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.8621 - val_loss: 0.2632 - val_acc: 0.8814 - val_weighted_accuracy: 0.8608\n",
      "Epoch 16/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2552 - acc: 0.8955 - weighted_accuracy: 0.873 - ETA: 1s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2624 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2624 - acc: 0.8852 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2599 - acc: 0.8857 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2604 - acc: 0.8856 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2597 - acc: 0.8858 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2587 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2591 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2589 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.8618 - val_loss: 0.2630 - val_acc: 0.8821 - val_weighted_accuracy: 0.8593\n",
      "Epoch 17/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2503 - acc: 0.8926 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2579 - acc: 0.8873 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2551 - acc: 0.8885 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2564 - acc: 0.8880 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2585 - acc: 0.8868 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2592 - acc: 0.8866 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2596 - acc: 0.8865 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2605 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2597 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2594 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2599 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2605 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.8620 - val_loss: 0.2624 - val_acc: 0.8821 - val_weighted_accuracy: 0.8589\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2693 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2525 - acc: 0.8876 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2557 - acc: 0.8874 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2581 - acc: 0.8855 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2580 - acc: 0.8854 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2597 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.8618 - val_loss: 0.2638 - val_acc: 0.8813 - val_weighted_accuracy: 0.8602\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2455 - acc: 0.8906 - weighted_accuracy: 0.872 - ETA: 1s - loss: 0.2589 - acc: 0.8815 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8841 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2633 - acc: 0.8828 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8833 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8834 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2622 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2619 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2622 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2617 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.8622 - val_loss: 0.2628 - val_acc: 0.8823 - val_weighted_accuracy: 0.8589\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2538 - acc: 0.8975 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2677 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2622 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2635 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2637 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2620 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2621 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2612 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.8621 - val_loss: 0.2628 - val_acc: 0.8823 - val_weighted_accuracy: 0.8597\n",
      "Epoch 21/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2585 - acc: 0.8818 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2561 - acc: 0.8879 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2576 - acc: 0.8874 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2581 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2586 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2597 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2607 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8859 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2588 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2588 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2593 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2590 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 0s - loss: 0.2598 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8852 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.8624 - val_loss: 0.2631 - val_acc: 0.8813 - val_weighted_accuracy: 0.8601\n",
      "Epoch 22/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2922 - acc: 0.8682 - weighted_accuracy: 0.838 - ETA: 1s - loss: 0.2685 - acc: 0.8805 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2632 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2616 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2586 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2588 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2585 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2580 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.8623 - val_loss: 0.2652 - val_acc: 0.8798 - val_weighted_accuracy: 0.8603\n",
      "Epoch 23/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2578 - acc: 0.8818 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2595 - acc: 0.8821 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2599 - acc: 0.8844 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2583 - acc: 0.8861 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2600 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2605 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2600 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - 2s 5us/step - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.8624 - val_loss: 0.2628 - val_acc: 0.8824 - val_weighted_accuracy: 0.8596\n",
      "Epoch 24/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2597 - acc: 0.8994 - weighted_accuracy: 0.878 - ETA: 1s - loss: 0.2565 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2546 - acc: 0.8872 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2612 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2611 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8852 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.8626 - val_loss: 0.2629 - val_acc: 0.8824 - val_weighted_accuracy: 0.8579\n",
      "Epoch 25/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2514 - acc: 0.8848 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2634 - acc: 0.8813 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2601 - acc: 0.8839 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2586 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2602 - acc: 0.8852 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2577 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2581 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2586 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.8626 - val_loss: 0.2630 - val_acc: 0.8827 - val_weighted_accuracy: 0.8599\n",
      "Epoch 26/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2441 - acc: 0.8926 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2539 - acc: 0.8863 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2597 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2597 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2591 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2587 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2599 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2592 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2601 - acc: 0.8854 - weighted_accuracy: 0.8627 - val_loss: 0.2626 - val_acc: 0.8817 - val_weighted_accuracy: 0.8587\n",
      "Epoch 27/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2619 - acc: 0.8906 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2579 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2570 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2575 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2593 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2606 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2604 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2603 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2602 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2599 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.863 - 1s 5us/step - loss: 0.2600 - acc: 0.8855 - weighted_accuracy: 0.8627 - val_loss: 0.2639 - val_acc: 0.8806 - val_weighted_accuracy: 0.8596\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_49 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_41 (Dropout)            (None, 24)           0           dense_49[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_41 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_41[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_50 (Dense)                (None, 24)           1248        concatenate_41[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_42 (Dropout)            (None, 24)           0           dense_50[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_42 (Concatenate)    (None, 75)           0           concatenate_41[0][0]             \n",
      "                                                                 dropout_42[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_51 (Dense)                (None, 24)           1824        concatenate_42[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_43 (Dropout)            (None, 24)           0           dense_51[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_43 (Concatenate)    (None, 99)           0           concatenate_42[0][0]             \n",
      "                                                                 dropout_43[0][0]                 \n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "dense_52 (Dense)                (None, 24)           2400        concatenate_43[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_44 (Dropout)            (None, 24)           0           dense_52[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_44 (Concatenate)    (None, 123)          0           concatenate_43[0][0]             \n",
      "                                                                 dropout_44[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_53 (Dense)                (None, 24)           2976        concatenate_44[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_45 (Dropout)            (None, 24)           0           dense_53[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_45 (Concatenate)    (None, 147)          0           concatenate_44[0][0]             \n",
      "                                                                 dropout_45[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_9 (Highway)             (None, 147)          43512       concatenate_45[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_54 (Dense)                (None, 3)            444         highway_9[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      "288497/288497 [==============================] - ETA: 3:14 - loss: 1.2751 - acc: 0.0742 - weighted_accuracy: 0.011 - ETA: 16s - loss: 0.6513 - acc: 0.6734 - weighted_accuracy: 0.538 - ETA: 8s - loss: 0.4901 - acc: 0.7770 - weighted_accuracy: 0.6866 - ETA: 5s - loss: 0.4289 - acc: 0.8116 - weighted_accuracy: 0.742 - ETA: 4s - loss: 0.3973 - acc: 0.8276 - weighted_accuracy: 0.770 - ETA: 3s - loss: 0.3766 - acc: 0.8370 - weighted_accuracy: 0.785 - ETA: 3s - loss: 0.3609 - acc: 0.8449 - weighted_accuracy: 0.797 - ETA: 2s - loss: 0.3490 - acc: 0.8502 - weighted_accuracy: 0.806 - ETA: 2s - loss: 0.3409 - acc: 0.8540 - weighted_accuracy: 0.812 - ETA: 2s - loss: 0.3327 - acc: 0.8576 - weighted_accuracy: 0.818 - ETA: 1s - loss: 0.3278 - acc: 0.8596 - weighted_accuracy: 0.822 - ETA: 1s - loss: 0.3224 - acc: 0.8615 - weighted_accuracy: 0.825 - ETA: 1s - loss: 0.3183 - acc: 0.8631 - weighted_accuracy: 0.828 - ETA: 1s - loss: 0.3143 - acc: 0.8648 - weighted_accuracy: 0.831 - ETA: 1s - loss: 0.3116 - acc: 0.8660 - weighted_accuracy: 0.832 - ETA: 0s - loss: 0.3087 - acc: 0.8670 - weighted_accuracy: 0.834 - ETA: 0s - loss: 0.3062 - acc: 0.8680 - weighted_accuracy: 0.835 - ETA: 0s - loss: 0.3043 - acc: 0.8687 - weighted_accuracy: 0.837 - ETA: 0s - loss: 0.3022 - acc: 0.8694 - weighted_accuracy: 0.838 - ETA: 0s - loss: 0.3001 - acc: 0.8704 - weighted_accuracy: 0.839 - ETA: 0s - loss: 0.2985 - acc: 0.8710 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2974 - acc: 0.8713 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2959 - acc: 0.8719 - weighted_accuracy: 0.841 - ETA: 0s - loss: 0.2946 - acc: 0.8724 - weighted_accuracy: 0.842 - ETA: 0s - loss: 0.2928 - acc: 0.8731 - weighted_accuracy: 0.843 - 2s 7us/step - loss: 0.2922 - acc: 0.8733 - weighted_accuracy: 0.8438 - val_loss: 0.2650 - val_acc: 0.8864 - val_weighted_accuracy: 0.8695\n",
      "Epoch 2/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.3060 - acc: 0.8584 - weighted_accuracy: 0.833 - ETA: 1s - loss: 0.2708 - acc: 0.8817 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2690 - acc: 0.8806 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2694 - acc: 0.8811 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2697 - acc: 0.8813 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2689 - acc: 0.8821 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2686 - acc: 0.8816 - weighted_accuracy: 0.856 - ETA: 0s - loss: 0.2666 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2668 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2663 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2667 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2664 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2661 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2663 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2660 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2659 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2657 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2654 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2660 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2656 - acc: 0.8834 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2656 - acc: 0.8834 - weighted_accuracy: 0.8586 - val_loss: 0.2640 - val_acc: 0.8860 - val_weighted_accuracy: 0.8692\n",
      "Epoch 3/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2307 - acc: 0.9072 - weighted_accuracy: 0.892 - ETA: 1s - loss: 0.2593 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2579 - acc: 0.8876 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2598 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2641 - acc: 0.8839 - weighted_accuracy: 0.8594 - val_loss: 0.2637 - val_acc: 0.8861 - val_weighted_accuracy: 0.8680\n",
      "Epoch 4/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2839 - acc: 0.8613 - weighted_accuracy: 0.819 - ETA: 1s - loss: 0.2690 - acc: 0.8816 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2704 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2677 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2682 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2668 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2645 - acc: 0.8841 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2652 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2654 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8836 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2637 - acc: 0.8836 - weighted_accuracy: 0.8592 - val_loss: 0.2639 - val_acc: 0.8858 - val_weighted_accuracy: 0.8692\n",
      "Epoch 5/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2770 - acc: 0.8916 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2632 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2649 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2652 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2643 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2644 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2651 - acc: 0.8822 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2645 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2648 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2640 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2634 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.8596 - val_loss: 0.2632 - val_acc: 0.8859 - val_weighted_accuracy: 0.8691\n",
      "Epoch 6/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2557 - acc: 0.8877 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2538 - acc: 0.8911 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2630 - acc: 0.8844 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2626 - acc: 0.8852 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2630 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2618 - acc: 0.8850 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.8593 - val_loss: 0.2633 - val_acc: 0.8862 - val_weighted_accuracy: 0.8691\n",
      "Epoch 7/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2745 - acc: 0.8809 - weighted_accuracy: 0.846 - ETA: 1s - loss: 0.2639 - acc: 0.8817 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2632 - acc: 0.8815 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2619 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2615 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2618 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2621 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2605 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.8599 - val_loss: 0.2637 - val_acc: 0.8859 - val_weighted_accuracy: 0.8689\n",
      "Epoch 8/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2469 - acc: 0.8896 - weighted_accuracy: 0.870 - ETA: 1s - loss: 0.2622 - acc: 0.8853 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2653 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2640 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2630 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.8596 - val_loss: 0.2629 - val_acc: 0.8861 - val_weighted_accuracy: 0.8694\n",
      "Epoch 9/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2578 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2626 - acc: 0.8877 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2623 - acc: 0.8866 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2639 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2632 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2632 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2630 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2622 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2622 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8839 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.8595 - val_loss: 0.2627 - val_acc: 0.8865 - val_weighted_accuracy: 0.8706\n",
      "Epoch 10/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2290 - acc: 0.9092 - weighted_accuracy: 0.899 - ETA: 1s - loss: 0.2558 - acc: 0.8893 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2585 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2599 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2591 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2617 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.8598 - val_loss: 0.2632 - val_acc: 0.8864 - val_weighted_accuracy: 0.8679\n",
      "Epoch 11/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2914 - acc: 0.8740 - weighted_accuracy: 0.835 - ETA: 1s - loss: 0.2709 - acc: 0.8811 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2664 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2655 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2637 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2649 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2650 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2647 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2640 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.8599 - val_loss: 0.2629 - val_acc: 0.8864 - val_weighted_accuracy: 0.8689\n",
      "Epoch 12/500\n",
      "288497/288497 [==============================] - ETA: 3s - loss: 0.2581 - acc: 0.8848 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2700 - acc: 0.8799 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2651 - acc: 0.8823 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2638 - acc: 0.8817 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2616 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2619 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2607 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2600 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2615 - acc: 0.8843 - weighted_accuracy: 0.8601 - val_loss: 0.2626 - val_acc: 0.8869 - val_weighted_accuracy: 0.8670\n",
      "Epoch 13/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2639 - acc: 0.8945 - weighted_accuracy: 0.874 - ETA: 1s - loss: 0.2605 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2625 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2570 - acc: 0.8876 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2586 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.8601 - val_loss: 0.2628 - val_acc: 0.8864 - val_weighted_accuracy: 0.8698\n",
      "Epoch 14/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2697 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2653 - acc: 0.8824 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2638 - acc: 0.8818 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2616 - acc: 0.8830 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2611 - acc: 0.8830 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2626 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.8605 - val_loss: 0.2620 - val_acc: 0.8861 - val_weighted_accuracy: 0.8687\n",
      "Epoch 15/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2515 - acc: 0.8965 - weighted_accuracy: 0.875 - ETA: 1s - loss: 0.2635 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2638 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2608 - acc: 0.8845 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2608 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2613 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2619 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2616 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.8601 - val_loss: 0.2629 - val_acc: 0.8863 - val_weighted_accuracy: 0.8694\n",
      "Epoch 16/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2589 - acc: 0.8828 - weighted_accuracy: 0.842 - ETA: 1s - loss: 0.2577 - acc: 0.8874 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2600 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2597 - acc: 0.8863 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2641 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2631 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.8605 - val_loss: 0.2629 - val_acc: 0.8866 - val_weighted_accuracy: 0.8676\n",
      "Epoch 17/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2386 - acc: 0.9004 - weighted_accuracy: 0.881 - ETA: 1s - loss: 0.2580 - acc: 0.8884 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2584 - acc: 0.8878 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2578 - acc: 0.8870 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2582 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2590 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2582 - acc: 0.8871 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2585 - acc: 0.8868 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2587 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2588 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2599 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2600 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2602 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2603 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.8600 - val_loss: 0.2629 - val_acc: 0.8864 - val_weighted_accuracy: 0.8693\n",
      "Epoch 18/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2499 - acc: 0.8857 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2647 - acc: 0.8824 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2626 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2655 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2648 - acc: 0.8820 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2656 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2637 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.8606 - val_loss: 0.2630 - val_acc: 0.8868 - val_weighted_accuracy: 0.8671\n",
      "Epoch 19/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2484 - acc: 0.8916 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2708 - acc: 0.8795 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2678 - acc: 0.8810 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2652 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2627 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2606 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8844 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2608 - acc: 0.8844 - weighted_accuracy: 0.8603 - val_loss: 0.2632 - val_acc: 0.8874 - val_weighted_accuracy: 0.8689\n",
      "Epoch 20/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2539 - acc: 0.8916 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2616 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2602 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2603 - acc: 0.8856 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2603 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2612 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2617 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2621 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2620 - acc: 0.8839 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2617 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2612 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2612 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2610 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.8602 - val_loss: 0.2630 - val_acc: 0.8862 - val_weighted_accuracy: 0.8690\n",
      "Epoch 21/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2308 - acc: 0.9014 - weighted_accuracy: 0.888 - ETA: 1s - loss: 0.2580 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2632 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2605 - acc: 0.8863 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2635 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2619 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.8609 - val_loss: 0.2626 - val_acc: 0.8863 - val_weighted_accuracy: 0.8698\n",
      "Epoch 22/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2771 - acc: 0.8809 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2513 - acc: 0.8881 - weighted_accuracy: 0.869 - ETA: 1s - loss: 0.2565 - acc: 0.8878 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2580 - acc: 0.8875 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2602 - acc: 0.8863 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2595 - acc: 0.8864 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.8613 - val_loss: 0.2634 - val_acc: 0.8867 - val_weighted_accuracy: 0.8664\n",
      "Epoch 23/500\n",
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2283 - acc: 0.8955 - weighted_accuracy: 0.868 - ETA: 1s - loss: 0.2612 - acc: 0.8834 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2609 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2613 - acc: 0.8849 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2614 - acc: 0.8850 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2607 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8859 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.8609 - val_loss: 0.2625 - val_acc: 0.8858 - val_weighted_accuracy: 0.8677\n",
      "Epoch 24/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288497/288497 [==============================] - ETA: 2s - loss: 0.2686 - acc: 0.8799 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2551 - acc: 0.8873 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2606 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2621 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2630 - acc: 0.8829 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2618 - acc: 0.8838 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2628 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2625 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2626 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2619 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2618 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2611 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2608 - acc: 0.8842 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2605 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8843 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2606 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2607 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2606 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2605 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2604 - acc: 0.8847 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.8608 - val_loss: 0.2624 - val_acc: 0.8857 - val_weighted_accuracy: 0.8686\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 27)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_55 (Dense)                (None, 24)           672         mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_46 (Dropout)            (None, 24)           0           dense_55[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_46 (Concatenate)    (None, 51)           0           mata-features[0][0]              \n",
      "                                                                 dropout_46[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_56 (Dense)                (None, 24)           1248        concatenate_46[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_47 (Dropout)            (None, 24)           0           dense_56[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_47 (Concatenate)    (None, 75)           0           concatenate_46[0][0]             \n",
      "                                                                 dropout_47[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_57 (Dense)                (None, 24)           1824        concatenate_47[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_48 (Dropout)            (None, 24)           0           dense_57[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_48 (Concatenate)    (None, 99)           0           concatenate_47[0][0]             \n",
      "                                                                 dropout_48[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_58 (Dense)                (None, 24)           2400        concatenate_48[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_49 (Dropout)            (None, 24)           0           dense_58[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_49 (Concatenate)    (None, 123)          0           concatenate_48[0][0]             \n",
      "                                                                 dropout_49[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_59 (Dense)                (None, 24)           2976        concatenate_49[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_50 (Dropout)            (None, 24)           0           dense_59[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_50 (Concatenate)    (None, 147)          0           concatenate_49[0][0]             \n",
      "                                                                 dropout_50[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_10 (Highway)            (None, 147)          43512       concatenate_50[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_60 (Dense)                (None, 3)            444         highway_10[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 53,076\n",
      "Trainable params: 53,076\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288495 samples, validate on 32057 samples\n",
      "Epoch 1/500\n",
      "288495/288495 [==============================] - ETA: 3:31 - loss: 1.4939 - acc: 0.2754 - weighted_accuracy: 0.059 - ETA: 16s - loss: 0.7699 - acc: 0.6767 - weighted_accuracy: 0.502 - ETA: 9s - loss: 0.5665 - acc: 0.7674 - weighted_accuracy: 0.6510 - ETA: 6s - loss: 0.4809 - acc: 0.8044 - weighted_accuracy: 0.714 - ETA: 4s - loss: 0.4332 - acc: 0.8232 - weighted_accuracy: 0.751 - ETA: 3s - loss: 0.3998 - acc: 0.8363 - weighted_accuracy: 0.775 - ETA: 3s - loss: 0.3796 - acc: 0.8438 - weighted_accuracy: 0.788 - ETA: 2s - loss: 0.3664 - acc: 0.8486 - weighted_accuracy: 0.798 - ETA: 2s - loss: 0.3557 - acc: 0.8522 - weighted_accuracy: 0.805 - ETA: 2s - loss: 0.3477 - acc: 0.8552 - weighted_accuracy: 0.810 - ETA: 1s - loss: 0.3404 - acc: 0.8575 - weighted_accuracy: 0.815 - ETA: 1s - loss: 0.3338 - acc: 0.8599 - weighted_accuracy: 0.819 - ETA: 1s - loss: 0.3284 - acc: 0.8620 - weighted_accuracy: 0.823 - ETA: 1s - loss: 0.3241 - acc: 0.8635 - weighted_accuracy: 0.825 - ETA: 1s - loss: 0.3202 - acc: 0.8650 - weighted_accuracy: 0.828 - ETA: 0s - loss: 0.3171 - acc: 0.8659 - weighted_accuracy: 0.830 - ETA: 0s - loss: 0.3146 - acc: 0.8666 - weighted_accuracy: 0.831 - ETA: 0s - loss: 0.3116 - acc: 0.8678 - weighted_accuracy: 0.833 - ETA: 0s - loss: 0.3093 - acc: 0.8685 - weighted_accuracy: 0.834 - ETA: 0s - loss: 0.3068 - acc: 0.8694 - weighted_accuracy: 0.836 - ETA: 0s - loss: 0.3048 - acc: 0.8699 - weighted_accuracy: 0.837 - ETA: 0s - loss: 0.3032 - acc: 0.8705 - weighted_accuracy: 0.838 - ETA: 0s - loss: 0.3013 - acc: 0.8712 - weighted_accuracy: 0.839 - ETA: 0s - loss: 0.2994 - acc: 0.8720 - weighted_accuracy: 0.840 - ETA: 0s - loss: 0.2985 - acc: 0.8724 - weighted_accuracy: 0.841 - 2s 8us/step - loss: 0.2973 - acc: 0.8729 - weighted_accuracy: 0.8419 - val_loss: 0.2550 - val_acc: 0.8887 - val_weighted_accuracy: 0.8594\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 2/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2724 - acc: 0.8770 - weighted_accuracy: 0.845 - ETA: 1s - loss: 0.2705 - acc: 0.8784 - weighted_accuracy: 0.853 - ETA: 1s - loss: 0.2657 - acc: 0.8818 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2649 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2653 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2668 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2666 - acc: 0.8826 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2671 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2674 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2676 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2674 - acc: 0.8827 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2668 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2666 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2667 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2662 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2656 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2658 - acc: 0.8836 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2661 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8833 - weighted_accuracy: 0.858 - 1s 5us/step - loss: 0.2660 - acc: 0.8833 - weighted_accuracy: 0.8589 - val_loss: 0.2533 - val_acc: 0.8894 - val_weighted_accuracy: 0.8664\n",
      "Epoch 3/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2447 - acc: 0.8809 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2660 - acc: 0.8828 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2664 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2692 - acc: 0.8816 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2675 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2651 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2663 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2659 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2647 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2649 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2649 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2653 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2660 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2658 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2656 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2655 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2652 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2650 - acc: 0.8834 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2649 - acc: 0.8835 - weighted_accuracy: 0.8597 - val_loss: 0.2518 - val_acc: 0.8893 - val_weighted_accuracy: 0.8630\n",
      "Epoch 4/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2389 - acc: 0.8896 - weighted_accuracy: 0.871 - ETA: 1s - loss: 0.2603 - acc: 0.8827 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2602 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2610 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2617 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2618 - acc: 0.8857 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2643 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2643 - acc: 0.8834 - weighted_accuracy: 0.8593 - val_loss: 0.2521 - val_acc: 0.8896 - val_weighted_accuracy: 0.8667\n",
      "Epoch 5/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2602 - acc: 0.8877 - weighted_accuracy: 0.872 - ETA: 1s - loss: 0.2591 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2646 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2655 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2634 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2624 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2645 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2645 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2643 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2636 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2639 - acc: 0.8838 - weighted_accuracy: 0.8598 - val_loss: 0.2529 - val_acc: 0.8893 - val_weighted_accuracy: 0.8602\n",
      "Epoch 6/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2584 - acc: 0.8867 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2608 - acc: 0.8854 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2612 - acc: 0.8861 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2606 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2626 - acc: 0.8849 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2630 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2633 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2635 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2627 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2631 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2633 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2636 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2644 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2642 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2641 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2636 - acc: 0.8841 - weighted_accuracy: 0.8603 - val_loss: 0.2518 - val_acc: 0.8896 - val_weighted_accuracy: 0.8646\n",
      "Epoch 7/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2923 - acc: 0.8750 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2678 - acc: 0.8813 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2665 - acc: 0.8808 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2625 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2657 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2653 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2639 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8834 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2633 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2623 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2636 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2635 - acc: 0.8835 - weighted_accuracy: 0.8598 - val_loss: 0.2519 - val_acc: 0.8893 - val_weighted_accuracy: 0.8625\n",
      "Epoch 8/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2672 - acc: 0.8936 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2626 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2638 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2654 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2639 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2644 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2646 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2646 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2637 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2634 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2632 - acc: 0.8837 - weighted_accuracy: 0.8598 - val_loss: 0.2521 - val_acc: 0.8888 - val_weighted_accuracy: 0.8584\n",
      "Epoch 9/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2551 - acc: 0.8906 - weighted_accuracy: 0.873 - ETA: 1s - loss: 0.2633 - acc: 0.8807 - weighted_accuracy: 0.855 - ETA: 1s - loss: 0.2659 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2667 - acc: 0.8825 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2649 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2640 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8852 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2645 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2643 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2643 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2637 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2638 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8840 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.8598 - val_loss: 0.2522 - val_acc: 0.8897 - val_weighted_accuracy: 0.8650\n",
      "Epoch 10/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2632 - acc: 0.8896 - weighted_accuracy: 0.873 - ETA: 1s - loss: 0.2710 - acc: 0.8788 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2641 - acc: 0.8815 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2639 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2633 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2640 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2633 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2639 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2636 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.8603 - val_loss: 0.2558 - val_acc: 0.8880 - val_weighted_accuracy: 0.8597\n",
      "Epoch 11/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2421 - acc: 0.9023 - weighted_accuracy: 0.882 - ETA: 1s - loss: 0.2678 - acc: 0.8830 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2622 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2630 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2634 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2648 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2644 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8834 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.8602 - val_loss: 0.2516 - val_acc: 0.8895 - val_weighted_accuracy: 0.8636\n",
      "Epoch 12/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2765 - acc: 0.8711 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2641 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2653 - acc: 0.8823 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2658 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2669 - acc: 0.8818 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2671 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2669 - acc: 0.8820 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2662 - acc: 0.8823 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2661 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2660 - acc: 0.8827 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2660 - acc: 0.8829 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2653 - acc: 0.8832 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2649 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2645 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2642 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2642 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2639 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2641 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2629 - acc: 0.8838 - weighted_accuracy: 0.8599 - val_loss: 0.2519 - val_acc: 0.8894 - val_weighted_accuracy: 0.8646\n",
      "Epoch 13/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2464 - acc: 0.8926 - weighted_accuracy: 0.875 - ETA: 1s - loss: 0.2695 - acc: 0.8824 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2615 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2600 - acc: 0.8861 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2613 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2605 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2617 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2632 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8838 - weighted_accuracy: 0.859 - 1s 5us/step - loss: 0.2627 - acc: 0.8839 - weighted_accuracy: 0.8599 - val_loss: 0.2526 - val_acc: 0.8893 - val_weighted_accuracy: 0.8630\n",
      "Epoch 14/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2586 - acc: 0.8789 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2604 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2625 - acc: 0.8835 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2606 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2629 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2626 - acc: 0.8840 - weighted_accuracy: 0.8601 - val_loss: 0.2520 - val_acc: 0.8897 - val_weighted_accuracy: 0.8639\n",
      "Epoch 15/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2713 - acc: 0.8809 - weighted_accuracy: 0.851 - ETA: 1s - loss: 0.2665 - acc: 0.8809 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2650 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2654 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2653 - acc: 0.8830 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2639 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2638 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2635 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.8601 - val_loss: 0.2515 - val_acc: 0.8894 - val_weighted_accuracy: 0.8642\n",
      "Epoch 16/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2696 - acc: 0.8838 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2633 - acc: 0.8836 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2581 - acc: 0.8861 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2603 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2614 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2615 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2616 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.8601 - val_loss: 0.2521 - val_acc: 0.8896 - val_weighted_accuracy: 0.8615\n",
      "Epoch 17/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2536 - acc: 0.8896 - weighted_accuracy: 0.871 - ETA: 1s - loss: 0.2654 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2636 - acc: 0.8822 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2639 - acc: 0.8819 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2636 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2626 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2630 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2633 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2631 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2624 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8839 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2622 - acc: 0.8839 - weighted_accuracy: 0.8601 - val_loss: 0.2516 - val_acc: 0.8900 - val_weighted_accuracy: 0.8631\n",
      "Epoch 18/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288495/288495 [==============================] - ETA: 3s - loss: 0.2718 - acc: 0.8896 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2716 - acc: 0.8801 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2656 - acc: 0.8817 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2631 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2611 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2632 - acc: 0.8824 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2632 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2632 - acc: 0.8833 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2632 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.8607 - val_loss: 0.2514 - val_acc: 0.8895 - val_weighted_accuracy: 0.8633\n",
      "Epoch 19/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2550 - acc: 0.8877 - weighted_accuracy: 0.873 - ETA: 1s - loss: 0.2565 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2573 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2593 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2602 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2626 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2625 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2625 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.8605 - val_loss: 0.2515 - val_acc: 0.8894 - val_weighted_accuracy: 0.8605\n",
      "Epoch 20/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2354 - acc: 0.8945 - weighted_accuracy: 0.888 - ETA: 1s - loss: 0.2595 - acc: 0.8845 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2635 - acc: 0.8824 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2635 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2629 - acc: 0.8828 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2635 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2638 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2640 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2635 - acc: 0.8840 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2628 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2623 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.8607 - val_loss: 0.2514 - val_acc: 0.8894 - val_weighted_accuracy: 0.8641\n",
      "Epoch 21/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2996 - acc: 0.8584 - weighted_accuracy: 0.842 - ETA: 1s - loss: 0.2668 - acc: 0.8817 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2681 - acc: 0.8805 - weighted_accuracy: 0.854 - ETA: 1s - loss: 0.2660 - acc: 0.8821 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2660 - acc: 0.8816 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2654 - acc: 0.8819 - weighted_accuracy: 0.857 - ETA: 0s - loss: 0.2631 - acc: 0.8831 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2621 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2626 - acc: 0.8833 - weighted_accuracy: 0.858 - ETA: 0s - loss: 0.2624 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2612 - acc: 0.8841 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2613 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - 1s 5us/step - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.8606 - val_loss: 0.2517 - val_acc: 0.8892 - val_weighted_accuracy: 0.8618\n",
      "Epoch 22/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2792 - acc: 0.8848 - weighted_accuracy: 0.866 - ETA: 1s - loss: 0.2628 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2593 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2601 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2594 - acc: 0.8862 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2617 - acc: 0.8844 - weighted_accuracy: 0.8610 - val_loss: 0.2517 - val_acc: 0.8893 - val_weighted_accuracy: 0.8621\n",
      "Epoch 23/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2524 - acc: 0.8867 - weighted_accuracy: 0.873 - ETA: 1s - loss: 0.2617 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2637 - acc: 0.8827 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2632 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2649 - acc: 0.8825 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2636 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2631 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2614 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.8611 - val_loss: 0.2517 - val_acc: 0.8893 - val_weighted_accuracy: 0.8641\n",
      "Epoch 24/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2521 - acc: 0.8906 - weighted_accuracy: 0.871 - ETA: 1s - loss: 0.2700 - acc: 0.8787 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2682 - acc: 0.8796 - weighted_accuracy: 0.856 - ETA: 1s - loss: 0.2647 - acc: 0.8829 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2641 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2639 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2624 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2614 - acc: 0.8847 - weighted_accuracy: 0.8615 - val_loss: 0.2522 - val_acc: 0.8893 - val_weighted_accuracy: 0.8626\n",
      "Epoch 25/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2764 - acc: 0.8789 - weighted_accuracy: 0.849 - ETA: 1s - loss: 0.2627 - acc: 0.8840 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2586 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2592 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2595 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2596 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2597 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2617 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.8613 - val_loss: 0.2513 - val_acc: 0.8896 - val_weighted_accuracy: 0.8637\n",
      "Epoch 26/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2751 - acc: 0.8730 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2652 - acc: 0.8813 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2623 - acc: 0.8819 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2614 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2615 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2629 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2623 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2616 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8838 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8835 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2615 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2614 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2613 - acc: 0.8844 - weighted_accuracy: 0.8610 - val_loss: 0.2517 - val_acc: 0.8893 - val_weighted_accuracy: 0.8629\n",
      "Epoch 27/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2615 - acc: 0.8857 - weighted_accuracy: 0.857 - ETA: 1s - loss: 0.2604 - acc: 0.8865 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2636 - acc: 0.8849 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2647 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2640 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2637 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2630 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2629 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2626 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.8613 - val_loss: 0.2536 - val_acc: 0.8887 - val_weighted_accuracy: 0.8592\n",
      "Epoch 28/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2662 - acc: 0.8877 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2612 - acc: 0.8851 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2599 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2627 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2622 - acc: 0.8838 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2628 - acc: 0.8836 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2634 - acc: 0.8835 - weighted_accuracy: 0.859 - ETA: 0s - loss: 0.2630 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.8615 - val_loss: 0.2525 - val_acc: 0.8887 - val_weighted_accuracy: 0.8577\n",
      "Epoch 29/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2869 - acc: 0.8574 - weighted_accuracy: 0.831 - ETA: 1s - loss: 0.2618 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2607 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2584 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2585 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2589 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2615 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2611 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.8616 - val_loss: 0.2522 - val_acc: 0.8893 - val_weighted_accuracy: 0.8630\n",
      "Epoch 30/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2577 - acc: 0.8818 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2550 - acc: 0.8867 - weighted_accuracy: 0.867 - ETA: 1s - loss: 0.2621 - acc: 0.8843 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2607 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2610 - acc: 0.8856 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2603 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2598 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2621 - acc: 0.8837 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2624 - acc: 0.8837 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - 1s 5us/step - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.8616 - val_loss: 0.2520 - val_acc: 0.8889 - val_weighted_accuracy: 0.8624\n",
      "Epoch 31/500\n",
      "288495/288495 [==============================] - ETA: 2s - loss: 0.2886 - acc: 0.8711 - weighted_accuracy: 0.852 - ETA: 1s - loss: 0.2614 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2584 - acc: 0.8873 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2632 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2628 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2634 - acc: 0.8836 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2628 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2628 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2627 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8847 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2618 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2621 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2616 - acc: 0.8842 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.862 - 1s 5us/step - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.8619 - val_loss: 0.2524 - val_acc: 0.8889 - val_weighted_accuracy: 0.8618\n",
      "Epoch 32/500\n",
      "288495/288495 [==============================] - ETA: 3s - loss: 0.2643 - acc: 0.8887 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8854 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2640 - acc: 0.8831 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2636 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2625 - acc: 0.8830 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2600 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2614 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2613 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 
0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.861 - 2s 6us/step - loss: 0.2607 - acc: 0.8849 - weighted_accuracy: 0.8620 - val_loss: 0.2523 - val_acc: 0.8884 - val_weighted_accuracy: 0.8619\n",
      "Epoch 33/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288495/288495 [==============================] - ETA: 4s - loss: 0.2274 - acc: 0.9053 - weighted_accuracy: 0.887 - ETA: 2s - loss: 0.2606 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2636 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 2s - loss: 0.2629 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 2s - loss: 0.2625 - acc: 0.8854 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2626 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2624 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2618 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2619 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2606 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2602 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2604 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 
0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2606 - acc: 0.8849 - weighted_accuracy: 0.8623 - val_loss: 0.2512 - val_acc: 0.8899 - val_weighted_accuracy: 0.8653\n",
      "Epoch 34/500\n",
      "288495/288495 [==============================] - ETA: 4s - loss: 0.2508 - acc: 0.8936 - weighted_accuracy: 0.874 - ETA: 2s - loss: 0.2622 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2654 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2628 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 2s - loss: 0.2619 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2611 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2617 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2628 - acc: 0.8840 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2615 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2625 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2614 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2609 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2610 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2619 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2620 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2622 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2621 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2617 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2618 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2623 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2625 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 0s - loss: 0.2622 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2620 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2622 - acc: 
0.8842 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2619 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2614 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2612 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.861 - 2s 8us/step - loss: 0.2605 - acc: 0.8849 - weighted_accuracy: 0.8619 - val_loss: 0.2526 - val_acc: 0.8885 - val_weighted_accuracy: 0.8581\n",
      "Epoch 35/500\n",
      "288495/288495 [==============================] - ETA: 5s - loss: 0.2562 - acc: 0.8809 - weighted_accuracy: 0.864 - ETA: 2s - loss: 0.2649 - acc: 0.8824 - weighted_accuracy: 0.858 - ETA: 2s - loss: 0.2614 - acc: 0.8832 - weighted_accuracy: 0.859 - ETA: 2s - loss: 0.2603 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2615 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8838 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8840 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2600 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2591 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2591 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2588 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2592 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2596 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2599 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2600 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2598 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2589 - acc: 
0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8849 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2603 - acc: 0.8849 - weighted_accuracy: 0.8619 - val_loss: 0.2524 - val_acc: 0.8889 - val_weighted_accuracy: 0.8634\n",
      "Epoch 36/500\n",
      "288495/288495 [==============================] - ETA: 3s - loss: 0.2635 - acc: 0.8789 - weighted_accuracy: 0.860 - ETA: 2s - loss: 0.2636 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 2s - loss: 0.2634 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 2s - loss: 0.2591 - acc: 0.8863 - weighted_accuracy: 0.861 - ETA: 2s - loss: 0.2618 - acc: 0.8832 - weighted_accuracy: 0.857 - ETA: 2s - loss: 0.2594 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2595 - acc: 0.8846 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2597 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2609 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2610 - acc: 0.8856 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2614 - acc: 0.8853 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2608 - acc: 0.8855 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2606 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2612 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2614 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2610 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 
0.8845 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2610 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2608 - acc: 0.8847 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.8623 - val_loss: 0.2537 - val_acc: 0.8876 - val_weighted_accuracy: 0.8595\n",
      "Epoch 37/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288495/288495 [==============================] - ETA: 5s - loss: 0.2441 - acc: 0.8945 - weighted_accuracy: 0.876 - ETA: 2s - loss: 0.2593 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2601 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2589 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2581 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2581 - acc: 0.8865 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2578 - acc: 0.8866 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2586 - acc: 0.8867 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2597 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2591 - acc: 0.8859 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2594 - acc: 0.8860 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2600 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2611 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2610 - acc: 0.8844 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2607 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2603 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2609 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2604 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2609 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2608 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 
0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2604 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2602 - acc: 0.8851 - weighted_accuracy: 0.8623 - val_loss: 0.2543 - val_acc: 0.8873 - val_weighted_accuracy: 0.8620\n",
      "Epoch 38/500\n",
      "288495/288495 [==============================] - ETA: 5s - loss: 0.2575 - acc: 0.8828 - weighted_accuracy: 0.859 - ETA: 2s - loss: 0.2628 - acc: 0.8810 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2629 - acc: 0.8828 - weighted_accuracy: 0.861 - ETA: 2s - loss: 0.2612 - acc: 0.8836 - weighted_accuracy: 0.861 - ETA: 2s - loss: 0.2601 - acc: 0.8851 - weighted_accuracy: 0.863 - ETA: 2s - loss: 0.2611 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2603 - acc: 0.8845 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2600 - acc: 0.8844 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2606 - acc: 0.8837 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8836 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8835 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2603 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2592 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2588 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2590 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2590 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2589 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2593 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2598 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2593 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2591 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2599 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2590 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 
0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2594 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8854 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.8625 - val_loss: 0.2530 - val_acc: 0.8882 - val_weighted_accuracy: 0.8601\n",
      "Epoch 39/500\n",
      "288495/288495 [==============================] - ETA: 5s - loss: 0.2674 - acc: 0.8857 - weighted_accuracy: 0.860 - ETA: 2s - loss: 0.2551 - acc: 0.8899 - weighted_accuracy: 0.866 - ETA: 2s - loss: 0.2637 - acc: 0.8837 - weighted_accuracy: 0.859 - ETA: 2s - loss: 0.2625 - acc: 0.8850 - weighted_accuracy: 0.860 - ETA: 2s - loss: 0.2616 - acc: 0.8862 - weighted_accuracy: 0.862 - ETA: 2s - loss: 0.2604 - acc: 0.8864 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2610 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2629 - acc: 0.8846 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2627 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2630 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2629 - acc: 0.8845 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8848 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2622 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2616 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2624 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2628 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2623 - acc: 0.8843 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8845 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2613 - acc: 0.8847 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2608 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2609 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2606 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2607 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2604 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2606 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2605 - acc: 0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 
0.8851 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2602 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2603 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8854 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2599 - acc: 0.8853 - weighted_accuracy: 0.8624 - val_loss: 0.2531 - val_acc: 0.8886 - val_weighted_accuracy: 0.8616\n",
      "Epoch 40/500\n",
      "288495/288495 [==============================] - ETA: 3s - loss: 0.2799 - acc: 0.8760 - weighted_accuracy: 0.851 - ETA: 2s - loss: 0.2651 - acc: 0.8779 - weighted_accuracy: 0.857 - ETA: 2s - loss: 0.2616 - acc: 0.8816 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2598 - acc: 0.8843 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2578 - acc: 0.8857 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2574 - acc: 0.8860 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2570 - acc: 0.8858 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2567 - acc: 0.8862 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2585 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2584 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2589 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2586 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2587 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2580 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2585 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2587 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2589 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2584 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2582 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2579 - acc: 0.8860 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2585 - acc: 0.8858 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2586 - acc: 0.8857 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8856 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 
0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2601 - acc: 0.8848 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8852 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2598 - acc: 0.8852 - weighted_accuracy: 0.8625 - val_loss: 0.2524 - val_acc: 0.8890 - val_weighted_accuracy: 0.8641\n",
      "Epoch 41/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "288495/288495 [==============================] - ETA: 5s - loss: 0.2809 - acc: 0.8828 - weighted_accuracy: 0.857 - ETA: 2s - loss: 0.2674 - acc: 0.8797 - weighted_accuracy: 0.857 - ETA: 2s - loss: 0.2605 - acc: 0.8823 - weighted_accuracy: 0.860 - ETA: 2s - loss: 0.2659 - acc: 0.8795 - weighted_accuracy: 0.856 - ETA: 2s - loss: 0.2651 - acc: 0.8812 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2649 - acc: 0.8816 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2641 - acc: 0.8818 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2630 - acc: 0.8824 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2632 - acc: 0.8821 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2628 - acc: 0.8826 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2630 - acc: 0.8823 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2633 - acc: 0.8822 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2626 - acc: 0.8825 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2620 - acc: 0.8831 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2618 - acc: 0.8829 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2617 - acc: 0.8832 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2607 - acc: 0.8838 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2608 - acc: 0.8839 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2601 - acc: 0.8841 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2596 - acc: 0.8844 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2596 - acc: 0.8846 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2593 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2591 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8849 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 
0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8855 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2593 - acc: 0.8854 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2592 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2598 - acc: 0.8851 - weighted_accuracy: 0.8623 - val_loss: 0.2528 - val_acc: 0.8882 - val_weighted_accuracy: 0.8580\n",
      "Epoch 42/500\n",
      "288495/288495 [==============================] - ETA: 4s - loss: 0.2892 - acc: 0.8779 - weighted_accuracy: 0.852 - ETA: 2s - loss: 0.2635 - acc: 0.8804 - weighted_accuracy: 0.858 - ETA: 2s - loss: 0.2573 - acc: 0.8832 - weighted_accuracy: 0.860 - ETA: 2s - loss: 0.2598 - acc: 0.8830 - weighted_accuracy: 0.859 - ETA: 2s - loss: 0.2609 - acc: 0.8822 - weighted_accuracy: 0.858 - ETA: 2s - loss: 0.2601 - acc: 0.8826 - weighted_accuracy: 0.858 - ETA: 1s - loss: 0.2591 - acc: 0.8833 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2600 - acc: 0.8826 - weighted_accuracy: 0.859 - ETA: 1s - loss: 0.2596 - acc: 0.8829 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2589 - acc: 0.8837 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2588 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2590 - acc: 0.8839 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2591 - acc: 0.8842 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2587 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2585 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2591 - acc: 0.8843 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2593 - acc: 0.8841 - weighted_accuracy: 0.860 - ETA: 1s - loss: 0.2587 - acc: 0.8846 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2586 - acc: 0.8848 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2584 - acc: 0.8849 - weighted_accuracy: 0.861 - ETA: 1s - loss: 0.2581 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 1s - loss: 0.2583 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2585 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2590 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2591 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2592 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 
0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2599 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2596 - acc: 0.8851 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8850 - weighted_accuracy: 0.861 - ETA: 0s - loss: 0.2599 - acc: 0.8850 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2597 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2594 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2595 - acc: 0.8852 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8850 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2598 - acc: 0.8850 - weighted_accuracy: 0.8620 - val_loss: 0.2538 - val_acc: 0.8884 - val_weighted_accuracy: 0.8580\n",
      "Epoch 43/500\n",
      "288495/288495 [==============================] - ETA: 4s - loss: 0.2645 - acc: 0.8916 - weighted_accuracy: 0.868 - ETA: 2s - loss: 0.2529 - acc: 0.8933 - weighted_accuracy: 0.867 - ETA: 2s - loss: 0.2552 - acc: 0.8900 - weighted_accuracy: 0.866 - ETA: 2s - loss: 0.2573 - acc: 0.8892 - weighted_accuracy: 0.866 - ETA: 2s - loss: 0.2578 - acc: 0.8886 - weighted_accuracy: 0.864 - ETA: 2s - loss: 0.2591 - acc: 0.8877 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2584 - acc: 0.8879 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2573 - acc: 0.8887 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2587 - acc: 0.8879 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2581 - acc: 0.8877 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2577 - acc: 0.8878 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2580 - acc: 0.8876 - weighted_accuracy: 0.865 - ETA: 1s - loss: 0.2590 - acc: 0.8869 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2585 - acc: 0.8870 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2591 - acc: 0.8866 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2592 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2590 - acc: 0.8867 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2593 - acc: 0.8864 - weighted_accuracy: 0.864 - ETA: 1s - loss: 0.2595 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2591 - acc: 0.8861 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2595 - acc: 0.8857 - weighted_accuracy: 0.863 - ETA: 1s - loss: 0.2590 - acc: 0.8859 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8856 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2598 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 
0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2595 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2600 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2601 - acc: 0.8853 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2597 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8855 - weighted_accuracy: 0.863 - ETA: 0s - loss: 0.2598 - acc: 0.8854 - weighted_accuracy: 0.862 - ETA: 0s - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.862 - 2s 8us/step - loss: 0.2596 - acc: 0.8855 - weighted_accuracy: 0.8630 - val_loss: 0.2557 - val_acc: 0.8876 - val_weighted_accuracy: 0.8595\n"
     ]
    }
   ],
   "source": [
    "trainer = KerasModelTrainer(model_stamp=\"Ensemble-DenseNet\", epoch_num=500)\n",
    "models, score, folds_preds = trainer.train_folds(features=ensemble_trains, y=to_categorical(labels), augments=None, fold_count=10,\n",
    "    batch_size=1024, \n",
    "    scale_sample_weight=None, class_weight=None,\n",
    "    get_model_func=_agent_get_model, \n",
    "    patience=20)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8612014500879486\n",
      "Predicting training results...\n",
      "Predicting testing results...\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 12us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
    "print(\"score\", score)\n",
    "oofs_dir = \"../data/ensemble/oofs/\"\n",
    "output_dir = \"../data/ensemble/preds/\"\n",
    "onehot_pred_dir = \"../data/ensemble/nn_one_hot/\"\n",
    "\n",
    "model_submit_prefix = \"AddNN-Ensemble\"\n",
    "\n",
    "oofs_path = oofs_dir + model_submit_prefix\n",
    "output_path = output_dir + model_submit_prefix\n",
    "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
    "\n",
    "print(\"Predicting training results...\")\n",
    "train_predicts = np.concatenate(folds_preds, axis=0)\n",
    "score = np_weighted_accuracy(to_categorical(labels), train_predicts)\n",
    "\n",
    "oofs = pd.DataFrame({\"unrelated\": train_predicts[:, 0], \"agreed\": train_predicts[:, 1], \"disagreed\": train_predicts[:, 2]})\n",
    "submit_path = oofs_path + \"-Train-L{:.4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "oofs.to_csv(submit_path, index=False)\n",
    "\n",
    "print(\"Predicting testing results...\")\n",
    "test_predicts_list = []\n",
    "for fold_id, model in enumerate(models):\n",
    "    test_predicts = model.predict({\"mata-features\": ensemble_tests}, batch_size=128, verbose=1)\n",
    "    test_predicts_list.append(test_predicts)\n",
    "\n",
    "test_predicts = np.zeros(test_predicts_list[0].shape)\n",
    "for fold_predict in test_predicts_list:\n",
    "    test_predicts += fold_predict\n",
    "test_predicts /= len(test_predicts_list)\n",
    "\n",
    "test_predicts = pd.DataFrame({\"unrelated\": test_predicts[:, 0], \"agreed\": test_predicts[:, 1], \"disagreed\": test_predicts[:, 2]})\n",
    "submit_path = output_path + \"-L{:.4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
    "\n",
    "print(\"Predicting labeled testing results...\")\n",
    "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
    "pred_labels = test_predicts.idxmax(axis=1)\n",
    "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
    "submit_path = one_hot_pred_path + \"-L{:.4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "sub.to_csv(submit_path, index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "score = np_weighted_accuracy(to_categorical(labels), train_predicts)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.8612014500879486"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "score"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  },
  "widgets": {
   "application/vnd.jupyter.widget-state+json": {
    "state": {},
    "version_major": 1,
    "version_minor": 0
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
