{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Phase 3 Weighted Bagging"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\gensim\\utils.py:1197: UserWarning: detected Windows; aliasing chunkize to chunkize_serial\n",
      "  warnings.warn(\"detected Windows; aliasing chunkize to chunkize_serial\")\n",
      "Using TensorFlow backend.\n",
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\fuzzywuzzy\\fuzz.py:35: UserWarning: Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning\n",
      "  warnings.warn('Using slow pure-python SequenceMatcher. Install python-Levenshtein to remove this warning')\n"
     ]
    }
   ],
   "source": [
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import codecs\n",
    "import gensim\n",
    "import itertools\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import operator\n",
    "import sys\n",
    "\n",
    "from nltk import ngrams\n",
    "from collections import Counter\n",
    "from string import punctuation\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "\n",
    "from iwillwin.trainer.supervised_trainer import KerasModelTrainer\n",
    "from iwillwin.data_utils.data_helpers import DataTransformer, DataLoader\n",
    "from iwillwin.config import dataset_config\n",
    "from iwillwin.data_utils.feature_engineering import FeatureCreator\n",
    "\n",
    "from fuzzywuzzy import fuzz\n",
    "from nltk.corpus import stopwords\n",
    "from tqdm import tqdm\n",
    "from scipy.stats import skew, kurtosis\n",
    "from scipy.spatial.distance import cosine, cityblock, jaccard, canberra, euclidean, minkowski, braycurtis\n",
    "from nltk import word_tokenize\n",
    "\n",
    "import seaborn as sns\n",
    "%matplotlib inline\n",
    "\n",
    "import lightgbm as lgb\n",
    "from sklearn.model_selection import train_test_split\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.model_selection import KFold\n",
    "\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import codecs\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import operator\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "\n",
    "########################################\n",
    "## import packages\n",
    "########################################\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import codecs\n",
    "import numpy as np\n",
    "np.random.seed(1337)\n",
    "\n",
    "import pandas as pd\n",
    "import operator\n",
    "import sys\n",
    "\n",
    "from string import punctuation\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "\n",
    "from iwillwin.trainer.supervised_trainer import KerasModelTrainer\n",
    "from iwillwin.data_utils.data_helpers import DataTransformer, DataLoader\n",
    "from iwillwin.config import dataset_config\n",
    "from keras.utils import to_categorical"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Building prefix dict from the default dictionary ...\n",
      "Dumping model to file cache C:\\Users\\zake7\\AppData\\Local\\Temp\\jieba.cache\n",
      "Loading model cost 0.846 seconds.\n",
      "Prefix dict has been built succesfully.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[DataHelper] Apply normalization on value-type columns\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\validation.py:475: DataConversionWarning: Data with input dtype int64 was converted to float64 by MinMaxScaler.\n",
      "  warnings.warn(msg, DataConversionWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Doing preprocessing...\n",
      "Transforming words to indices...\n",
      "Shape of data tensor: (320552, 50) (320552, 50)\n",
      "Shape of label tensor: (320552,)\n",
      "Preprocessed.\n",
      "Number of unique words 83265\n"
     ]
    }
   ],
   "source": [
    "NB_WORDS, MAX_SEQUENCE_LENGTH = 50000, 50\n",
    "data_transformer = DataTransformer(max_num_words=NB_WORDS, max_sequence_length=MAX_SEQUENCE_LENGTH, char_level=False,\n",
    "                                   normalization=True, features_processed=True)\n",
    "trains_nns, tests_nns, labels = data_transformer.prepare_data(dual=False)\n",
    "print(\"Number of unique words\", len(data_transformer.tokenizer.index_docs))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "trains_meta = trains_nns[2]\n",
    "tests_meta = tests_nns[2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "train_df = pd.read_csv('../data/dataset/train.csv')\n",
    "test_df = pd.read_csv('../data/dataset/test.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n",
      "nan <class 'float'>\n"
     ]
    }
   ],
   "source": [
    "rumor_words = ['辟谣', '谣言', '勿传', '假的']\n",
    "\n",
    "def is_rumor(text):\n",
    "    if type(text) != str:\n",
    "        print(text, type(text))\n",
    "        return 0\n",
    "    for rumor_word in rumor_words:\n",
    "        if rumor_word in text:\n",
    "            return 1\n",
    "    return 0\n",
    "\n",
    "def has_split_symbol(text):\n",
    "    if type(text) != str:\n",
    "        return 0\n",
    "    if '|' in text:\n",
    "        return 1\n",
    "    return 0\n",
    "\n",
    "for df in [train_df, test_df]:\n",
    "    df['has_|'] = df['title2_zh'].apply(has_split_symbol)\n",
    "    df['has_rumor_words'] = df['title2_zh'].apply(is_rumor)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "train_has_rumor = train_df.has_rumor_words.values\n",
    "test_has_rumor = test_df.has_rumor_words.values\n",
    "\n",
    "trick_trains_features = np.concatenate((trains_nns[2], train_has_rumor.reshape((-1, 1))), axis=1)\n",
    "trick_tests_features = np.concatenate((tests_nns[2], test_has_rumor.reshape((-1, 1))), axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "oof_file_names = sorted([f for f in listdir('../data/pseudo/oofs/') if isfile(join('../data/pseudo/oofs/', f)) and f != '.gitkeep'])\n",
    "preds_file_names = [name.replace('-Train', '') for name in oof_file_names]\n",
    "\n",
    "oofs = []\n",
    "preds = []\n",
    "for name in oof_file_names:\n",
    "    oofs.append(pd.read_csv('../data/pseudo/oofs/' + name))\n",
    "for name in preds_file_names:\n",
    "    preds.append(pd.read_csv('../data/pseudo/output/' + name))    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 3Embedding-DecomposalbeAttention-NoMeta-ClassWeighted-NoEM-Train-L0.857551-NB5000.csv\n",
      "1 P3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-withEM-Train-L0.297362-NB5000.csv\n",
      "2 P3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.292235-NB5000.csv\n",
      "3 P3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.283131-NB5000.csv\n",
      "4 PS3Embedding-3LayersDenseCNN42-NoDrop-NoClassWeighted-withEM-Train-L0.853793-NB5000.csv\n",
      "5 PS3Embedding-3LayersDenseRNN42-Drop01-NoMeta-NoClassWeighted-WithEM-Train-L0.853764-NB5000.csv\n",
      "6 PS3Embedding-ESIM-Drop01-NoMeta-NoClassWeighted-NoEM-Train-L0.863459-NB5000.csv\n",
      "7 PSWordSGNS-DAttn-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.861867-NB100000.csv\n",
      "8 PSWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.851927-NB100000.csv\n",
      "9 PSWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.855071-NB100000.csv\n",
      "10 PSWordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.860951-NB100000.csv\n",
      "11 PSWordTC-DAttn-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.861878-NB100000.csv\n",
      "12 PSWordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.862805-NB100000.csv\n",
      "13 PSWordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.863699-NB100000.csv\n",
      "14 PWordSGNS-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.852129-NB100000.csv\n",
      "15 PWordSGNS-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.310416-NB100000.csv\n",
      "16 PWordTC-DenseCNN5Layers-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.847205-NB100000.csv\n",
      "17 PWordTC-DenseRNN-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.308049-NB100000.csv\n",
      "18 PWordTC-ESIM-NoMeta-3P-NoEM-NoClassWeighted-3Layers-Train-L0.306949-NB100000.csv\n"
     ]
    }
   ],
   "source": [
    "for i, name in enumerate(oof_file_names):\n",
    "    print(i, name)\n",
    "    \n",
    "trains = pd.DataFrame()\n",
    "tests = pd.DataFrame()\n",
    "\n",
    "for i in range(len(oof_file_names)):\n",
    "    for label_type in ['agreed', 'disagreed', 'unrelated']:\n",
    "        trains['oofs_{}_{}'.format(i, label_type)] = oofs[i][label_type].values\n",
    "        tests['oofs_pred{}_{}'.format(i, label_type)] = preds[i][label_type].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "unrelated = pd.DataFrame()\n",
    "agreeds = pd.DataFrame()\n",
    "disagreeds = pd.DataFrame()\n",
    "\n",
    "#check_oofs = True\n",
    "check_oofs = False\n",
    "\n",
    "\n",
    "if check_oofs:\n",
    "    for i, oof in enumerate(oofs):\n",
    "        agreeds['oofs_agreed_{}'.format(i)] = oofs[i]['agreed'].values\n",
    "        unrelated['oofs_unrelated_{}'.format(i)] = oofs[i]['unrelated'].values\n",
    "        disagreeds['oofs_disagreeds_{}'.format(i)] = oofs[i]['disagreed'].values\n",
    "else:\n",
    "    for i, oof in enumerate(oofs):\n",
    "        agreeds['oofs_agreed_{}'.format(i)] = preds[i]['agreed'].values\n",
    "        unrelated['oofs_unrelated_{}'.format(i)] = preds[i]['unrelated'].values\n",
    "        disagreeds['oofs_disagreeds_{}'.format(i)] = preds[i]['disagreed'].values  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_agreed_0</th>\n",
       "      <th>oofs_agreed_1</th>\n",
       "      <th>oofs_agreed_2</th>\n",
       "      <th>oofs_agreed_3</th>\n",
       "      <th>oofs_agreed_4</th>\n",
       "      <th>oofs_agreed_5</th>\n",
       "      <th>oofs_agreed_6</th>\n",
       "      <th>oofs_agreed_7</th>\n",
       "      <th>oofs_agreed_8</th>\n",
       "      <th>oofs_agreed_9</th>\n",
       "      <th>oofs_agreed_10</th>\n",
       "      <th>oofs_agreed_11</th>\n",
       "      <th>oofs_agreed_12</th>\n",
       "      <th>oofs_agreed_13</th>\n",
       "      <th>oofs_agreed_14</th>\n",
       "      <th>oofs_agreed_15</th>\n",
       "      <th>oofs_agreed_16</th>\n",
       "      <th>oofs_agreed_17</th>\n",
       "      <th>oofs_agreed_18</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985454</td>\n",
       "      <td>0.988323</td>\n",
       "      <td>0.986448</td>\n",
       "      <td>0.984380</td>\n",
       "      <td>0.987315</td>\n",
       "      <td>0.985907</td>\n",
       "      <td>0.973378</td>\n",
       "      <td>0.975002</td>\n",
       "      <td>0.977427</td>\n",
       "      <td>0.975342</td>\n",
       "      <td>0.976909</td>\n",
       "      <td>0.976050</td>\n",
       "      <td>0.976170</td>\n",
       "      <td>0.976952</td>\n",
       "      <td>0.976174</td>\n",
       "      <td>0.975576</td>\n",
       "      <td>0.975719</td>\n",
       "      <td>0.976823</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_1</th>\n",
       "      <td>0.985454</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988241</td>\n",
       "      <td>0.986295</td>\n",
       "      <td>0.995884</td>\n",
       "      <td>0.988644</td>\n",
       "      <td>0.985872</td>\n",
       "      <td>0.976543</td>\n",
       "      <td>0.978513</td>\n",
       "      <td>0.977921</td>\n",
       "      <td>0.978879</td>\n",
       "      <td>0.974114</td>\n",
       "      <td>0.978497</td>\n",
       "      <td>0.978545</td>\n",
       "      <td>0.977649</td>\n",
       "      <td>0.979854</td>\n",
       "      <td>0.979520</td>\n",
       "      <td>0.978561</td>\n",
       "      <td>0.979853</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_2</th>\n",
       "      <td>0.988323</td>\n",
       "      <td>0.988241</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.987435</td>\n",
       "      <td>0.988193</td>\n",
       "      <td>0.996147</td>\n",
       "      <td>0.987102</td>\n",
       "      <td>0.970575</td>\n",
       "      <td>0.976039</td>\n",
       "      <td>0.976866</td>\n",
       "      <td>0.977199</td>\n",
       "      <td>0.973728</td>\n",
       "      <td>0.978022</td>\n",
       "      <td>0.977212</td>\n",
       "      <td>0.976467</td>\n",
       "      <td>0.977935</td>\n",
       "      <td>0.976717</td>\n",
       "      <td>0.977700</td>\n",
       "      <td>0.977855</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_3</th>\n",
       "      <td>0.986448</td>\n",
       "      <td>0.986295</td>\n",
       "      <td>0.987435</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984308</td>\n",
       "      <td>0.986038</td>\n",
       "      <td>0.996268</td>\n",
       "      <td>0.976276</td>\n",
       "      <td>0.979169</td>\n",
       "      <td>0.978624</td>\n",
       "      <td>0.983136</td>\n",
       "      <td>0.979642</td>\n",
       "      <td>0.983276</td>\n",
       "      <td>0.984887</td>\n",
       "      <td>0.978003</td>\n",
       "      <td>0.984594</td>\n",
       "      <td>0.979907</td>\n",
       "      <td>0.983532</td>\n",
       "      <td>0.985561</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_4</th>\n",
       "      <td>0.984380</td>\n",
       "      <td>0.995884</td>\n",
       "      <td>0.988193</td>\n",
       "      <td>0.984308</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988589</td>\n",
       "      <td>0.984010</td>\n",
       "      <td>0.974411</td>\n",
       "      <td>0.977064</td>\n",
       "      <td>0.976446</td>\n",
       "      <td>0.976906</td>\n",
       "      <td>0.971661</td>\n",
       "      <td>0.976735</td>\n",
       "      <td>0.976505</td>\n",
       "      <td>0.976157</td>\n",
       "      <td>0.977627</td>\n",
       "      <td>0.977743</td>\n",
       "      <td>0.976696</td>\n",
       "      <td>0.977415</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_5</th>\n",
       "      <td>0.987315</td>\n",
       "      <td>0.988644</td>\n",
       "      <td>0.996147</td>\n",
       "      <td>0.986038</td>\n",
       "      <td>0.988589</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986175</td>\n",
       "      <td>0.971035</td>\n",
       "      <td>0.974898</td>\n",
       "      <td>0.975453</td>\n",
       "      <td>0.976168</td>\n",
       "      <td>0.971536</td>\n",
       "      <td>0.976450</td>\n",
       "      <td>0.975633</td>\n",
       "      <td>0.974975</td>\n",
       "      <td>0.976913</td>\n",
       "      <td>0.975815</td>\n",
       "      <td>0.976077</td>\n",
       "      <td>0.976816</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_6</th>\n",
       "      <td>0.985907</td>\n",
       "      <td>0.985872</td>\n",
       "      <td>0.987102</td>\n",
       "      <td>0.996268</td>\n",
       "      <td>0.984010</td>\n",
       "      <td>0.986175</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.975417</td>\n",
       "      <td>0.977783</td>\n",
       "      <td>0.977023</td>\n",
       "      <td>0.981811</td>\n",
       "      <td>0.977934</td>\n",
       "      <td>0.981879</td>\n",
       "      <td>0.983655</td>\n",
       "      <td>0.976346</td>\n",
       "      <td>0.983055</td>\n",
       "      <td>0.978560</td>\n",
       "      <td>0.982419</td>\n",
       "      <td>0.984168</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_7</th>\n",
       "      <td>0.973378</td>\n",
       "      <td>0.976543</td>\n",
       "      <td>0.970575</td>\n",
       "      <td>0.976276</td>\n",
       "      <td>0.974411</td>\n",
       "      <td>0.971035</td>\n",
       "      <td>0.975417</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988611</td>\n",
       "      <td>0.982953</td>\n",
       "      <td>0.986930</td>\n",
       "      <td>0.983626</td>\n",
       "      <td>0.979390</td>\n",
       "      <td>0.982267</td>\n",
       "      <td>0.982601</td>\n",
       "      <td>0.986865</td>\n",
       "      <td>0.989011</td>\n",
       "      <td>0.979822</td>\n",
       "      <td>0.983091</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_8</th>\n",
       "      <td>0.975002</td>\n",
       "      <td>0.978513</td>\n",
       "      <td>0.976039</td>\n",
       "      <td>0.979169</td>\n",
       "      <td>0.977064</td>\n",
       "      <td>0.974898</td>\n",
       "      <td>0.977783</td>\n",
       "      <td>0.988611</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.987310</td>\n",
       "      <td>0.990456</td>\n",
       "      <td>0.983518</td>\n",
       "      <td>0.984780</td>\n",
       "      <td>0.985321</td>\n",
       "      <td>0.987066</td>\n",
       "      <td>0.990289</td>\n",
       "      <td>0.997975</td>\n",
       "      <td>0.984720</td>\n",
       "      <td>0.985544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_9</th>\n",
       "      <td>0.977427</td>\n",
       "      <td>0.977921</td>\n",
       "      <td>0.976866</td>\n",
       "      <td>0.978624</td>\n",
       "      <td>0.976446</td>\n",
       "      <td>0.975453</td>\n",
       "      <td>0.977023</td>\n",
       "      <td>0.982953</td>\n",
       "      <td>0.987310</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984884</td>\n",
       "      <td>0.989911</td>\n",
       "      <td>0.989481</td>\n",
       "      <td>0.987805</td>\n",
       "      <td>0.998649</td>\n",
       "      <td>0.985065</td>\n",
       "      <td>0.987453</td>\n",
       "      <td>0.989079</td>\n",
       "      <td>0.988042</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_10</th>\n",
       "      <td>0.975342</td>\n",
       "      <td>0.978879</td>\n",
       "      <td>0.977199</td>\n",
       "      <td>0.983136</td>\n",
       "      <td>0.976906</td>\n",
       "      <td>0.976168</td>\n",
       "      <td>0.981811</td>\n",
       "      <td>0.986930</td>\n",
       "      <td>0.990456</td>\n",
       "      <td>0.984884</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.983689</td>\n",
       "      <td>0.987243</td>\n",
       "      <td>0.989598</td>\n",
       "      <td>0.984538</td>\n",
       "      <td>0.995895</td>\n",
       "      <td>0.991031</td>\n",
       "      <td>0.987307</td>\n",
       "      <td>0.990067</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_11</th>\n",
       "      <td>0.976909</td>\n",
       "      <td>0.974114</td>\n",
       "      <td>0.973728</td>\n",
       "      <td>0.979642</td>\n",
       "      <td>0.971661</td>\n",
       "      <td>0.971536</td>\n",
       "      <td>0.977934</td>\n",
       "      <td>0.983626</td>\n",
       "      <td>0.983518</td>\n",
       "      <td>0.989911</td>\n",
       "      <td>0.983689</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986204</td>\n",
       "      <td>0.987806</td>\n",
       "      <td>0.989436</td>\n",
       "      <td>0.984225</td>\n",
       "      <td>0.983439</td>\n",
       "      <td>0.986123</td>\n",
       "      <td>0.987201</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_12</th>\n",
       "      <td>0.976050</td>\n",
       "      <td>0.978497</td>\n",
       "      <td>0.978022</td>\n",
       "      <td>0.983276</td>\n",
       "      <td>0.976735</td>\n",
       "      <td>0.976450</td>\n",
       "      <td>0.981879</td>\n",
       "      <td>0.979390</td>\n",
       "      <td>0.984780</td>\n",
       "      <td>0.989481</td>\n",
       "      <td>0.987243</td>\n",
       "      <td>0.986204</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.991436</td>\n",
       "      <td>0.989118</td>\n",
       "      <td>0.987471</td>\n",
       "      <td>0.985098</td>\n",
       "      <td>0.998035</td>\n",
       "      <td>0.991292</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_13</th>\n",
       "      <td>0.976170</td>\n",
       "      <td>0.978545</td>\n",
       "      <td>0.977212</td>\n",
       "      <td>0.984887</td>\n",
       "      <td>0.976505</td>\n",
       "      <td>0.975633</td>\n",
       "      <td>0.983655</td>\n",
       "      <td>0.982267</td>\n",
       "      <td>0.985321</td>\n",
       "      <td>0.987805</td>\n",
       "      <td>0.989598</td>\n",
       "      <td>0.987806</td>\n",
       "      <td>0.991436</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.987627</td>\n",
       "      <td>0.990288</td>\n",
       "      <td>0.985720</td>\n",
       "      <td>0.991746</td>\n",
       "      <td>0.996646</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_14</th>\n",
       "      <td>0.976952</td>\n",
       "      <td>0.977649</td>\n",
       "      <td>0.976467</td>\n",
       "      <td>0.978003</td>\n",
       "      <td>0.976157</td>\n",
       "      <td>0.974975</td>\n",
       "      <td>0.976346</td>\n",
       "      <td>0.982601</td>\n",
       "      <td>0.987066</td>\n",
       "      <td>0.998649</td>\n",
       "      <td>0.984538</td>\n",
       "      <td>0.989436</td>\n",
       "      <td>0.989118</td>\n",
       "      <td>0.987627</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984773</td>\n",
       "      <td>0.987139</td>\n",
       "      <td>0.988966</td>\n",
       "      <td>0.987761</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_15</th>\n",
       "      <td>0.976174</td>\n",
       "      <td>0.979854</td>\n",
       "      <td>0.977935</td>\n",
       "      <td>0.984594</td>\n",
       "      <td>0.977627</td>\n",
       "      <td>0.976913</td>\n",
       "      <td>0.983055</td>\n",
       "      <td>0.986865</td>\n",
       "      <td>0.990289</td>\n",
       "      <td>0.985065</td>\n",
       "      <td>0.995895</td>\n",
       "      <td>0.984225</td>\n",
       "      <td>0.987471</td>\n",
       "      <td>0.990288</td>\n",
       "      <td>0.984773</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.991009</td>\n",
       "      <td>0.987715</td>\n",
       "      <td>0.990837</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_16</th>\n",
       "      <td>0.975576</td>\n",
       "      <td>0.979520</td>\n",
       "      <td>0.976717</td>\n",
       "      <td>0.979907</td>\n",
       "      <td>0.977743</td>\n",
       "      <td>0.975815</td>\n",
       "      <td>0.978560</td>\n",
       "      <td>0.989011</td>\n",
       "      <td>0.997975</td>\n",
       "      <td>0.987453</td>\n",
       "      <td>0.991031</td>\n",
       "      <td>0.983439</td>\n",
       "      <td>0.985098</td>\n",
       "      <td>0.985720</td>\n",
       "      <td>0.987139</td>\n",
       "      <td>0.991009</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985160</td>\n",
       "      <td>0.986302</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_17</th>\n",
       "      <td>0.975719</td>\n",
       "      <td>0.978561</td>\n",
       "      <td>0.977700</td>\n",
       "      <td>0.983532</td>\n",
       "      <td>0.976696</td>\n",
       "      <td>0.976077</td>\n",
       "      <td>0.982419</td>\n",
       "      <td>0.979822</td>\n",
       "      <td>0.984720</td>\n",
       "      <td>0.989079</td>\n",
       "      <td>0.987307</td>\n",
       "      <td>0.986123</td>\n",
       "      <td>0.998035</td>\n",
       "      <td>0.991746</td>\n",
       "      <td>0.988966</td>\n",
       "      <td>0.987715</td>\n",
       "      <td>0.985160</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.991585</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_agreed_18</th>\n",
       "      <td>0.976823</td>\n",
       "      <td>0.979853</td>\n",
       "      <td>0.977855</td>\n",
       "      <td>0.985561</td>\n",
       "      <td>0.977415</td>\n",
       "      <td>0.976816</td>\n",
       "      <td>0.984168</td>\n",
       "      <td>0.983091</td>\n",
       "      <td>0.985544</td>\n",
       "      <td>0.988042</td>\n",
       "      <td>0.990067</td>\n",
       "      <td>0.987201</td>\n",
       "      <td>0.991292</td>\n",
       "      <td>0.996646</td>\n",
       "      <td>0.987761</td>\n",
       "      <td>0.990837</td>\n",
       "      <td>0.986302</td>\n",
       "      <td>0.991585</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                oofs_agreed_0  oofs_agreed_1  oofs_agreed_2  oofs_agreed_3  \\\n",
       "oofs_agreed_0        1.000000       0.985454       0.988323       0.986448   \n",
       "oofs_agreed_1        0.985454       1.000000       0.988241       0.986295   \n",
       "oofs_agreed_2        0.988323       0.988241       1.000000       0.987435   \n",
       "oofs_agreed_3        0.986448       0.986295       0.987435       1.000000   \n",
       "oofs_agreed_4        0.984380       0.995884       0.988193       0.984308   \n",
       "oofs_agreed_5        0.987315       0.988644       0.996147       0.986038   \n",
       "oofs_agreed_6        0.985907       0.985872       0.987102       0.996268   \n",
       "oofs_agreed_7        0.973378       0.976543       0.970575       0.976276   \n",
       "oofs_agreed_8        0.975002       0.978513       0.976039       0.979169   \n",
       "oofs_agreed_9        0.977427       0.977921       0.976866       0.978624   \n",
       "oofs_agreed_10       0.975342       0.978879       0.977199       0.983136   \n",
       "oofs_agreed_11       0.976909       0.974114       0.973728       0.979642   \n",
       "oofs_agreed_12       0.976050       0.978497       0.978022       0.983276   \n",
       "oofs_agreed_13       0.976170       0.978545       0.977212       0.984887   \n",
       "oofs_agreed_14       0.976952       0.977649       0.976467       0.978003   \n",
       "oofs_agreed_15       0.976174       0.979854       0.977935       0.984594   \n",
       "oofs_agreed_16       0.975576       0.979520       0.976717       0.979907   \n",
       "oofs_agreed_17       0.975719       0.978561       0.977700       0.983532   \n",
       "oofs_agreed_18       0.976823       0.979853       0.977855       0.985561   \n",
       "\n",
       "                oofs_agreed_4  oofs_agreed_5  oofs_agreed_6  oofs_agreed_7  \\\n",
       "oofs_agreed_0        0.984380       0.987315       0.985907       0.973378   \n",
       "oofs_agreed_1        0.995884       0.988644       0.985872       0.976543   \n",
       "oofs_agreed_2        0.988193       0.996147       0.987102       0.970575   \n",
       "oofs_agreed_3        0.984308       0.986038       0.996268       0.976276   \n",
       "oofs_agreed_4        1.000000       0.988589       0.984010       0.974411   \n",
       "oofs_agreed_5        0.988589       1.000000       0.986175       0.971035   \n",
       "oofs_agreed_6        0.984010       0.986175       1.000000       0.975417   \n",
       "oofs_agreed_7        0.974411       0.971035       0.975417       1.000000   \n",
       "oofs_agreed_8        0.977064       0.974898       0.977783       0.988611   \n",
       "oofs_agreed_9        0.976446       0.975453       0.977023       0.982953   \n",
       "oofs_agreed_10       0.976906       0.976168       0.981811       0.986930   \n",
       "oofs_agreed_11       0.971661       0.971536       0.977934       0.983626   \n",
       "oofs_agreed_12       0.976735       0.976450       0.981879       0.979390   \n",
       "oofs_agreed_13       0.976505       0.975633       0.983655       0.982267   \n",
       "oofs_agreed_14       0.976157       0.974975       0.976346       0.982601   \n",
       "oofs_agreed_15       0.977627       0.976913       0.983055       0.986865   \n",
       "oofs_agreed_16       0.977743       0.975815       0.978560       0.989011   \n",
       "oofs_agreed_17       0.976696       0.976077       0.982419       0.979822   \n",
       "oofs_agreed_18       0.977415       0.976816       0.984168       0.983091   \n",
       "\n",
       "                oofs_agreed_8  oofs_agreed_9  oofs_agreed_10  oofs_agreed_11  \\\n",
       "oofs_agreed_0        0.975002       0.977427        0.975342        0.976909   \n",
       "oofs_agreed_1        0.978513       0.977921        0.978879        0.974114   \n",
       "oofs_agreed_2        0.976039       0.976866        0.977199        0.973728   \n",
       "oofs_agreed_3        0.979169       0.978624        0.983136        0.979642   \n",
       "oofs_agreed_4        0.977064       0.976446        0.976906        0.971661   \n",
       "oofs_agreed_5        0.974898       0.975453        0.976168        0.971536   \n",
       "oofs_agreed_6        0.977783       0.977023        0.981811        0.977934   \n",
       "oofs_agreed_7        0.988611       0.982953        0.986930        0.983626   \n",
       "oofs_agreed_8        1.000000       0.987310        0.990456        0.983518   \n",
       "oofs_agreed_9        0.987310       1.000000        0.984884        0.989911   \n",
       "oofs_agreed_10       0.990456       0.984884        1.000000        0.983689   \n",
       "oofs_agreed_11       0.983518       0.989911        0.983689        1.000000   \n",
       "oofs_agreed_12       0.984780       0.989481        0.987243        0.986204   \n",
       "oofs_agreed_13       0.985321       0.987805        0.989598        0.987806   \n",
       "oofs_agreed_14       0.987066       0.998649        0.984538        0.989436   \n",
       "oofs_agreed_15       0.990289       0.985065        0.995895        0.984225   \n",
       "oofs_agreed_16       0.997975       0.987453        0.991031        0.983439   \n",
       "oofs_agreed_17       0.984720       0.989079        0.987307        0.986123   \n",
       "oofs_agreed_18       0.985544       0.988042        0.990067        0.987201   \n",
       "\n",
       "                oofs_agreed_12  oofs_agreed_13  oofs_agreed_14  \\\n",
       "oofs_agreed_0         0.976050        0.976170        0.976952   \n",
       "oofs_agreed_1         0.978497        0.978545        0.977649   \n",
       "oofs_agreed_2         0.978022        0.977212        0.976467   \n",
       "oofs_agreed_3         0.983276        0.984887        0.978003   \n",
       "oofs_agreed_4         0.976735        0.976505        0.976157   \n",
       "oofs_agreed_5         0.976450        0.975633        0.974975   \n",
       "oofs_agreed_6         0.981879        0.983655        0.976346   \n",
       "oofs_agreed_7         0.979390        0.982267        0.982601   \n",
       "oofs_agreed_8         0.984780        0.985321        0.987066   \n",
       "oofs_agreed_9         0.989481        0.987805        0.998649   \n",
       "oofs_agreed_10        0.987243        0.989598        0.984538   \n",
       "oofs_agreed_11        0.986204        0.987806        0.989436   \n",
       "oofs_agreed_12        1.000000        0.991436        0.989118   \n",
       "oofs_agreed_13        0.991436        1.000000        0.987627   \n",
       "oofs_agreed_14        0.989118        0.987627        1.000000   \n",
       "oofs_agreed_15        0.987471        0.990288        0.984773   \n",
       "oofs_agreed_16        0.985098        0.985720        0.987139   \n",
       "oofs_agreed_17        0.998035        0.991746        0.988966   \n",
       "oofs_agreed_18        0.991292        0.996646        0.987761   \n",
       "\n",
       "                oofs_agreed_15  oofs_agreed_16  oofs_agreed_17  oofs_agreed_18  \n",
       "oofs_agreed_0         0.976174        0.975576        0.975719        0.976823  \n",
       "oofs_agreed_1         0.979854        0.979520        0.978561        0.979853  \n",
       "oofs_agreed_2         0.977935        0.976717        0.977700        0.977855  \n",
       "oofs_agreed_3         0.984594        0.979907        0.983532        0.985561  \n",
       "oofs_agreed_4         0.977627        0.977743        0.976696        0.977415  \n",
       "oofs_agreed_5         0.976913        0.975815        0.976077        0.976816  \n",
       "oofs_agreed_6         0.983055        0.978560        0.982419        0.984168  \n",
       "oofs_agreed_7         0.986865        0.989011        0.979822        0.983091  \n",
       "oofs_agreed_8         0.990289        0.997975        0.984720        0.985544  \n",
       "oofs_agreed_9         0.985065        0.987453        0.989079        0.988042  \n",
       "oofs_agreed_10        0.995895        0.991031        0.987307        0.990067  \n",
       "oofs_agreed_11        0.984225        0.983439        0.986123        0.987201  \n",
       "oofs_agreed_12        0.987471        0.985098        0.998035        0.991292  \n",
       "oofs_agreed_13        0.990288        0.985720        0.991746        0.996646  \n",
       "oofs_agreed_14        0.984773        0.987139        0.988966        0.987761  \n",
       "oofs_agreed_15        1.000000        0.991009        0.987715        0.990837  \n",
       "oofs_agreed_16        0.991009        1.000000        0.985160        0.986302  \n",
       "oofs_agreed_17        0.987715        0.985160        1.000000        0.991585  \n",
       "oofs_agreed_18        0.990837        0.986302        0.991585        1.000000  "
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "agreeds.corr()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_disagreeds_0</th>\n",
       "      <th>oofs_disagreeds_1</th>\n",
       "      <th>oofs_disagreeds_2</th>\n",
       "      <th>oofs_disagreeds_3</th>\n",
       "      <th>oofs_disagreeds_4</th>\n",
       "      <th>oofs_disagreeds_5</th>\n",
       "      <th>oofs_disagreeds_6</th>\n",
       "      <th>oofs_disagreeds_7</th>\n",
       "      <th>oofs_disagreeds_8</th>\n",
       "      <th>oofs_disagreeds_9</th>\n",
       "      <th>oofs_disagreeds_10</th>\n",
       "      <th>oofs_disagreeds_11</th>\n",
       "      <th>oofs_disagreeds_12</th>\n",
       "      <th>oofs_disagreeds_13</th>\n",
       "      <th>oofs_disagreeds_14</th>\n",
       "      <th>oofs_disagreeds_15</th>\n",
       "      <th>oofs_disagreeds_16</th>\n",
       "      <th>oofs_disagreeds_17</th>\n",
       "      <th>oofs_disagreeds_18</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.956195</td>\n",
       "      <td>0.965381</td>\n",
       "      <td>0.967006</td>\n",
       "      <td>0.970607</td>\n",
       "      <td>0.973446</td>\n",
       "      <td>0.969427</td>\n",
       "      <td>0.947563</td>\n",
       "      <td>0.953924</td>\n",
       "      <td>0.953973</td>\n",
       "      <td>0.953546</td>\n",
       "      <td>0.955650</td>\n",
       "      <td>0.952962</td>\n",
       "      <td>0.952829</td>\n",
       "      <td>0.948303</td>\n",
       "      <td>0.949407</td>\n",
       "      <td>0.948455</td>\n",
       "      <td>0.949762</td>\n",
       "      <td>0.947914</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_1</th>\n",
       "      <td>0.956195</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.972663</td>\n",
       "      <td>0.969680</td>\n",
       "      <td>0.980875</td>\n",
       "      <td>0.961191</td>\n",
       "      <td>0.958854</td>\n",
       "      <td>0.940807</td>\n",
       "      <td>0.950593</td>\n",
       "      <td>0.941773</td>\n",
       "      <td>0.946255</td>\n",
       "      <td>0.942006</td>\n",
       "      <td>0.941758</td>\n",
       "      <td>0.944460</td>\n",
       "      <td>0.953687</td>\n",
       "      <td>0.960452</td>\n",
       "      <td>0.958069</td>\n",
       "      <td>0.955050</td>\n",
       "      <td>0.956554</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_2</th>\n",
       "      <td>0.965381</td>\n",
       "      <td>0.972663</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.973263</td>\n",
       "      <td>0.974988</td>\n",
       "      <td>0.984404</td>\n",
       "      <td>0.967257</td>\n",
       "      <td>0.938211</td>\n",
       "      <td>0.951614</td>\n",
       "      <td>0.948799</td>\n",
       "      <td>0.949472</td>\n",
       "      <td>0.945175</td>\n",
       "      <td>0.947688</td>\n",
       "      <td>0.950114</td>\n",
       "      <td>0.955320</td>\n",
       "      <td>0.955833</td>\n",
       "      <td>0.955834</td>\n",
       "      <td>0.956771</td>\n",
       "      <td>0.954623</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_3</th>\n",
       "      <td>0.967006</td>\n",
       "      <td>0.969680</td>\n",
       "      <td>0.973263</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.973872</td>\n",
       "      <td>0.970303</td>\n",
       "      <td>0.986330</td>\n",
       "      <td>0.955033</td>\n",
       "      <td>0.962220</td>\n",
       "      <td>0.955903</td>\n",
       "      <td>0.963340</td>\n",
       "      <td>0.957298</td>\n",
       "      <td>0.961192</td>\n",
       "      <td>0.963206</td>\n",
       "      <td>0.959059</td>\n",
       "      <td>0.969456</td>\n",
       "      <td>0.963931</td>\n",
       "      <td>0.967005</td>\n",
       "      <td>0.968558</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_4</th>\n",
       "      <td>0.970607</td>\n",
       "      <td>0.980875</td>\n",
       "      <td>0.974988</td>\n",
       "      <td>0.973872</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.976251</td>\n",
       "      <td>0.972630</td>\n",
       "      <td>0.948467</td>\n",
       "      <td>0.960164</td>\n",
       "      <td>0.957989</td>\n",
       "      <td>0.960040</td>\n",
       "      <td>0.955228</td>\n",
       "      <td>0.957282</td>\n",
       "      <td>0.957418</td>\n",
       "      <td>0.956558</td>\n",
       "      <td>0.959968</td>\n",
       "      <td>0.958002</td>\n",
       "      <td>0.959996</td>\n",
       "      <td>0.956790</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_5</th>\n",
       "      <td>0.973446</td>\n",
       "      <td>0.961191</td>\n",
       "      <td>0.984404</td>\n",
       "      <td>0.970303</td>\n",
       "      <td>0.976251</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.971418</td>\n",
       "      <td>0.938822</td>\n",
       "      <td>0.953536</td>\n",
       "      <td>0.955377</td>\n",
       "      <td>0.955229</td>\n",
       "      <td>0.950740</td>\n",
       "      <td>0.954774</td>\n",
       "      <td>0.956343</td>\n",
       "      <td>0.949685</td>\n",
       "      <td>0.951494</td>\n",
       "      <td>0.949397</td>\n",
       "      <td>0.953315</td>\n",
       "      <td>0.948816</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_6</th>\n",
       "      <td>0.969427</td>\n",
       "      <td>0.958854</td>\n",
       "      <td>0.967257</td>\n",
       "      <td>0.986330</td>\n",
       "      <td>0.972630</td>\n",
       "      <td>0.971418</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.954179</td>\n",
       "      <td>0.960763</td>\n",
       "      <td>0.957241</td>\n",
       "      <td>0.964684</td>\n",
       "      <td>0.959270</td>\n",
       "      <td>0.963397</td>\n",
       "      <td>0.965154</td>\n",
       "      <td>0.951198</td>\n",
       "      <td>0.961519</td>\n",
       "      <td>0.955037</td>\n",
       "      <td>0.960886</td>\n",
       "      <td>0.960835</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_7</th>\n",
       "      <td>0.947563</td>\n",
       "      <td>0.940807</td>\n",
       "      <td>0.938211</td>\n",
       "      <td>0.955033</td>\n",
       "      <td>0.948467</td>\n",
       "      <td>0.938822</td>\n",
       "      <td>0.954179</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.975049</td>\n",
       "      <td>0.961024</td>\n",
       "      <td>0.967412</td>\n",
       "      <td>0.970211</td>\n",
       "      <td>0.958116</td>\n",
       "      <td>0.962381</td>\n",
       "      <td>0.960166</td>\n",
       "      <td>0.969288</td>\n",
       "      <td>0.969771</td>\n",
       "      <td>0.959363</td>\n",
       "      <td>0.964948</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_8</th>\n",
       "      <td>0.953924</td>\n",
       "      <td>0.950593</td>\n",
       "      <td>0.951614</td>\n",
       "      <td>0.962220</td>\n",
       "      <td>0.960164</td>\n",
       "      <td>0.953536</td>\n",
       "      <td>0.960763</td>\n",
       "      <td>0.975049</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.976337</td>\n",
       "      <td>0.977972</td>\n",
       "      <td>0.971753</td>\n",
       "      <td>0.971154</td>\n",
       "      <td>0.973719</td>\n",
       "      <td>0.972987</td>\n",
       "      <td>0.978265</td>\n",
       "      <td>0.990127</td>\n",
       "      <td>0.972071</td>\n",
       "      <td>0.971182</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_9</th>\n",
       "      <td>0.953973</td>\n",
       "      <td>0.941773</td>\n",
       "      <td>0.948799</td>\n",
       "      <td>0.955903</td>\n",
       "      <td>0.957989</td>\n",
       "      <td>0.955377</td>\n",
       "      <td>0.957241</td>\n",
       "      <td>0.961024</td>\n",
       "      <td>0.976337</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.972073</td>\n",
       "      <td>0.978640</td>\n",
       "      <td>0.978412</td>\n",
       "      <td>0.978475</td>\n",
       "      <td>0.987397</td>\n",
       "      <td>0.966143</td>\n",
       "      <td>0.969127</td>\n",
       "      <td>0.976464</td>\n",
       "      <td>0.970646</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_10</th>\n",
       "      <td>0.953546</td>\n",
       "      <td>0.946255</td>\n",
       "      <td>0.949472</td>\n",
       "      <td>0.963340</td>\n",
       "      <td>0.960040</td>\n",
       "      <td>0.955229</td>\n",
       "      <td>0.964684</td>\n",
       "      <td>0.967412</td>\n",
       "      <td>0.977972</td>\n",
       "      <td>0.972073</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.970571</td>\n",
       "      <td>0.973727</td>\n",
       "      <td>0.978318</td>\n",
       "      <td>0.963091</td>\n",
       "      <td>0.981756</td>\n",
       "      <td>0.971032</td>\n",
       "      <td>0.969401</td>\n",
       "      <td>0.970960</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_11</th>\n",
       "      <td>0.955650</td>\n",
       "      <td>0.942006</td>\n",
       "      <td>0.945175</td>\n",
       "      <td>0.957298</td>\n",
       "      <td>0.955228</td>\n",
       "      <td>0.950740</td>\n",
       "      <td>0.959270</td>\n",
       "      <td>0.970211</td>\n",
       "      <td>0.971753</td>\n",
       "      <td>0.978640</td>\n",
       "      <td>0.970571</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.975214</td>\n",
       "      <td>0.977735</td>\n",
       "      <td>0.968885</td>\n",
       "      <td>0.965114</td>\n",
       "      <td>0.963684</td>\n",
       "      <td>0.970969</td>\n",
       "      <td>0.968152</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_12</th>\n",
       "      <td>0.952962</td>\n",
       "      <td>0.941758</td>\n",
       "      <td>0.947688</td>\n",
       "      <td>0.961192</td>\n",
       "      <td>0.957282</td>\n",
       "      <td>0.954774</td>\n",
       "      <td>0.963397</td>\n",
       "      <td>0.958116</td>\n",
       "      <td>0.971154</td>\n",
       "      <td>0.978412</td>\n",
       "      <td>0.973727</td>\n",
       "      <td>0.975214</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981216</td>\n",
       "      <td>0.968082</td>\n",
       "      <td>0.966762</td>\n",
       "      <td>0.963199</td>\n",
       "      <td>0.987711</td>\n",
       "      <td>0.970934</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_13</th>\n",
       "      <td>0.952829</td>\n",
       "      <td>0.944460</td>\n",
       "      <td>0.950114</td>\n",
       "      <td>0.963206</td>\n",
       "      <td>0.957418</td>\n",
       "      <td>0.956343</td>\n",
       "      <td>0.965154</td>\n",
       "      <td>0.962381</td>\n",
       "      <td>0.973719</td>\n",
       "      <td>0.978475</td>\n",
       "      <td>0.978318</td>\n",
       "      <td>0.977735</td>\n",
       "      <td>0.981216</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.969075</td>\n",
       "      <td>0.972275</td>\n",
       "      <td>0.966843</td>\n",
       "      <td>0.977263</td>\n",
       "      <td>0.980874</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_14</th>\n",
       "      <td>0.948303</td>\n",
       "      <td>0.953687</td>\n",
       "      <td>0.955320</td>\n",
       "      <td>0.959059</td>\n",
       "      <td>0.956558</td>\n",
       "      <td>0.949685</td>\n",
       "      <td>0.951198</td>\n",
       "      <td>0.960166</td>\n",
       "      <td>0.972987</td>\n",
       "      <td>0.987397</td>\n",
       "      <td>0.963091</td>\n",
       "      <td>0.968885</td>\n",
       "      <td>0.968082</td>\n",
       "      <td>0.969075</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.972104</td>\n",
       "      <td>0.977651</td>\n",
       "      <td>0.980593</td>\n",
       "      <td>0.978070</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_15</th>\n",
       "      <td>0.949407</td>\n",
       "      <td>0.960452</td>\n",
       "      <td>0.955833</td>\n",
       "      <td>0.969456</td>\n",
       "      <td>0.959968</td>\n",
       "      <td>0.951494</td>\n",
       "      <td>0.961519</td>\n",
       "      <td>0.969288</td>\n",
       "      <td>0.978265</td>\n",
       "      <td>0.966143</td>\n",
       "      <td>0.981756</td>\n",
       "      <td>0.965114</td>\n",
       "      <td>0.966762</td>\n",
       "      <td>0.972275</td>\n",
       "      <td>0.972104</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982537</td>\n",
       "      <td>0.975701</td>\n",
       "      <td>0.980916</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_16</th>\n",
       "      <td>0.948455</td>\n",
       "      <td>0.958069</td>\n",
       "      <td>0.955834</td>\n",
       "      <td>0.963931</td>\n",
       "      <td>0.958002</td>\n",
       "      <td>0.949397</td>\n",
       "      <td>0.955037</td>\n",
       "      <td>0.969771</td>\n",
       "      <td>0.990127</td>\n",
       "      <td>0.969127</td>\n",
       "      <td>0.971032</td>\n",
       "      <td>0.963684</td>\n",
       "      <td>0.963199</td>\n",
       "      <td>0.966843</td>\n",
       "      <td>0.977651</td>\n",
       "      <td>0.982537</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.973746</td>\n",
       "      <td>0.975964</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_17</th>\n",
       "      <td>0.949762</td>\n",
       "      <td>0.955050</td>\n",
       "      <td>0.956771</td>\n",
       "      <td>0.967005</td>\n",
       "      <td>0.959996</td>\n",
       "      <td>0.953315</td>\n",
       "      <td>0.960886</td>\n",
       "      <td>0.959363</td>\n",
       "      <td>0.972071</td>\n",
       "      <td>0.976464</td>\n",
       "      <td>0.969401</td>\n",
       "      <td>0.970969</td>\n",
       "      <td>0.987711</td>\n",
       "      <td>0.977263</td>\n",
       "      <td>0.980593</td>\n",
       "      <td>0.975701</td>\n",
       "      <td>0.973746</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981802</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_disagreeds_18</th>\n",
       "      <td>0.947914</td>\n",
       "      <td>0.956554</td>\n",
       "      <td>0.954623</td>\n",
       "      <td>0.968558</td>\n",
       "      <td>0.956790</td>\n",
       "      <td>0.948816</td>\n",
       "      <td>0.960835</td>\n",
       "      <td>0.964948</td>\n",
       "      <td>0.971182</td>\n",
       "      <td>0.970646</td>\n",
       "      <td>0.970960</td>\n",
       "      <td>0.968152</td>\n",
       "      <td>0.970934</td>\n",
       "      <td>0.980874</td>\n",
       "      <td>0.978070</td>\n",
       "      <td>0.980916</td>\n",
       "      <td>0.975964</td>\n",
       "      <td>0.981802</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                    oofs_disagreeds_0  oofs_disagreeds_1  oofs_disagreeds_2  \\\n",
       "oofs_disagreeds_0            1.000000           0.956195           0.965381   \n",
       "oofs_disagreeds_1            0.956195           1.000000           0.972663   \n",
       "oofs_disagreeds_2            0.965381           0.972663           1.000000   \n",
       "oofs_disagreeds_3            0.967006           0.969680           0.973263   \n",
       "oofs_disagreeds_4            0.970607           0.980875           0.974988   \n",
       "oofs_disagreeds_5            0.973446           0.961191           0.984404   \n",
       "oofs_disagreeds_6            0.969427           0.958854           0.967257   \n",
       "oofs_disagreeds_7            0.947563           0.940807           0.938211   \n",
       "oofs_disagreeds_8            0.953924           0.950593           0.951614   \n",
       "oofs_disagreeds_9            0.953973           0.941773           0.948799   \n",
       "oofs_disagreeds_10           0.953546           0.946255           0.949472   \n",
       "oofs_disagreeds_11           0.955650           0.942006           0.945175   \n",
       "oofs_disagreeds_12           0.952962           0.941758           0.947688   \n",
       "oofs_disagreeds_13           0.952829           0.944460           0.950114   \n",
       "oofs_disagreeds_14           0.948303           0.953687           0.955320   \n",
       "oofs_disagreeds_15           0.949407           0.960452           0.955833   \n",
       "oofs_disagreeds_16           0.948455           0.958069           0.955834   \n",
       "oofs_disagreeds_17           0.949762           0.955050           0.956771   \n",
       "oofs_disagreeds_18           0.947914           0.956554           0.954623   \n",
       "\n",
       "                    oofs_disagreeds_3  oofs_disagreeds_4  oofs_disagreeds_5  \\\n",
       "oofs_disagreeds_0            0.967006           0.970607           0.973446   \n",
       "oofs_disagreeds_1            0.969680           0.980875           0.961191   \n",
       "oofs_disagreeds_2            0.973263           0.974988           0.984404   \n",
       "oofs_disagreeds_3            1.000000           0.973872           0.970303   \n",
       "oofs_disagreeds_4            0.973872           1.000000           0.976251   \n",
       "oofs_disagreeds_5            0.970303           0.976251           1.000000   \n",
       "oofs_disagreeds_6            0.986330           0.972630           0.971418   \n",
       "oofs_disagreeds_7            0.955033           0.948467           0.938822   \n",
       "oofs_disagreeds_8            0.962220           0.960164           0.953536   \n",
       "oofs_disagreeds_9            0.955903           0.957989           0.955377   \n",
       "oofs_disagreeds_10           0.963340           0.960040           0.955229   \n",
       "oofs_disagreeds_11           0.957298           0.955228           0.950740   \n",
       "oofs_disagreeds_12           0.961192           0.957282           0.954774   \n",
       "oofs_disagreeds_13           0.963206           0.957418           0.956343   \n",
       "oofs_disagreeds_14           0.959059           0.956558           0.949685   \n",
       "oofs_disagreeds_15           0.969456           0.959968           0.951494   \n",
       "oofs_disagreeds_16           0.963931           0.958002           0.949397   \n",
       "oofs_disagreeds_17           0.967005           0.959996           0.953315   \n",
       "oofs_disagreeds_18           0.968558           0.956790           0.948816   \n",
       "\n",
       "                    oofs_disagreeds_6  oofs_disagreeds_7  oofs_disagreeds_8  \\\n",
       "oofs_disagreeds_0            0.969427           0.947563           0.953924   \n",
       "oofs_disagreeds_1            0.958854           0.940807           0.950593   \n",
       "oofs_disagreeds_2            0.967257           0.938211           0.951614   \n",
       "oofs_disagreeds_3            0.986330           0.955033           0.962220   \n",
       "oofs_disagreeds_4            0.972630           0.948467           0.960164   \n",
       "oofs_disagreeds_5            0.971418           0.938822           0.953536   \n",
       "oofs_disagreeds_6            1.000000           0.954179           0.960763   \n",
       "oofs_disagreeds_7            0.954179           1.000000           0.975049   \n",
       "oofs_disagreeds_8            0.960763           0.975049           1.000000   \n",
       "oofs_disagreeds_9            0.957241           0.961024           0.976337   \n",
       "oofs_disagreeds_10           0.964684           0.967412           0.977972   \n",
       "oofs_disagreeds_11           0.959270           0.970211           0.971753   \n",
       "oofs_disagreeds_12           0.963397           0.958116           0.971154   \n",
       "oofs_disagreeds_13           0.965154           0.962381           0.973719   \n",
       "oofs_disagreeds_14           0.951198           0.960166           0.972987   \n",
       "oofs_disagreeds_15           0.961519           0.969288           0.978265   \n",
       "oofs_disagreeds_16           0.955037           0.969771           0.990127   \n",
       "oofs_disagreeds_17           0.960886           0.959363           0.972071   \n",
       "oofs_disagreeds_18           0.960835           0.964948           0.971182   \n",
       "\n",
       "                    oofs_disagreeds_9  oofs_disagreeds_10  oofs_disagreeds_11  \\\n",
       "oofs_disagreeds_0            0.953973            0.953546            0.955650   \n",
       "oofs_disagreeds_1            0.941773            0.946255            0.942006   \n",
       "oofs_disagreeds_2            0.948799            0.949472            0.945175   \n",
       "oofs_disagreeds_3            0.955903            0.963340            0.957298   \n",
       "oofs_disagreeds_4            0.957989            0.960040            0.955228   \n",
       "oofs_disagreeds_5            0.955377            0.955229            0.950740   \n",
       "oofs_disagreeds_6            0.957241            0.964684            0.959270   \n",
       "oofs_disagreeds_7            0.961024            0.967412            0.970211   \n",
       "oofs_disagreeds_8            0.976337            0.977972            0.971753   \n",
       "oofs_disagreeds_9            1.000000            0.972073            0.978640   \n",
       "oofs_disagreeds_10           0.972073            1.000000            0.970571   \n",
       "oofs_disagreeds_11           0.978640            0.970571            1.000000   \n",
       "oofs_disagreeds_12           0.978412            0.973727            0.975214   \n",
       "oofs_disagreeds_13           0.978475            0.978318            0.977735   \n",
       "oofs_disagreeds_14           0.987397            0.963091            0.968885   \n",
       "oofs_disagreeds_15           0.966143            0.981756            0.965114   \n",
       "oofs_disagreeds_16           0.969127            0.971032            0.963684   \n",
       "oofs_disagreeds_17           0.976464            0.969401            0.970969   \n",
       "oofs_disagreeds_18           0.970646            0.970960            0.968152   \n",
       "\n",
       "                    oofs_disagreeds_12  oofs_disagreeds_13  \\\n",
       "oofs_disagreeds_0             0.952962            0.952829   \n",
       "oofs_disagreeds_1             0.941758            0.944460   \n",
       "oofs_disagreeds_2             0.947688            0.950114   \n",
       "oofs_disagreeds_3             0.961192            0.963206   \n",
       "oofs_disagreeds_4             0.957282            0.957418   \n",
       "oofs_disagreeds_5             0.954774            0.956343   \n",
       "oofs_disagreeds_6             0.963397            0.965154   \n",
       "oofs_disagreeds_7             0.958116            0.962381   \n",
       "oofs_disagreeds_8             0.971154            0.973719   \n",
       "oofs_disagreeds_9             0.978412            0.978475   \n",
       "oofs_disagreeds_10            0.973727            0.978318   \n",
       "oofs_disagreeds_11            0.975214            0.977735   \n",
       "oofs_disagreeds_12            1.000000            0.981216   \n",
       "oofs_disagreeds_13            0.981216            1.000000   \n",
       "oofs_disagreeds_14            0.968082            0.969075   \n",
       "oofs_disagreeds_15            0.966762            0.972275   \n",
       "oofs_disagreeds_16            0.963199            0.966843   \n",
       "oofs_disagreeds_17            0.987711            0.977263   \n",
       "oofs_disagreeds_18            0.970934            0.980874   \n",
       "\n",
       "                    oofs_disagreeds_14  oofs_disagreeds_15  \\\n",
       "oofs_disagreeds_0             0.948303            0.949407   \n",
       "oofs_disagreeds_1             0.953687            0.960452   \n",
       "oofs_disagreeds_2             0.955320            0.955833   \n",
       "oofs_disagreeds_3             0.959059            0.969456   \n",
       "oofs_disagreeds_4             0.956558            0.959968   \n",
       "oofs_disagreeds_5             0.949685            0.951494   \n",
       "oofs_disagreeds_6             0.951198            0.961519   \n",
       "oofs_disagreeds_7             0.960166            0.969288   \n",
       "oofs_disagreeds_8             0.972987            0.978265   \n",
       "oofs_disagreeds_9             0.987397            0.966143   \n",
       "oofs_disagreeds_10            0.963091            0.981756   \n",
       "oofs_disagreeds_11            0.968885            0.965114   \n",
       "oofs_disagreeds_12            0.968082            0.966762   \n",
       "oofs_disagreeds_13            0.969075            0.972275   \n",
       "oofs_disagreeds_14            1.000000            0.972104   \n",
       "oofs_disagreeds_15            0.972104            1.000000   \n",
       "oofs_disagreeds_16            0.977651            0.982537   \n",
       "oofs_disagreeds_17            0.980593            0.975701   \n",
       "oofs_disagreeds_18            0.978070            0.980916   \n",
       "\n",
       "                    oofs_disagreeds_16  oofs_disagreeds_17  oofs_disagreeds_18  \n",
       "oofs_disagreeds_0             0.948455            0.949762            0.947914  \n",
       "oofs_disagreeds_1             0.958069            0.955050            0.956554  \n",
       "oofs_disagreeds_2             0.955834            0.956771            0.954623  \n",
       "oofs_disagreeds_3             0.963931            0.967005            0.968558  \n",
       "oofs_disagreeds_4             0.958002            0.959996            0.956790  \n",
       "oofs_disagreeds_5             0.949397            0.953315            0.948816  \n",
       "oofs_disagreeds_6             0.955037            0.960886            0.960835  \n",
       "oofs_disagreeds_7             0.969771            0.959363            0.964948  \n",
       "oofs_disagreeds_8             0.990127            0.972071            0.971182  \n",
       "oofs_disagreeds_9             0.969127            0.976464            0.970646  \n",
       "oofs_disagreeds_10            0.971032            0.969401            0.970960  \n",
       "oofs_disagreeds_11            0.963684            0.970969            0.968152  \n",
       "oofs_disagreeds_12            0.963199            0.987711            0.970934  \n",
       "oofs_disagreeds_13            0.966843            0.977263            0.980874  \n",
       "oofs_disagreeds_14            0.977651            0.980593            0.978070  \n",
       "oofs_disagreeds_15            0.982537            0.975701            0.980916  \n",
       "oofs_disagreeds_16            1.000000            0.973746            0.975964  \n",
       "oofs_disagreeds_17            0.973746            1.000000            0.981802  \n",
       "oofs_disagreeds_18            0.975964            0.981802            1.000000  "
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "disagreeds.corr()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>oofs_unrelated_0</th>\n",
       "      <th>oofs_unrelated_1</th>\n",
       "      <th>oofs_unrelated_2</th>\n",
       "      <th>oofs_unrelated_3</th>\n",
       "      <th>oofs_unrelated_4</th>\n",
       "      <th>oofs_unrelated_5</th>\n",
       "      <th>oofs_unrelated_6</th>\n",
       "      <th>oofs_unrelated_7</th>\n",
       "      <th>oofs_unrelated_8</th>\n",
       "      <th>oofs_unrelated_9</th>\n",
       "      <th>oofs_unrelated_10</th>\n",
       "      <th>oofs_unrelated_11</th>\n",
       "      <th>oofs_unrelated_12</th>\n",
       "      <th>oofs_unrelated_13</th>\n",
       "      <th>oofs_unrelated_14</th>\n",
       "      <th>oofs_unrelated_15</th>\n",
       "      <th>oofs_unrelated_16</th>\n",
       "      <th>oofs_unrelated_17</th>\n",
       "      <th>oofs_unrelated_18</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.979483</td>\n",
       "      <td>0.982918</td>\n",
       "      <td>0.982116</td>\n",
       "      <td>0.980407</td>\n",
       "      <td>0.983841</td>\n",
       "      <td>0.982145</td>\n",
       "      <td>0.967810</td>\n",
       "      <td>0.969136</td>\n",
       "      <td>0.971615</td>\n",
       "      <td>0.969398</td>\n",
       "      <td>0.971206</td>\n",
       "      <td>0.969555</td>\n",
       "      <td>0.970029</td>\n",
       "      <td>0.970348</td>\n",
       "      <td>0.969500</td>\n",
       "      <td>0.968974</td>\n",
       "      <td>0.969428</td>\n",
       "      <td>0.970400</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_1</th>\n",
       "      <td>0.979483</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985805</td>\n",
       "      <td>0.983235</td>\n",
       "      <td>0.993880</td>\n",
       "      <td>0.983194</td>\n",
       "      <td>0.981397</td>\n",
       "      <td>0.970652</td>\n",
       "      <td>0.972543</td>\n",
       "      <td>0.969819</td>\n",
       "      <td>0.972554</td>\n",
       "      <td>0.966131</td>\n",
       "      <td>0.969558</td>\n",
       "      <td>0.970829</td>\n",
       "      <td>0.973004</td>\n",
       "      <td>0.975851</td>\n",
       "      <td>0.975135</td>\n",
       "      <td>0.973627</td>\n",
       "      <td>0.975486</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_2</th>\n",
       "      <td>0.982918</td>\n",
       "      <td>0.985805</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984631</td>\n",
       "      <td>0.985170</td>\n",
       "      <td>0.992641</td>\n",
       "      <td>0.983242</td>\n",
       "      <td>0.964436</td>\n",
       "      <td>0.969656</td>\n",
       "      <td>0.968727</td>\n",
       "      <td>0.970621</td>\n",
       "      <td>0.965539</td>\n",
       "      <td>0.969039</td>\n",
       "      <td>0.969442</td>\n",
       "      <td>0.971613</td>\n",
       "      <td>0.973276</td>\n",
       "      <td>0.971808</td>\n",
       "      <td>0.972618</td>\n",
       "      <td>0.973165</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_3</th>\n",
       "      <td>0.982116</td>\n",
       "      <td>0.983235</td>\n",
       "      <td>0.984631</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981340</td>\n",
       "      <td>0.981809</td>\n",
       "      <td>0.994782</td>\n",
       "      <td>0.971740</td>\n",
       "      <td>0.974504</td>\n",
       "      <td>0.972338</td>\n",
       "      <td>0.978712</td>\n",
       "      <td>0.973816</td>\n",
       "      <td>0.977123</td>\n",
       "      <td>0.979705</td>\n",
       "      <td>0.973593</td>\n",
       "      <td>0.981349</td>\n",
       "      <td>0.975971</td>\n",
       "      <td>0.979870</td>\n",
       "      <td>0.982335</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_4</th>\n",
       "      <td>0.980407</td>\n",
       "      <td>0.993880</td>\n",
       "      <td>0.985170</td>\n",
       "      <td>0.981340</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.985285</td>\n",
       "      <td>0.980849</td>\n",
       "      <td>0.969350</td>\n",
       "      <td>0.972134</td>\n",
       "      <td>0.970553</td>\n",
       "      <td>0.971981</td>\n",
       "      <td>0.965403</td>\n",
       "      <td>0.970200</td>\n",
       "      <td>0.970659</td>\n",
       "      <td>0.971251</td>\n",
       "      <td>0.973084</td>\n",
       "      <td>0.973036</td>\n",
       "      <td>0.972064</td>\n",
       "      <td>0.972748</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_5</th>\n",
       "      <td>0.983841</td>\n",
       "      <td>0.983194</td>\n",
       "      <td>0.992641</td>\n",
       "      <td>0.981809</td>\n",
       "      <td>0.985285</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982544</td>\n",
       "      <td>0.964530</td>\n",
       "      <td>0.968744</td>\n",
       "      <td>0.969367</td>\n",
       "      <td>0.970257</td>\n",
       "      <td>0.964712</td>\n",
       "      <td>0.970017</td>\n",
       "      <td>0.969717</td>\n",
       "      <td>0.968046</td>\n",
       "      <td>0.970220</td>\n",
       "      <td>0.969092</td>\n",
       "      <td>0.969949</td>\n",
       "      <td>0.970316</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_6</th>\n",
       "      <td>0.982145</td>\n",
       "      <td>0.981397</td>\n",
       "      <td>0.983242</td>\n",
       "      <td>0.994782</td>\n",
       "      <td>0.980849</td>\n",
       "      <td>0.982544</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.970761</td>\n",
       "      <td>0.973068</td>\n",
       "      <td>0.971318</td>\n",
       "      <td>0.977646</td>\n",
       "      <td>0.972614</td>\n",
       "      <td>0.976537</td>\n",
       "      <td>0.979104</td>\n",
       "      <td>0.970859</td>\n",
       "      <td>0.978754</td>\n",
       "      <td>0.973566</td>\n",
       "      <td>0.978102</td>\n",
       "      <td>0.979990</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_7</th>\n",
       "      <td>0.967810</td>\n",
       "      <td>0.970652</td>\n",
       "      <td>0.964436</td>\n",
       "      <td>0.971740</td>\n",
       "      <td>0.969350</td>\n",
       "      <td>0.964530</td>\n",
       "      <td>0.970761</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.986337</td>\n",
       "      <td>0.978792</td>\n",
       "      <td>0.983965</td>\n",
       "      <td>0.980390</td>\n",
       "      <td>0.974268</td>\n",
       "      <td>0.978164</td>\n",
       "      <td>0.978848</td>\n",
       "      <td>0.983940</td>\n",
       "      <td>0.986260</td>\n",
       "      <td>0.975830</td>\n",
       "      <td>0.979610</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_8</th>\n",
       "      <td>0.969136</td>\n",
       "      <td>0.972543</td>\n",
       "      <td>0.969656</td>\n",
       "      <td>0.974504</td>\n",
       "      <td>0.972134</td>\n",
       "      <td>0.968744</td>\n",
       "      <td>0.973068</td>\n",
       "      <td>0.986337</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.984056</td>\n",
       "      <td>0.987820</td>\n",
       "      <td>0.979701</td>\n",
       "      <td>0.980506</td>\n",
       "      <td>0.981747</td>\n",
       "      <td>0.983653</td>\n",
       "      <td>0.987454</td>\n",
       "      <td>0.996736</td>\n",
       "      <td>0.981314</td>\n",
       "      <td>0.982084</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_9</th>\n",
       "      <td>0.971615</td>\n",
       "      <td>0.969819</td>\n",
       "      <td>0.968727</td>\n",
       "      <td>0.972338</td>\n",
       "      <td>0.970553</td>\n",
       "      <td>0.969367</td>\n",
       "      <td>0.971318</td>\n",
       "      <td>0.978792</td>\n",
       "      <td>0.984056</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981037</td>\n",
       "      <td>0.987146</td>\n",
       "      <td>0.986571</td>\n",
       "      <td>0.984970</td>\n",
       "      <td>0.996133</td>\n",
       "      <td>0.979866</td>\n",
       "      <td>0.982942</td>\n",
       "      <td>0.985613</td>\n",
       "      <td>0.983777</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_10</th>\n",
       "      <td>0.969398</td>\n",
       "      <td>0.972554</td>\n",
       "      <td>0.970621</td>\n",
       "      <td>0.978712</td>\n",
       "      <td>0.971981</td>\n",
       "      <td>0.970257</td>\n",
       "      <td>0.977646</td>\n",
       "      <td>0.983965</td>\n",
       "      <td>0.987820</td>\n",
       "      <td>0.981037</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.979647</td>\n",
       "      <td>0.983315</td>\n",
       "      <td>0.986668</td>\n",
       "      <td>0.980061</td>\n",
       "      <td>0.993637</td>\n",
       "      <td>0.987707</td>\n",
       "      <td>0.983753</td>\n",
       "      <td>0.986783</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_11</th>\n",
       "      <td>0.971206</td>\n",
       "      <td>0.966131</td>\n",
       "      <td>0.965539</td>\n",
       "      <td>0.973816</td>\n",
       "      <td>0.965403</td>\n",
       "      <td>0.964712</td>\n",
       "      <td>0.972614</td>\n",
       "      <td>0.980390</td>\n",
       "      <td>0.979701</td>\n",
       "      <td>0.987146</td>\n",
       "      <td>0.979647</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982623</td>\n",
       "      <td>0.984743</td>\n",
       "      <td>0.985059</td>\n",
       "      <td>0.979124</td>\n",
       "      <td>0.978441</td>\n",
       "      <td>0.982137</td>\n",
       "      <td>0.982855</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_12</th>\n",
       "      <td>0.969555</td>\n",
       "      <td>0.969558</td>\n",
       "      <td>0.969039</td>\n",
       "      <td>0.977123</td>\n",
       "      <td>0.970200</td>\n",
       "      <td>0.970017</td>\n",
       "      <td>0.976537</td>\n",
       "      <td>0.974268</td>\n",
       "      <td>0.980506</td>\n",
       "      <td>0.986571</td>\n",
       "      <td>0.983315</td>\n",
       "      <td>0.982623</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.988826</td>\n",
       "      <td>0.983774</td>\n",
       "      <td>0.981790</td>\n",
       "      <td>0.979319</td>\n",
       "      <td>0.995708</td>\n",
       "      <td>0.986629</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_13</th>\n",
       "      <td>0.970029</td>\n",
       "      <td>0.970829</td>\n",
       "      <td>0.969442</td>\n",
       "      <td>0.979705</td>\n",
       "      <td>0.970659</td>\n",
       "      <td>0.969717</td>\n",
       "      <td>0.979104</td>\n",
       "      <td>0.978164</td>\n",
       "      <td>0.981747</td>\n",
       "      <td>0.984970</td>\n",
       "      <td>0.986668</td>\n",
       "      <td>0.984743</td>\n",
       "      <td>0.988826</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.983028</td>\n",
       "      <td>0.986013</td>\n",
       "      <td>0.981009</td>\n",
       "      <td>0.988582</td>\n",
       "      <td>0.993820</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_14</th>\n",
       "      <td>0.970348</td>\n",
       "      <td>0.973004</td>\n",
       "      <td>0.971613</td>\n",
       "      <td>0.973593</td>\n",
       "      <td>0.971251</td>\n",
       "      <td>0.968046</td>\n",
       "      <td>0.970859</td>\n",
       "      <td>0.978848</td>\n",
       "      <td>0.983653</td>\n",
       "      <td>0.996133</td>\n",
       "      <td>0.980061</td>\n",
       "      <td>0.985059</td>\n",
       "      <td>0.983774</td>\n",
       "      <td>0.983028</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.981758</td>\n",
       "      <td>0.984669</td>\n",
       "      <td>0.986677</td>\n",
       "      <td>0.985435</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_15</th>\n",
       "      <td>0.969500</td>\n",
       "      <td>0.975851</td>\n",
       "      <td>0.973276</td>\n",
       "      <td>0.981349</td>\n",
       "      <td>0.973084</td>\n",
       "      <td>0.970220</td>\n",
       "      <td>0.978754</td>\n",
       "      <td>0.983940</td>\n",
       "      <td>0.987454</td>\n",
       "      <td>0.979866</td>\n",
       "      <td>0.993637</td>\n",
       "      <td>0.979124</td>\n",
       "      <td>0.981790</td>\n",
       "      <td>0.986013</td>\n",
       "      <td>0.981758</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.989113</td>\n",
       "      <td>0.984935</td>\n",
       "      <td>0.988874</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_16</th>\n",
       "      <td>0.968974</td>\n",
       "      <td>0.975135</td>\n",
       "      <td>0.971808</td>\n",
       "      <td>0.975971</td>\n",
       "      <td>0.973036</td>\n",
       "      <td>0.969092</td>\n",
       "      <td>0.973566</td>\n",
       "      <td>0.986260</td>\n",
       "      <td>0.996736</td>\n",
       "      <td>0.982942</td>\n",
       "      <td>0.987707</td>\n",
       "      <td>0.978441</td>\n",
       "      <td>0.979319</td>\n",
       "      <td>0.981009</td>\n",
       "      <td>0.984669</td>\n",
       "      <td>0.989113</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.982159</td>\n",
       "      <td>0.983709</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_17</th>\n",
       "      <td>0.969428</td>\n",
       "      <td>0.973627</td>\n",
       "      <td>0.972618</td>\n",
       "      <td>0.979870</td>\n",
       "      <td>0.972064</td>\n",
       "      <td>0.969949</td>\n",
       "      <td>0.978102</td>\n",
       "      <td>0.975830</td>\n",
       "      <td>0.981314</td>\n",
       "      <td>0.985613</td>\n",
       "      <td>0.983753</td>\n",
       "      <td>0.982137</td>\n",
       "      <td>0.995708</td>\n",
       "      <td>0.988582</td>\n",
       "      <td>0.986677</td>\n",
       "      <td>0.984935</td>\n",
       "      <td>0.982159</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.989607</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>oofs_unrelated_18</th>\n",
       "      <td>0.970400</td>\n",
       "      <td>0.975486</td>\n",
       "      <td>0.973165</td>\n",
       "      <td>0.982335</td>\n",
       "      <td>0.972748</td>\n",
       "      <td>0.970316</td>\n",
       "      <td>0.979990</td>\n",
       "      <td>0.979610</td>\n",
       "      <td>0.982084</td>\n",
       "      <td>0.983777</td>\n",
       "      <td>0.986783</td>\n",
       "      <td>0.982855</td>\n",
       "      <td>0.986629</td>\n",
       "      <td>0.993820</td>\n",
       "      <td>0.985435</td>\n",
       "      <td>0.988874</td>\n",
       "      <td>0.983709</td>\n",
       "      <td>0.989607</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                   oofs_unrelated_0  oofs_unrelated_1  oofs_unrelated_2  \\\n",
       "oofs_unrelated_0           1.000000          0.979483          0.982918   \n",
       "oofs_unrelated_1           0.979483          1.000000          0.985805   \n",
       "oofs_unrelated_2           0.982918          0.985805          1.000000   \n",
       "oofs_unrelated_3           0.982116          0.983235          0.984631   \n",
       "oofs_unrelated_4           0.980407          0.993880          0.985170   \n",
       "oofs_unrelated_5           0.983841          0.983194          0.992641   \n",
       "oofs_unrelated_6           0.982145          0.981397          0.983242   \n",
       "oofs_unrelated_7           0.967810          0.970652          0.964436   \n",
       "oofs_unrelated_8           0.969136          0.972543          0.969656   \n",
       "oofs_unrelated_9           0.971615          0.969819          0.968727   \n",
       "oofs_unrelated_10          0.969398          0.972554          0.970621   \n",
       "oofs_unrelated_11          0.971206          0.966131          0.965539   \n",
       "oofs_unrelated_12          0.969555          0.969558          0.969039   \n",
       "oofs_unrelated_13          0.970029          0.970829          0.969442   \n",
       "oofs_unrelated_14          0.970348          0.973004          0.971613   \n",
       "oofs_unrelated_15          0.969500          0.975851          0.973276   \n",
       "oofs_unrelated_16          0.968974          0.975135          0.971808   \n",
       "oofs_unrelated_17          0.969428          0.973627          0.972618   \n",
       "oofs_unrelated_18          0.970400          0.975486          0.973165   \n",
       "\n",
       "                   oofs_unrelated_3  oofs_unrelated_4  oofs_unrelated_5  \\\n",
       "oofs_unrelated_0           0.982116          0.980407          0.983841   \n",
       "oofs_unrelated_1           0.983235          0.993880          0.983194   \n",
       "oofs_unrelated_2           0.984631          0.985170          0.992641   \n",
       "oofs_unrelated_3           1.000000          0.981340          0.981809   \n",
       "oofs_unrelated_4           0.981340          1.000000          0.985285   \n",
       "oofs_unrelated_5           0.981809          0.985285          1.000000   \n",
       "oofs_unrelated_6           0.994782          0.980849          0.982544   \n",
       "oofs_unrelated_7           0.971740          0.969350          0.964530   \n",
       "oofs_unrelated_8           0.974504          0.972134          0.968744   \n",
       "oofs_unrelated_9           0.972338          0.970553          0.969367   \n",
       "oofs_unrelated_10          0.978712          0.971981          0.970257   \n",
       "oofs_unrelated_11          0.973816          0.965403          0.964712   \n",
       "oofs_unrelated_12          0.977123          0.970200          0.970017   \n",
       "oofs_unrelated_13          0.979705          0.970659          0.969717   \n",
       "oofs_unrelated_14          0.973593          0.971251          0.968046   \n",
       "oofs_unrelated_15          0.981349          0.973084          0.970220   \n",
       "oofs_unrelated_16          0.975971          0.973036          0.969092   \n",
       "oofs_unrelated_17          0.979870          0.972064          0.969949   \n",
       "oofs_unrelated_18          0.982335          0.972748          0.970316   \n",
       "\n",
       "                   oofs_unrelated_6  oofs_unrelated_7  oofs_unrelated_8  \\\n",
       "oofs_unrelated_0           0.982145          0.967810          0.969136   \n",
       "oofs_unrelated_1           0.981397          0.970652          0.972543   \n",
       "oofs_unrelated_2           0.983242          0.964436          0.969656   \n",
       "oofs_unrelated_3           0.994782          0.971740          0.974504   \n",
       "oofs_unrelated_4           0.980849          0.969350          0.972134   \n",
       "oofs_unrelated_5           0.982544          0.964530          0.968744   \n",
       "oofs_unrelated_6           1.000000          0.970761          0.973068   \n",
       "oofs_unrelated_7           0.970761          1.000000          0.986337   \n",
       "oofs_unrelated_8           0.973068          0.986337          1.000000   \n",
       "oofs_unrelated_9           0.971318          0.978792          0.984056   \n",
       "oofs_unrelated_10          0.977646          0.983965          0.987820   \n",
       "oofs_unrelated_11          0.972614          0.980390          0.979701   \n",
       "oofs_unrelated_12          0.976537          0.974268          0.980506   \n",
       "oofs_unrelated_13          0.979104          0.978164          0.981747   \n",
       "oofs_unrelated_14          0.970859          0.978848          0.983653   \n",
       "oofs_unrelated_15          0.978754          0.983940          0.987454   \n",
       "oofs_unrelated_16          0.973566          0.986260          0.996736   \n",
       "oofs_unrelated_17          0.978102          0.975830          0.981314   \n",
       "oofs_unrelated_18          0.979990          0.979610          0.982084   \n",
       "\n",
       "                   oofs_unrelated_9  oofs_unrelated_10  oofs_unrelated_11  \\\n",
       "oofs_unrelated_0           0.971615           0.969398           0.971206   \n",
       "oofs_unrelated_1           0.969819           0.972554           0.966131   \n",
       "oofs_unrelated_2           0.968727           0.970621           0.965539   \n",
       "oofs_unrelated_3           0.972338           0.978712           0.973816   \n",
       "oofs_unrelated_4           0.970553           0.971981           0.965403   \n",
       "oofs_unrelated_5           0.969367           0.970257           0.964712   \n",
       "oofs_unrelated_6           0.971318           0.977646           0.972614   \n",
       "oofs_unrelated_7           0.978792           0.983965           0.980390   \n",
       "oofs_unrelated_8           0.984056           0.987820           0.979701   \n",
       "oofs_unrelated_9           1.000000           0.981037           0.987146   \n",
       "oofs_unrelated_10          0.981037           1.000000           0.979647   \n",
       "oofs_unrelated_11          0.987146           0.979647           1.000000   \n",
       "oofs_unrelated_12          0.986571           0.983315           0.982623   \n",
       "oofs_unrelated_13          0.984970           0.986668           0.984743   \n",
       "oofs_unrelated_14          0.996133           0.980061           0.985059   \n",
       "oofs_unrelated_15          0.979866           0.993637           0.979124   \n",
       "oofs_unrelated_16          0.982942           0.987707           0.978441   \n",
       "oofs_unrelated_17          0.985613           0.983753           0.982137   \n",
       "oofs_unrelated_18          0.983777           0.986783           0.982855   \n",
       "\n",
       "                   oofs_unrelated_12  oofs_unrelated_13  oofs_unrelated_14  \\\n",
       "oofs_unrelated_0            0.969555           0.970029           0.970348   \n",
       "oofs_unrelated_1            0.969558           0.970829           0.973004   \n",
       "oofs_unrelated_2            0.969039           0.969442           0.971613   \n",
       "oofs_unrelated_3            0.977123           0.979705           0.973593   \n",
       "oofs_unrelated_4            0.970200           0.970659           0.971251   \n",
       "oofs_unrelated_5            0.970017           0.969717           0.968046   \n",
       "oofs_unrelated_6            0.976537           0.979104           0.970859   \n",
       "oofs_unrelated_7            0.974268           0.978164           0.978848   \n",
       "oofs_unrelated_8            0.980506           0.981747           0.983653   \n",
       "oofs_unrelated_9            0.986571           0.984970           0.996133   \n",
       "oofs_unrelated_10           0.983315           0.986668           0.980061   \n",
       "oofs_unrelated_11           0.982623           0.984743           0.985059   \n",
       "oofs_unrelated_12           1.000000           0.988826           0.983774   \n",
       "oofs_unrelated_13           0.988826           1.000000           0.983028   \n",
       "oofs_unrelated_14           0.983774           0.983028           1.000000   \n",
       "oofs_unrelated_15           0.981790           0.986013           0.981758   \n",
       "oofs_unrelated_16           0.979319           0.981009           0.984669   \n",
       "oofs_unrelated_17           0.995708           0.988582           0.986677   \n",
       "oofs_unrelated_18           0.986629           0.993820           0.985435   \n",
       "\n",
       "                   oofs_unrelated_15  oofs_unrelated_16  oofs_unrelated_17  \\\n",
       "oofs_unrelated_0            0.969500           0.968974           0.969428   \n",
       "oofs_unrelated_1            0.975851           0.975135           0.973627   \n",
       "oofs_unrelated_2            0.973276           0.971808           0.972618   \n",
       "oofs_unrelated_3            0.981349           0.975971           0.979870   \n",
       "oofs_unrelated_4            0.973084           0.973036           0.972064   \n",
       "oofs_unrelated_5            0.970220           0.969092           0.969949   \n",
       "oofs_unrelated_6            0.978754           0.973566           0.978102   \n",
       "oofs_unrelated_7            0.983940           0.986260           0.975830   \n",
       "oofs_unrelated_8            0.987454           0.996736           0.981314   \n",
       "oofs_unrelated_9            0.979866           0.982942           0.985613   \n",
       "oofs_unrelated_10           0.993637           0.987707           0.983753   \n",
       "oofs_unrelated_11           0.979124           0.978441           0.982137   \n",
       "oofs_unrelated_12           0.981790           0.979319           0.995708   \n",
       "oofs_unrelated_13           0.986013           0.981009           0.988582   \n",
       "oofs_unrelated_14           0.981758           0.984669           0.986677   \n",
       "oofs_unrelated_15           1.000000           0.989113           0.984935   \n",
       "oofs_unrelated_16           0.989113           1.000000           0.982159   \n",
       "oofs_unrelated_17           0.984935           0.982159           1.000000   \n",
       "oofs_unrelated_18           0.988874           0.983709           0.989607   \n",
       "\n",
       "                   oofs_unrelated_18  \n",
       "oofs_unrelated_0            0.970400  \n",
       "oofs_unrelated_1            0.975486  \n",
       "oofs_unrelated_2            0.973165  \n",
       "oofs_unrelated_3            0.982335  \n",
       "oofs_unrelated_4            0.972748  \n",
       "oofs_unrelated_5            0.970316  \n",
       "oofs_unrelated_6            0.979990  \n",
       "oofs_unrelated_7            0.979610  \n",
       "oofs_unrelated_8            0.982084  \n",
       "oofs_unrelated_9            0.983777  \n",
       "oofs_unrelated_10           0.986783  \n",
       "oofs_unrelated_11           0.982855  \n",
       "oofs_unrelated_12           0.986629  \n",
       "oofs_unrelated_13           0.993820  \n",
       "oofs_unrelated_14           0.985435  \n",
       "oofs_unrelated_15           0.988874  \n",
       "oofs_unrelated_16           0.983709  \n",
       "oofs_unrelated_17           0.989607  \n",
       "oofs_unrelated_18           1.000000  "
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "unrelated.corr()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Prepare Different Inputs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Only use oofs\n",
     "# Convert the stacked out-of-fold predictions into plain numpy arrays\n",
     "# for the level-2 (meta) models defined below.\n",
     "# NOTE(review): `trains`/`tests` are presumably pandas DataFrames of OOF\n",
     "# predictions built earlier in this notebook (they expose `.values`) -- confirm.\n",
     "ensemble_trains = trains.values\n",
     "ensemble_tests = tests.values"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Ensemble With NN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "########################################\n",
    "## import packages\n",
    "########################################\n",
    "import os\n",
    "import re\n",
    "import csv\n",
    "import codecs\n",
    "import numpy as np\n",
    "np.random.seed(1337)\n",
    "\n",
    "import pandas as pd\n",
    "import operator\n",
    "import sys\n",
    "\n",
    "from string import punctuation\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "\n",
    "from iwillwin.trainer.supervised_trainer import KerasModelTrainer\n",
    "from iwillwin.data_utils.data_helpers import DataTransformer, DataLoader\n",
    "from iwillwin.model.sim_zoos import *\n",
    "import tensorflow as tf\n",
    "from keras.layers import Dense, Input, MaxPooling1D, CuDNNLSTM, Embedding, Add, Lambda, Dropout, Activation, SpatialDropout1D, Reshape, GlobalAveragePooling1D, merge, Flatten, Bidirectional, CuDNNGRU, add, Conv1D, GlobalMaxPooling1D\n",
    "from keras.layers.merge import concatenate\n",
    "from keras.models import Model\n",
    "from keras import optimizers\n",
    "from keras import initializers\n",
    "from keras.engine import InputSpec, Layer\n",
    "from iwillwin.config import dataset_config, model_config\n",
    "from keras.models import Sequential\n",
    "from keras.layers.embeddings import Embedding\n",
    "from keras.layers.core import Lambda, Dense, Dropout\n",
    "from keras.layers.recurrent import LSTM, GRU\n",
    "from keras.layers.wrappers import Bidirectional\n",
    "from keras.legacy.layers import Highway\n",
    "from keras.layers import TimeDistributed\n",
    "from keras.layers.normalization import BatchNormalization\n",
    "import keras.backend as K\n",
    "\n",
    "from sklearn.metrics import roc_auc_score, log_loss\n",
    "from keras.callbacks import EarlyStopping, ModelCheckpoint\n",
    "from sklearn.metrics import log_loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from keras import regularizers\n",
    "\n",
    "def get_dense_add_net(feature_nums, depth=5, units=24, dropout_rate=0.1):\n",
    "    \"\"\"Build a DenseNet-style feed-forward ensembler over meta-features.\n",
    "\n",
    "    Each of `depth` stages appends `units` new ReLU features to the running\n",
    "    feature vector (dense concatenation), followed by a Highway layer and a\n",
    "    3-way softmax head, compiled with the weighted-accuracy metric.\n",
    "\n",
    "    Args:\n",
    "        feature_nums: width of the input meta-feature vector.\n",
    "        depth: number of dense-concat stages (default 5, as before).\n",
    "        units: units added per stage (default 24, as before).\n",
    "        dropout_rate: dropout applied to each new feature block (default 0.1).\n",
    "\n",
    "    Returns:\n",
    "        A compiled Keras Model.\n",
    "\n",
    "    NOTE(review): `Adam`, `Concatenate`, `Highway` and `weighted_accuracy`\n",
    "    are not imported in this cell -- presumably provided by the earlier\n",
    "    star-import / sibling cells; confirm they are in scope before running.\n",
    "    \"\"\"\n",
    "    features_inputs = Input(shape=(feature_nums,), name='mata-features', dtype=\"float32\")\n",
    "    features = features_inputs\n",
    "\n",
    "    for _ in range(depth):\n",
    "        new_features = Dense(units, activation='relu')(features)\n",
    "        new_features = Dropout(dropout_rate)(new_features)\n",
    "        features = Concatenate()([features, new_features])\n",
    "\n",
    "    h = Highway(activation='relu')(features)\n",
    "    out_ = Dense(3, activation='softmax')(h)\n",
    "\n",
    "    model = Model(inputs=[features_inputs], outputs=out_)\n",
    "    model.compile(optimizer=Adam(lr=1e-3, decay=1e-6,), loss='categorical_crossentropy',\n",
    "    metrics=['accuracy', weighted_accuracy])\n",
    "    model.summary()\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_logit_net(feature_nums):\n",
    "    \"\"\"Build a plain multinomial-logistic (softmax) model over meta-features.\n",
    "\n",
    "    A single Dense(3, softmax) layer on top of the meta-feature input,\n",
    "    compiled with categorical cross-entropy and the weighted-accuracy metric.\n",
    "    \"\"\"\n",
    "    features_inputs = Input(shape=(feature_nums,), name='mata-features', dtype=\"float32\")\n",
    "    probabilities = Dense(3, activation='softmax')(features_inputs)\n",
    "    model = Model(inputs=[features_inputs], outputs=probabilities)\n",
    "    optimizer = Adam(lr=1e-3, decay=1e-6,)\n",
    "    model.compile(optimizer=optimizer, loss='categorical_crossentropy',\n",
    "                  metrics=['accuracy', weighted_accuracy])\n",
    "    model.summary()\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import importlib\n",
    "\n",
    "from sklearn.metrics import roc_auc_score, log_loss\n",
    "from keras.callbacks import EarlyStopping, ModelCheckpoint\n",
    "\n",
    "from iwillwin.config import model_config\n",
    "\n",
    "class ModelTrainer(object):\n",
    "    \"\"\"Abstract k-fold cross-validation trainer.\n",
    "\n",
    "    Splits (features, y) into `fold_count` contiguous folds, trains one\n",
    "    model per fold via `_train_model_by_logloss` (implemented by a\n",
    "    subclass), and keeps the per-fold models and validation predictions.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, model_stamp, epoch_num, learning_rate=1e-3,\n",
    "                 shuffle_inputs=False, verbose_round=40, early_stopping_round=8):\n",
    "        # model_stamp: prefix used for the per-fold checkpoint file names.\n",
    "        # epoch_num: maximum number of epochs per fold.\n",
    "        self.models = []\n",
    "        self.model_stamp = model_stamp\n",
    "        self.val_loss = -1  # sentinel until train_folds() runs\n",
    "        self.auc = -1  # sentinel; never updated in this class\n",
    "        self.epoch_num = epoch_num\n",
    "        self.learning_rate = learning_rate\n",
    "        self.eps = 1e-10\n",
    "        self.verbose_round = verbose_round\n",
    "        self.early_stopping_round = early_stopping_round\n",
    "        self.shuffle_inputs = shuffle_inputs\n",
    "        self.class_weight = [0.93, 1.21]  # NOTE(review): unused in this class -- presumably legacy two-class weights\n",
    "\n",
    "    def train_folds(self, features, y, fold_count, batch_size, get_model_func, augments=None, skip_fold=0, patience=10, scale_sample_weight=False,\n",
    "                    class_weight=None, self_aware=False, swap_input=False, early_stop_on=None):\n",
    "        \"\"\"Train `fold_count` models on contiguous folds of (features, y).\n",
    "\n",
    "        Returns (models, mean best validation score, per-fold predictions).\n",
    "\n",
    "        NOTE(review): `augments`, `skip_fold`, `self_aware` and `swap_input`\n",
    "        are accepted but never used in this implementation.\n",
    "        \"\"\"\n",
    "        weight_val=scale_sample_weight  # NOTE(review): computed but the call below hardcodes weight_val=None\n",
    "        fold_size = len(features) // fold_count\n",
    "        models = []\n",
    "        fold_predictions = []\n",
    "        score = 0\n",
    "\n",
    "        for fold_id in range(0, fold_count):\n",
    "            fold_start = fold_size * fold_id\n",
    "            fold_end = fold_start + fold_size\n",
    "\n",
    "            # The last fold absorbs the remainder when len(features) % fold_count != 0.\n",
    "            if fold_id == fold_count - 1:\n",
    "                fold_end = len(features)\n",
    "\n",
    "            train_features = np.concatenate([features[:fold_start], features[fold_end:]])\n",
    "            train_y = np.concatenate([y[:fold_start], y[fold_end:]])\n",
    "            \n",
    "            val_features = features[fold_start:fold_end]\n",
    "            val_y = y[fold_start:fold_end]\n",
    "            fold_pos = (np.sum(train_y) / len(train_features))  # NOTE(review): computed but unused\n",
    "\n",
    "            # The Keras models built here take a single named input layer.\n",
    "            train_data = {\n",
    "                \"mata-features\": train_features,\n",
    "            }\n",
    "\n",
    "            val_data = {\n",
    "                \"mata-features\": val_features,\n",
    "            }\n",
    "\n",
    "            model, bst_val_score, fold_prediction = self._train_model_by_logloss(\n",
    "                get_model_func(), batch_size, train_data, train_y, val_data, val_y, fold_id,\n",
    "                patience, class_weight, early_stop_on, weight_val=None)\n",
    "    \n",
    "            score += bst_val_score\n",
    "            models.append(model)\n",
    "            fold_predictions.append(fold_prediction)\n",
    "\n",
    "        self.models = models\n",
    "        self.val_loss = score / fold_count  # mean of the per-fold best scores\n",
    "        return models, self.val_loss, fold_predictions\n",
    "\n",
    "    def _train_model_by_logloss(self, model, batch_size, train_x, train_y, val_x, val_y, fold_id, patience):\n",
    "        # Subclass hook: train one fold, return (model, best_val_score, predictions).\n",
    "        # NOTE(review): the subclass override takes extra arguments\n",
    "        # (class_weight, early_stop_on, weight_val) not in this base signature.\n",
    "        # return a list which holds [models, val_loss, auc, prediction]\n",
    "        raise NotImplementedError\n",
    "\n",
    "class KerasModelTrainer(ModelTrainer):\n",
    "    \"\"\"Keras-backed fold trainer with early stopping and checkpointing.\"\"\"\n",
    "\n",
    "    def __init__(self, *args, **kwargs):\n",
    "        super(KerasModelTrainer, self).__init__(*args, **kwargs)\n",
    "\n",
    "    def _train_model_by_logloss(self, model, batch_size, train_x, train_y, val_x, val_y, fold_id, patience, class_weight, early_stop_on, weight_val):\n",
    "        \"\"\"Train one fold; return (model, best val weighted accuracy, val predictions).\n",
    "\n",
    "        The checkpoint's best weights are restored before predicting on the\n",
    "        validation fold.\n",
    "        \"\"\"\n",
    "        # Bug fix: previously hardcoded patience=10, silently ignoring the\n",
    "        # `patience` argument forwarded from train_folds().\n",
    "        early_stopping = EarlyStopping(monitor=early_stop_on, patience=patience)\n",
    "        bst_model_path = self.model_stamp + str(fold_id) + '.h5'\n",
    "        # Optional per-sample validation weights (tuple form only when provided).\n",
    "        val_data = (val_x, val_y, weight_val) if weight_val is not None else (val_x, val_y)\n",
    "        model_checkpoint = ModelCheckpoint(bst_model_path, save_best_only=True, save_weights_only=True)\n",
    "        hist = model.fit(train_x, train_y,\n",
    "                         validation_data=val_data,\n",
    "                         epochs=self.epoch_num, batch_size=batch_size, shuffle=True,\n",
    "                         verbose=2, class_weight=class_weight,\n",
    "                         callbacks=[early_stopping, model_checkpoint],)\n",
    "        bst_val_score = max(hist.history['val_weighted_accuracy'])\n",
    "        model.load_weights(bst_model_path)\n",
    "        predictions = model.predict(val_x)\n",
    "\n",
    "        return model, bst_val_score, predictions"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def _agent_get_model():\n",
    "    # Zero-argument model factory for ModelTrainer.train_folds; relies on the\n",
    "    # notebook globals `ensemble_trains` (feature matrix) and `get_dense_add_net`.\n",
    "    return get_dense_add_net(ensemble_trains.shape[1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def np_weighted_accuracy(y_true, y_pred):\n",
    "    weight = np.array([[1/16, 1/15, 1/5]])\n",
    "    norm = [(1/16) + (1/15) + (1/5)]\n",
    "    weight_mask = weight * y_true\n",
    "    weight_mask = np.max(weight_mask, axis=-1)\n",
    "    norms = np.sum(weight_mask)\n",
    "    \n",
    "    y_true = np.argmax(y_true, axis=-1)\n",
    "    y_pred = np.argmax(y_pred, axis=-1)\n",
    "    \n",
    "    res = ((y_true == y_pred) * weight_mask).sum() / norms\n",
    "    return res"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def weighted_accuracy(y_true, y_pred):\n",
    "    \"\"\"Class-weighted accuracy metric (Keras backend version).\n",
    "\n",
    "    Mirrors np_weighted_accuracy: samples are weighted by their true class\n",
    "    ([1/16, 1/15, 1/5] for classes 0, 1, 2) and the weighted fraction of\n",
    "    correct argmax predictions is returned as a scalar tensor.\n",
    "    \"\"\"\n",
    "    weight = np.array([[1/16, 1/15, 1/5]])\n",
    "    # The numpy constant broadcasts against the one-hot y_true tensor; the\n",
    "    # max along the last axis picks out the true-class weight per sample.\n",
    "    weight_mask = weight * y_true\n",
    "    label_weights = K.max(K.cast(weight_mask, 'float32'), axis=-1)\n",
    "\n",
    "    true_label = K.argmax(y_true, axis=-1)\n",
    "    pred_label = K.argmax(y_pred, axis=-1)\n",
    "\n",
    "    res = K.cast(K.equal(true_label, pred_label), tf.float32) * label_weights / K.sum(label_weights)\n",
    "    res = K.sum(res)\n",
    "    return res"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Scale, Early Stopping on WACC"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\keras\\legacy\\layers.py:198: UserWarning: The `Highway` layer is deprecated and will be removed after 06/2017.\n",
      "  warnings.warn('The `Highway` layer is deprecated '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_1 (Dropout)             (None, 24)           0           dense_1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_1[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_2 (Dense)                 (None, 24)           1968        concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_2 (Dropout)             (None, 24)           0           dense_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_2 (Concatenate)     (None, 105)          0           concatenate_1[0][0]              \n",
      "                                                                 dropout_2[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_3 (Dense)                 (None, 24)           2544        concatenate_2[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_3 (Dropout)             (None, 24)           0           dense_3[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_3 (Concatenate)     (None, 129)          0           concatenate_2[0][0]              \n",
      "                                                                 dropout_3[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_4 (Dense)                 (None, 24)           3120        concatenate_3[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_4 (Dropout)             (None, 24)           0           dense_4[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_4 (Concatenate)     (None, 153)          0           concatenate_3[0][0]              \n",
      "                                                                 dropout_4[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_5 (Dense)                 (None, 24)           3696        concatenate_4[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_5 (Dropout)             (None, 24)           0           dense_5[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_5 (Concatenate)     (None, 177)          0           concatenate_4[0][0]              \n",
      "                                                                 dropout_5[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "highway_1 (Highway)             (None, 177)          63012       concatenate_5[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_6 (Dense)                 (None, 3)            534         highway_1[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 6s - loss: 0.0198 - acc: 0.8844 - weighted_accuracy: 0.8763 - val_loss: 0.2644 - val_acc: 0.8876 - val_weighted_accuracy: 0.8796\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8858 - weighted_accuracy: 0.8782 - val_loss: 0.2572 - val_acc: 0.8893 - val_weighted_accuracy: 0.8788\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8858 - weighted_accuracy: 0.8782 - val_loss: 0.2638 - val_acc: 0.8863 - val_weighted_accuracy: 0.8776\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8859 - weighted_accuracy: 0.8782 - val_loss: 0.2577 - val_acc: 0.8888 - val_weighted_accuracy: 0.8797\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8861 - weighted_accuracy: 0.8785 - val_loss: 0.2620 - val_acc: 0.8873 - val_weighted_accuracy: 0.8783\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8863 - weighted_accuracy: 0.8787 - val_loss: 0.2595 - val_acc: 0.8883 - val_weighted_accuracy: 0.8791\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8866 - weighted_accuracy: 0.8790 - val_loss: 0.2595 - val_acc: 0.8891 - val_weighted_accuracy: 0.8801\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8865 - weighted_accuracy: 0.8789 - val_loss: 0.2652 - val_acc: 0.8861 - val_weighted_accuracy: 0.8789\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8868 - weighted_accuracy: 0.8791 - val_loss: 0.2641 - val_acc: 0.8860 - val_weighted_accuracy: 0.8787\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8870 - weighted_accuracy: 0.8794 - val_loss: 0.2613 - val_acc: 0.8879 - val_weighted_accuracy: 0.8787\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8871 - weighted_accuracy: 0.8794 - val_loss: 0.2575 - val_acc: 0.8894 - val_weighted_accuracy: 0.8794\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8869 - weighted_accuracy: 0.8791 - val_loss: 0.2606 - val_acc: 0.8884 - val_weighted_accuracy: 0.8795\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8871 - weighted_accuracy: 0.8793 - val_loss: 0.2639 - val_acc: 0.8871 - val_weighted_accuracy: 0.8786\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8873 - weighted_accuracy: 0.8794 - val_loss: 0.2653 - val_acc: 0.8866 - val_weighted_accuracy: 0.8794\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8798 - val_loss: 0.2650 - val_acc: 0.8870 - val_weighted_accuracy: 0.8791\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8874 - weighted_accuracy: 0.8797 - val_loss: 0.2650 - val_acc: 0.8856 - val_weighted_accuracy: 0.8779\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8874 - weighted_accuracy: 0.8796 - val_loss: 0.2592 - val_acc: 0.8885 - val_weighted_accuracy: 0.8793\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_7 (Dense)                 (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_6 (Dropout)             (None, 24)           0           dense_7[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_6 (Concatenate)     (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_6[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_8 (Dense)                 (None, 24)           1968        concatenate_6[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_7 (Dropout)             (None, 24)           0           dense_8[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_7 (Concatenate)     (None, 105)          0           concatenate_6[0][0]              \n",
      "                                                                 dropout_7[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_9 (Dense)                 (None, 24)           2544        concatenate_7[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_8 (Dropout)             (None, 24)           0           dense_9[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_8 (Concatenate)     (None, 129)          0           concatenate_7[0][0]              \n",
      "                                                                 dropout_8[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_10 (Dense)                (None, 24)           3120        concatenate_8[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_9 (Dropout)             (None, 24)           0           dense_10[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_9 (Concatenate)     (None, 153)          0           concatenate_8[0][0]              \n",
      "                                                                 dropout_9[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_11 (Dense)                (None, 24)           3696        concatenate_9[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_10 (Dropout)            (None, 24)           0           dense_11[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_10 (Concatenate)    (None, 177)          0           concatenate_9[0][0]              \n",
      "                                                                 dropout_10[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_2 (Highway)             (None, 177)          63012       concatenate_10[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_12 (Dense)                (None, 3)            534         highway_2[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0206 - acc: 0.8785 - weighted_accuracy: 0.8700 - val_loss: 0.2423 - val_acc: 0.8943 - val_weighted_accuracy: 0.8885\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0191 - acc: 0.8851 - weighted_accuracy: 0.8772 - val_loss: 0.2383 - val_acc: 0.8951 - val_weighted_accuracy: 0.8891\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8850 - weighted_accuracy: 0.8772 - val_loss: 0.2426 - val_acc: 0.8913 - val_weighted_accuracy: 0.8867\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8854 - weighted_accuracy: 0.8776 - val_loss: 0.2438 - val_acc: 0.8933 - val_weighted_accuracy: 0.8881\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8854 - weighted_accuracy: 0.8776 - val_loss: 0.2354 - val_acc: 0.8953 - val_weighted_accuracy: 0.8885\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8853 - weighted_accuracy: 0.8773 - val_loss: 0.2359 - val_acc: 0.8948 - val_weighted_accuracy: 0.8885\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8856 - weighted_accuracy: 0.8777 - val_loss: 0.2376 - val_acc: 0.8952 - val_weighted_accuracy: 0.8881\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8855 - weighted_accuracy: 0.8775 - val_loss: 0.2358 - val_acc: 0.8952 - val_weighted_accuracy: 0.8881\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8853 - weighted_accuracy: 0.8773 - val_loss: 0.2416 - val_acc: 0.8935 - val_weighted_accuracy: 0.8877\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8859 - weighted_accuracy: 0.8781 - val_loss: 0.2373 - val_acc: 0.8949 - val_weighted_accuracy: 0.8878\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8859 - weighted_accuracy: 0.8780 - val_loss: 0.2359 - val_acc: 0.8949 - val_weighted_accuracy: 0.8877\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8860 - weighted_accuracy: 0.8781 - val_loss: 0.2366 - val_acc: 0.8957 - val_weighted_accuracy: 0.8881\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_13 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_11 (Dropout)            (None, 24)           0           dense_13[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_11 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_11[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_14 (Dense)                (None, 24)           1968        concatenate_11[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_12 (Dropout)            (None, 24)           0           dense_14[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_12 (Concatenate)    (None, 105)          0           concatenate_11[0][0]             \n",
      "                                                                 dropout_12[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_15 (Dense)                (None, 24)           2544        concatenate_12[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_13 (Dropout)            (None, 24)           0           dense_15[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_13 (Concatenate)    (None, 129)          0           concatenate_12[0][0]             \n",
      "                                                                 dropout_13[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_16 (Dense)                (None, 24)           3120        concatenate_13[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_14 (Dropout)            (None, 24)           0           dense_16[0][0]                   \n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "concatenate_14 (Concatenate)    (None, 153)          0           concatenate_13[0][0]             \n",
      "                                                                 dropout_14[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_17 (Dense)                (None, 24)           3696        concatenate_14[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_15 (Dropout)            (None, 24)           0           dense_17[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_15 (Concatenate)    (None, 177)          0           concatenate_14[0][0]             \n",
      "                                                                 dropout_15[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_3 (Highway)             (None, 177)          63012       concatenate_15[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_18 (Dense)                (None, 3)            534         highway_3[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0199 - acc: 0.8830 - weighted_accuracy: 0.8745 - val_loss: 0.2577 - val_acc: 0.8879 - val_weighted_accuracy: 0.8776\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8863 - weighted_accuracy: 0.8782 - val_loss: 0.2592 - val_acc: 0.8869 - val_weighted_accuracy: 0.8786\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8863 - weighted_accuracy: 0.8782 - val_loss: 0.2565 - val_acc: 0.8875 - val_weighted_accuracy: 0.8781\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8865 - weighted_accuracy: 0.8784 - val_loss: 0.2641 - val_acc: 0.8843 - val_weighted_accuracy: 0.8769\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8867 - weighted_accuracy: 0.8786 - val_loss: 0.2571 - val_acc: 0.8879 - val_weighted_accuracy: 0.8777\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8871 - weighted_accuracy: 0.8791 - val_loss: 0.2567 - val_acc: 0.8879 - val_weighted_accuracy: 0.8776\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8869 - weighted_accuracy: 0.8788 - val_loss: 0.2576 - val_acc: 0.8872 - val_weighted_accuracy: 0.8779\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8870 - weighted_accuracy: 0.8789 - val_loss: 0.2578 - val_acc: 0.8877 - val_weighted_accuracy: 0.8785\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8868 - weighted_accuracy: 0.8788 - val_loss: 0.2560 - val_acc: 0.8870 - val_weighted_accuracy: 0.8771\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8874 - weighted_accuracy: 0.8793 - val_loss: 0.2609 - val_acc: 0.8868 - val_weighted_accuracy: 0.8784\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8872 - weighted_accuracy: 0.8791 - val_loss: 0.2569 - val_acc: 0.8869 - val_weighted_accuracy: 0.8779\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8874 - weighted_accuracy: 0.8795 - val_loss: 0.2607 - val_acc: 0.8852 - val_weighted_accuracy: 0.8770\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_19 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_16 (Dropout)            (None, 24)           0           dense_19[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_16 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_16[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_20 (Dense)                (None, 24)           1968        concatenate_16[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_17 (Dropout)            (None, 24)           0           dense_20[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_17 (Concatenate)    (None, 105)          0           concatenate_16[0][0]             \n",
      "                                                                 dropout_17[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_21 (Dense)                (None, 24)           2544        concatenate_17[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_18 (Dropout)            (None, 24)           0           dense_21[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_18 (Concatenate)    (None, 129)          0           concatenate_17[0][0]             \n",
      "                                                                 dropout_18[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_22 (Dense)                (None, 24)           3120        concatenate_18[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_19 (Dropout)            (None, 24)           0           dense_22[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_19 (Concatenate)    (None, 153)          0           concatenate_18[0][0]             \n",
      "                                                                 dropout_19[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_23 (Dense)                (None, 24)           3696        concatenate_19[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_20 (Dropout)            (None, 24)           0           dense_23[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_20 (Concatenate)    (None, 177)          0           concatenate_19[0][0]             \n",
      "                                                                 dropout_20[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_4 (Highway)             (None, 177)          63012       concatenate_20[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_24 (Dense)                (None, 3)            534         highway_4[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0209 - acc: 0.8747 - weighted_accuracy: 0.8670 - val_loss: 0.2745 - val_acc: 0.8779 - val_weighted_accuracy: 0.8718\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8871 - weighted_accuracy: 0.8788 - val_loss: 0.2766 - val_acc: 0.8781 - val_weighted_accuracy: 0.8723\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 3/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8875 - weighted_accuracy: 0.8793 - val_loss: 0.2747 - val_acc: 0.8788 - val_weighted_accuracy: 0.8725\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8874 - weighted_accuracy: 0.8794 - val_loss: 0.2764 - val_acc: 0.8779 - val_weighted_accuracy: 0.8720\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8871 - weighted_accuracy: 0.8793 - val_loss: 0.2797 - val_acc: 0.8767 - val_weighted_accuracy: 0.8725\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8869 - weighted_accuracy: 0.8788 - val_loss: 0.2760 - val_acc: 0.8773 - val_weighted_accuracy: 0.8726\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8795 - val_loss: 0.2696 - val_acc: 0.8801 - val_weighted_accuracy: 0.8727\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8795 - val_loss: 0.2765 - val_acc: 0.8779 - val_weighted_accuracy: 0.8724\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8874 - weighted_accuracy: 0.8795 - val_loss: 0.2665 - val_acc: 0.8814 - val_weighted_accuracy: 0.8730\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8796 - val_loss: 0.2829 - val_acc: 0.8754 - val_weighted_accuracy: 0.8722\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8795 - val_loss: 0.2737 - val_acc: 0.8786 - val_weighted_accuracy: 0.8724\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8877 - weighted_accuracy: 0.8797 - val_loss: 0.2718 - val_acc: 0.8791 - val_weighted_accuracy: 0.8720\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8881 - weighted_accuracy: 0.8801 - val_loss: 0.2721 - val_acc: 0.8789 - val_weighted_accuracy: 0.8725\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8882 - weighted_accuracy: 0.8801 - val_loss: 0.2723 - val_acc: 0.8785 - val_weighted_accuracy: 0.8726\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8879 - weighted_accuracy: 0.8799 - val_loss: 0.2694 - val_acc: 0.8814 - val_weighted_accuracy: 0.8734\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8882 - weighted_accuracy: 0.8801 - val_loss: 0.2755 - val_acc: 0.8777 - val_weighted_accuracy: 0.8727\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8884 - weighted_accuracy: 0.8804 - val_loss: 0.2698 - val_acc: 0.8805 - val_weighted_accuracy: 0.8728\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8881 - weighted_accuracy: 0.8799 - val_loss: 0.2807 - val_acc: 0.8775 - val_weighted_accuracy: 0.8726\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8884 - weighted_accuracy: 0.8803 - val_loss: 0.2731 - val_acc: 0.8800 - val_weighted_accuracy: 0.8725\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8886 - weighted_accuracy: 0.8804 - val_loss: 0.2704 - val_acc: 0.8814 - val_weighted_accuracy: 0.8725\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8886 - weighted_accuracy: 0.8804 - val_loss: 0.2690 - val_acc: 0.8813 - val_weighted_accuracy: 0.8723\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8888 - weighted_accuracy: 0.8806 - val_loss: 0.2833 - val_acc: 0.8743 - val_weighted_accuracy: 0.8708\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8889 - weighted_accuracy: 0.8809 - val_loss: 0.2829 - val_acc: 0.8769 - val_weighted_accuracy: 0.8727\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.0183 - acc: 0.8890 - weighted_accuracy: 0.8810 - val_loss: 0.2697 - val_acc: 0.8807 - val_weighted_accuracy: 0.8717\n",
      "Epoch 25/500\n",
      " - 2s - loss: 0.0183 - acc: 0.8895 - weighted_accuracy: 0.8816 - val_loss: 0.2732 - val_acc: 0.8806 - val_weighted_accuracy: 0.8726\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_25 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_21 (Dropout)            (None, 24)           0           dense_25[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_21 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_21[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_26 (Dense)                (None, 24)           1968        concatenate_21[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_22 (Dropout)            (None, 24)           0           dense_26[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_22 (Concatenate)    (None, 105)          0           concatenate_21[0][0]             \n",
      "                                                                 dropout_22[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_27 (Dense)                (None, 24)           2544        concatenate_22[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_23 (Dropout)            (None, 24)           0           dense_27[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_23 (Concatenate)    (None, 129)          0           concatenate_22[0][0]             \n",
      "                                                                 dropout_23[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_28 (Dense)                (None, 24)           3120        concatenate_23[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_24 (Dropout)            (None, 24)           0           dense_28[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_24 (Concatenate)    (None, 153)          0           concatenate_23[0][0]             \n",
      "                                                                 dropout_24[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_29 (Dense)                (None, 24)           3696        concatenate_24[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_25 (Dropout)            (None, 24)           0           dense_29[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_25 (Concatenate)    (None, 177)          0           concatenate_24[0][0]             \n",
      "                                                                 dropout_25[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_5 (Highway)             (None, 177)          63012       concatenate_25[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_30 (Dense)                (None, 3)            534         highway_5[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0203 - acc: 0.8812 - weighted_accuracy: 0.8728 - val_loss: 0.2445 - val_acc: 0.8976 - val_weighted_accuracy: 0.8911\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0191 - acc: 0.8848 - weighted_accuracy: 0.8767 - val_loss: 0.2469 - val_acc: 0.8976 - val_weighted_accuracy: 0.8921\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8847 - weighted_accuracy: 0.8768 - val_loss: 0.2451 - val_acc: 0.8974 - val_weighted_accuracy: 0.8920\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8850 - weighted_accuracy: 0.8769 - val_loss: 0.2463 - val_acc: 0.8964 - val_weighted_accuracy: 0.8915\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8850 - weighted_accuracy: 0.8768 - val_loss: 0.2434 - val_acc: 0.8981 - val_weighted_accuracy: 0.8910\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8853 - weighted_accuracy: 0.8771 - val_loss: 0.2489 - val_acc: 0.8971 - val_weighted_accuracy: 0.8919\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8856 - weighted_accuracy: 0.8775 - val_loss: 0.2469 - val_acc: 0.8969 - val_weighted_accuracy: 0.8913\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8857 - weighted_accuracy: 0.8776 - val_loss: 0.2465 - val_acc: 0.8975 - val_weighted_accuracy: 0.8920\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8854 - weighted_accuracy: 0.8770 - val_loss: 0.2478 - val_acc: 0.8966 - val_weighted_accuracy: 0.8913\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8859 - weighted_accuracy: 0.8776 - val_loss: 0.2448 - val_acc: 0.8976 - val_weighted_accuracy: 0.8917\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8859 - weighted_accuracy: 0.8777 - val_loss: 0.2463 - val_acc: 0.8969 - val_weighted_accuracy: 0.8918\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8859 - weighted_accuracy: 0.8776 - val_loss: 0.2467 - val_acc: 0.8971 - val_weighted_accuracy: 0.8914\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_31 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_26 (Dropout)            (None, 24)           0           dense_31[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_26 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_26[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_32 (Dense)                (None, 24)           1968        concatenate_26[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_27 (Dropout)            (None, 24)           0           dense_32[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_27 (Concatenate)    (None, 105)          0           concatenate_26[0][0]             \n",
      "                                                                 dropout_27[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_33 (Dense)                (None, 24)           2544        concatenate_27[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_28 (Dropout)            (None, 24)           0           dense_33[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_28 (Concatenate)    (None, 129)          0           concatenate_27[0][0]             \n",
      "                                                                 dropout_28[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_34 (Dense)                (None, 24)           3120        concatenate_28[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_29 (Dropout)            (None, 24)           0           dense_34[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_29 (Concatenate)    (None, 153)          0           concatenate_28[0][0]             \n",
      "                                                                 dropout_29[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_35 (Dense)                (None, 24)           3696        concatenate_29[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_30 (Dropout)            (None, 24)           0           dense_35[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_30 (Concatenate)    (None, 177)          0           concatenate_29[0][0]             \n",
      "                                                                 dropout_30[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_6 (Highway)             (None, 177)          63012       concatenate_30[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_36 (Dense)                (None, 3)            534         highway_6[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0200 - acc: 0.8851 - weighted_accuracy: 0.8763 - val_loss: 0.2775 - val_acc: 0.8794 - val_weighted_accuracy: 0.8762\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8866 - weighted_accuracy: 0.8785 - val_loss: 0.2687 - val_acc: 0.8835 - val_weighted_accuracy: 0.8765\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8866 - weighted_accuracy: 0.8784 - val_loss: 0.2743 - val_acc: 0.8791 - val_weighted_accuracy: 0.8757\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8872 - weighted_accuracy: 0.8791 - val_loss: 0.2673 - val_acc: 0.8808 - val_weighted_accuracy: 0.8763\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8870 - weighted_accuracy: 0.8790 - val_loss: 0.2662 - val_acc: 0.8825 - val_weighted_accuracy: 0.8763\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8874 - weighted_accuracy: 0.8792 - val_loss: 0.2648 - val_acc: 0.8828 - val_weighted_accuracy: 0.8760\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8867 - weighted_accuracy: 0.8786 - val_loss: 0.2650 - val_acc: 0.8837 - val_weighted_accuracy: 0.8758\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8873 - weighted_accuracy: 0.8791 - val_loss: 0.2721 - val_acc: 0.8815 - val_weighted_accuracy: 0.8755\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8874 - weighted_accuracy: 0.8791 - val_loss: 0.2641 - val_acc: 0.8832 - val_weighted_accuracy: 0.8760\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8795 - val_loss: 0.2685 - val_acc: 0.8815 - val_weighted_accuracy: 0.8762\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8877 - weighted_accuracy: 0.8795 - val_loss: 0.2661 - val_acc: 0.8813 - val_weighted_accuracy: 0.8759\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8793 - val_loss: 0.2678 - val_acc: 0.8820 - val_weighted_accuracy: 0.8770\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8877 - weighted_accuracy: 0.8794 - val_loss: 0.2700 - val_acc: 0.8820 - val_weighted_accuracy: 0.8763\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8795 - val_loss: 0.2717 - val_acc: 0.8811 - val_weighted_accuracy: 0.8760\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8881 - weighted_accuracy: 0.8800 - val_loss: 0.2631 - val_acc: 0.8832 - val_weighted_accuracy: 0.8754\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 16/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8882 - weighted_accuracy: 0.8801 - val_loss: 0.2663 - val_acc: 0.8834 - val_weighted_accuracy: 0.8757\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8881 - weighted_accuracy: 0.8799 - val_loss: 0.2633 - val_acc: 0.8846 - val_weighted_accuracy: 0.8757\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8883 - weighted_accuracy: 0.8801 - val_loss: 0.2627 - val_acc: 0.8833 - val_weighted_accuracy: 0.8751\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8883 - weighted_accuracy: 0.8801 - val_loss: 0.2661 - val_acc: 0.8816 - val_weighted_accuracy: 0.8758\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8884 - weighted_accuracy: 0.8802 - val_loss: 0.2657 - val_acc: 0.8819 - val_weighted_accuracy: 0.8756\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8884 - weighted_accuracy: 0.8803 - val_loss: 0.2644 - val_acc: 0.8838 - val_weighted_accuracy: 0.8758\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8888 - weighted_accuracy: 0.8806 - val_loss: 0.2665 - val_acc: 0.8815 - val_weighted_accuracy: 0.8748\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_37 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_31 (Dropout)            (None, 24)           0           dense_37[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_31 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_31[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_38 (Dense)                (None, 24)           1968        concatenate_31[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_32 (Dropout)            (None, 24)           0           dense_38[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_32 (Concatenate)    (None, 105)          0           concatenate_31[0][0]             \n",
      "                                                                 dropout_32[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_39 (Dense)                (None, 24)           2544        concatenate_32[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_33 (Dropout)            (None, 24)           0           dense_39[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_33 (Concatenate)    (None, 129)          0           concatenate_32[0][0]             \n",
      "                                                                 dropout_33[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_40 (Dense)                (None, 24)           3120        concatenate_33[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_34 (Dropout)            (None, 24)           0           dense_40[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_34 (Concatenate)    (None, 153)          0           concatenate_33[0][0]             \n",
      "                                                                 dropout_34[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_41 (Dense)                (None, 24)           3696        concatenate_34[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_35 (Dropout)            (None, 24)           0           dense_41[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_35 (Concatenate)    (None, 177)          0           concatenate_34[0][0]             \n",
      "                                                                 dropout_35[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_7 (Highway)             (None, 177)          63012       concatenate_35[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_42 (Dense)                (None, 3)            534         highway_7[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0197 - acc: 0.8852 - weighted_accuracy: 0.8769 - val_loss: 0.2765 - val_acc: 0.8759 - val_weighted_accuracy: 0.8621\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8881 - weighted_accuracy: 0.8797 - val_loss: 0.2798 - val_acc: 0.8749 - val_weighted_accuracy: 0.8623\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8881 - weighted_accuracy: 0.8796 - val_loss: 0.2764 - val_acc: 0.8764 - val_weighted_accuracy: 0.8628\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8884 - weighted_accuracy: 0.8800 - val_loss: 0.2746 - val_acc: 0.8772 - val_weighted_accuracy: 0.8638\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8884 - weighted_accuracy: 0.8801 - val_loss: 0.2799 - val_acc: 0.8753 - val_weighted_accuracy: 0.8667\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8883 - weighted_accuracy: 0.8802 - val_loss: 0.2769 - val_acc: 0.8761 - val_weighted_accuracy: 0.8632\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8889 - weighted_accuracy: 0.8805 - val_loss: 0.2812 - val_acc: 0.8747 - val_weighted_accuracy: 0.8656\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8886 - weighted_accuracy: 0.8803 - val_loss: 0.2781 - val_acc: 0.8761 - val_weighted_accuracy: 0.8643\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8887 - weighted_accuracy: 0.8802 - val_loss: 0.2749 - val_acc: 0.8770 - val_weighted_accuracy: 0.8643\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8888 - weighted_accuracy: 0.8804 - val_loss: 0.2788 - val_acc: 0.8747 - val_weighted_accuracy: 0.8634\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8891 - weighted_accuracy: 0.8807 - val_loss: 0.2776 - val_acc: 0.8761 - val_weighted_accuracy: 0.8643\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8891 - weighted_accuracy: 0.8807 - val_loss: 0.2785 - val_acc: 0.8745 - val_weighted_accuracy: 0.8653\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8889 - weighted_accuracy: 0.8806 - val_loss: 0.2830 - val_acc: 0.8731 - val_weighted_accuracy: 0.8647\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8891 - weighted_accuracy: 0.8807 - val_loss: 0.2820 - val_acc: 0.8727 - val_weighted_accuracy: 0.8658\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.0184 - acc: 0.8893 - weighted_accuracy: 0.8809 - val_loss: 0.2767 - val_acc: 0.8754 - val_weighted_accuracy: 0.8631\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_43 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_36 (Dropout)            (None, 24)           0           dense_43[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_36 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_36[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_44 (Dense)                (None, 24)           1968        concatenate_36[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_37 (Dropout)            (None, 24)           0           dense_44[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_37 (Concatenate)    (None, 105)          0           concatenate_36[0][0]             \n",
      "                                                                 dropout_37[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_45 (Dense)                (None, 24)           2544        concatenate_37[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_38 (Dropout)            (None, 24)           0           dense_45[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_38 (Concatenate)    (None, 129)          0           concatenate_37[0][0]             \n",
      "                                                                 dropout_38[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_46 (Dense)                (None, 24)           3120        concatenate_38[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_39 (Dropout)            (None, 24)           0           dense_46[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_39 (Concatenate)    (None, 153)          0           concatenate_38[0][0]             \n",
      "                                                                 dropout_39[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_47 (Dense)                (None, 24)           3696        concatenate_39[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_40 (Dropout)            (None, 24)           0           dense_47[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_40 (Concatenate)    (None, 177)          0           concatenate_39[0][0]             \n",
      "                                                                 dropout_40[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_8 (Highway)             (None, 177)          63012       concatenate_40[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_48 (Dense)                (None, 3)            534         highway_8[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0204 - acc: 0.8804 - weighted_accuracy: 0.8720 - val_loss: 0.2776 - val_acc: 0.8770 - val_weighted_accuracy: 0.8735\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8862 - weighted_accuracy: 0.8785 - val_loss: 0.2656 - val_acc: 0.8823 - val_weighted_accuracy: 0.8743\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8867 - weighted_accuracy: 0.8788 - val_loss: 0.2600 - val_acc: 0.8852 - val_weighted_accuracy: 0.8737\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8870 - weighted_accuracy: 0.8792 - val_loss: 0.2690 - val_acc: 0.8785 - val_weighted_accuracy: 0.8722\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8871 - weighted_accuracy: 0.8792 - val_loss: 0.2672 - val_acc: 0.8799 - val_weighted_accuracy: 0.8727\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8871 - weighted_accuracy: 0.8793 - val_loss: 0.2652 - val_acc: 0.8805 - val_weighted_accuracy: 0.8728\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8874 - weighted_accuracy: 0.8796 - val_loss: 0.2677 - val_acc: 0.8792 - val_weighted_accuracy: 0.8724\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8875 - weighted_accuracy: 0.8798 - val_loss: 0.2662 - val_acc: 0.8798 - val_weighted_accuracy: 0.8718\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8870 - weighted_accuracy: 0.8791 - val_loss: 0.2615 - val_acc: 0.8821 - val_weighted_accuracy: 0.8726\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8797 - val_loss: 0.2678 - val_acc: 0.8792 - val_weighted_accuracy: 0.8724\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8797 - val_loss: 0.2654 - val_acc: 0.8817 - val_weighted_accuracy: 0.8727\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8879 - weighted_accuracy: 0.8799 - val_loss: 0.2618 - val_acc: 0.8820 - val_weighted_accuracy: 0.8724\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_49 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_41 (Dropout)            (None, 24)           0           dense_49[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_41 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_41[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_50 (Dense)                (None, 24)           1968        concatenate_41[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_42 (Dropout)            (None, 24)           0           dense_50[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_42 (Concatenate)    (None, 105)          0           concatenate_41[0][0]             \n",
      "                                                                 dropout_42[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_51 (Dense)                (None, 24)           2544        concatenate_42[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_43 (Dropout)            (None, 24)           0           dense_51[0][0]                   \n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "concatenate_43 (Concatenate)    (None, 129)          0           concatenate_42[0][0]             \n",
      "                                                                 dropout_43[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_52 (Dense)                (None, 24)           3120        concatenate_43[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_44 (Dropout)            (None, 24)           0           dense_52[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_44 (Concatenate)    (None, 153)          0           concatenate_43[0][0]             \n",
      "                                                                 dropout_44[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_53 (Dense)                (None, 24)           3696        concatenate_44[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_45 (Dropout)            (None, 24)           0           dense_53[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_45 (Concatenate)    (None, 177)          0           concatenate_44[0][0]             \n",
      "                                                                 dropout_45[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_9 (Highway)             (None, 177)          63012       concatenate_45[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_54 (Dense)                (None, 3)            534         highway_9[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 3s - loss: 0.0201 - acc: 0.8823 - weighted_accuracy: 0.8734 - val_loss: 0.2619 - val_acc: 0.8886 - val_weighted_accuracy: 0.8832\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8853 - weighted_accuracy: 0.8775 - val_loss: 0.2602 - val_acc: 0.8879 - val_weighted_accuracy: 0.8823\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0189 - acc: 0.8857 - weighted_accuracy: 0.8777 - val_loss: 0.2567 - val_acc: 0.8898 - val_weighted_accuracy: 0.8834\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8859 - weighted_accuracy: 0.8780 - val_loss: 0.2641 - val_acc: 0.8859 - val_weighted_accuracy: 0.8820\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8864 - weighted_accuracy: 0.8783 - val_loss: 0.2630 - val_acc: 0.8873 - val_weighted_accuracy: 0.8827\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8860 - weighted_accuracy: 0.8780 - val_loss: 0.2630 - val_acc: 0.8869 - val_weighted_accuracy: 0.8821\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8863 - weighted_accuracy: 0.8783 - val_loss: 0.2651 - val_acc: 0.8852 - val_weighted_accuracy: 0.8820\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8862 - weighted_accuracy: 0.8781 - val_loss: 0.2563 - val_acc: 0.8887 - val_weighted_accuracy: 0.8824\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8866 - weighted_accuracy: 0.8786 - val_loss: 0.2548 - val_acc: 0.8912 - val_weighted_accuracy: 0.8839\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8867 - weighted_accuracy: 0.8785 - val_loss: 0.2575 - val_acc: 0.8902 - val_weighted_accuracy: 0.8837\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8868 - weighted_accuracy: 0.8786 - val_loss: 0.2605 - val_acc: 0.8871 - val_weighted_accuracy: 0.8825\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8866 - weighted_accuracy: 0.8785 - val_loss: 0.2576 - val_acc: 0.8897 - val_weighted_accuracy: 0.8834\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8868 - weighted_accuracy: 0.8789 - val_loss: 0.2585 - val_acc: 0.8883 - val_weighted_accuracy: 0.8830\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8871 - weighted_accuracy: 0.8790 - val_loss: 0.2601 - val_acc: 0.8885 - val_weighted_accuracy: 0.8837\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8871 - weighted_accuracy: 0.8791 - val_loss: 0.2593 - val_acc: 0.8902 - val_weighted_accuracy: 0.8840\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8870 - weighted_accuracy: 0.8790 - val_loss: 0.2661 - val_acc: 0.8863 - val_weighted_accuracy: 0.8832\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8873 - weighted_accuracy: 0.8793 - val_loss: 0.2574 - val_acc: 0.8902 - val_weighted_accuracy: 0.8837\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8793 - val_loss: 0.2613 - val_acc: 0.8880 - val_weighted_accuracy: 0.8827\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8794 - val_loss: 0.2583 - val_acc: 0.8889 - val_weighted_accuracy: 0.8828\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8795 - val_loss: 0.2554 - val_acc: 0.8904 - val_weighted_accuracy: 0.8834\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8877 - weighted_accuracy: 0.8795 - val_loss: 0.2575 - val_acc: 0.8903 - val_weighted_accuracy: 0.8838\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8879 - weighted_accuracy: 0.8799 - val_loss: 0.2595 - val_acc: 0.8898 - val_weighted_accuracy: 0.8830\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8884 - weighted_accuracy: 0.8800 - val_loss: 0.2590 - val_acc: 0.8886 - val_weighted_accuracy: 0.8834\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8878 - weighted_accuracy: 0.8799 - val_loss: 0.2577 - val_acc: 0.8891 - val_weighted_accuracy: 0.8827\n",
      "Epoch 25/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8879 - weighted_accuracy: 0.8800 - val_loss: 0.2566 - val_acc: 0.8901 - val_weighted_accuracy: 0.8836\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_55 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_46 (Dropout)            (None, 24)           0           dense_55[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_46 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_46[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_56 (Dense)                (None, 24)           1968        concatenate_46[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_47 (Dropout)            (None, 24)           0           dense_56[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_47 (Concatenate)    (None, 105)          0           concatenate_46[0][0]             \n",
      "                                                                 dropout_47[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_57 (Dense)                (None, 24)           2544        concatenate_47[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_48 (Dropout)            (None, 24)           0           dense_57[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_48 (Concatenate)    (None, 129)          0           concatenate_47[0][0]             \n",
      "                                                                 dropout_48[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_58 (Dense)                (None, 24)           3120        concatenate_48[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_49 (Dropout)            (None, 24)           0           dense_58[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_49 (Concatenate)    (None, 153)          0           concatenate_48[0][0]             \n",
      "                                                                 dropout_49[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_59 (Dense)                (None, 24)           3696        concatenate_49[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_50 (Dropout)            (None, 24)           0           dense_59[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_50 (Concatenate)    (None, 177)          0           concatenate_49[0][0]             \n",
      "                                                                 dropout_50[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_10 (Highway)            (None, 177)          63012       concatenate_50[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_60 (Dense)                (None, 3)            534         highway_10[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288495 samples, validate on 32057 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.0202 - acc: 0.8814 - weighted_accuracy: 0.8736 - val_loss: 0.2468 - val_acc: 0.8928 - val_weighted_accuracy: 0.8760\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.0190 - acc: 0.8855 - weighted_accuracy: 0.8778 - val_loss: 0.2521 - val_acc: 0.8894 - val_weighted_accuracy: 0.8774\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8858 - weighted_accuracy: 0.8783 - val_loss: 0.2578 - val_acc: 0.8855 - val_weighted_accuracy: 0.8767\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8857 - weighted_accuracy: 0.8779 - val_loss: 0.2552 - val_acc: 0.8868 - val_weighted_accuracy: 0.8774\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8858 - weighted_accuracy: 0.8784 - val_loss: 0.2530 - val_acc: 0.8893 - val_weighted_accuracy: 0.8780\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.0188 - acc: 0.8864 - weighted_accuracy: 0.8789 - val_loss: 0.2492 - val_acc: 0.8906 - val_weighted_accuracy: 0.8775\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8867 - weighted_accuracy: 0.8791 - val_loss: 0.2514 - val_acc: 0.8897 - val_weighted_accuracy: 0.8780\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8866 - weighted_accuracy: 0.8790 - val_loss: 0.2511 - val_acc: 0.8901 - val_weighted_accuracy: 0.8776\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8865 - weighted_accuracy: 0.8788 - val_loss: 0.2485 - val_acc: 0.8920 - val_weighted_accuracy: 0.8777\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8870 - weighted_accuracy: 0.8793 - val_loss: 0.2491 - val_acc: 0.8904 - val_weighted_accuracy: 0.8767\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8868 - weighted_accuracy: 0.8790 - val_loss: 0.2528 - val_acc: 0.8880 - val_weighted_accuracy: 0.8766\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.0187 - acc: 0.8871 - weighted_accuracy: 0.8793 - val_loss: 0.2510 - val_acc: 0.8906 - val_weighted_accuracy: 0.8778\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8871 - weighted_accuracy: 0.8794 - val_loss: 0.2524 - val_acc: 0.8893 - val_weighted_accuracy: 0.8781\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8873 - weighted_accuracy: 0.8797 - val_loss: 0.2511 - val_acc: 0.8905 - val_weighted_accuracy: 0.8784\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8874 - weighted_accuracy: 0.8796 - val_loss: 0.2489 - val_acc: 0.8911 - val_weighted_accuracy: 0.8775\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8876 - weighted_accuracy: 0.8799 - val_loss: 0.2502 - val_acc: 0.8903 - val_weighted_accuracy: 0.8776\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8875 - weighted_accuracy: 0.8800 - val_loss: 0.2484 - val_acc: 0.8920 - val_weighted_accuracy: 0.8776\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.0186 - acc: 0.8879 - weighted_accuracy: 0.8802 - val_loss: 0.2549 - val_acc: 0.8871 - val_weighted_accuracy: 0.8775\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8876 - weighted_accuracy: 0.8799 - val_loss: 0.2525 - val_acc: 0.8889 - val_weighted_accuracy: 0.8772\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8878 - weighted_accuracy: 0.8801 - val_loss: 0.2550 - val_acc: 0.8884 - val_weighted_accuracy: 0.8776\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8878 - weighted_accuracy: 0.8802 - val_loss: 0.2500 - val_acc: 0.8911 - val_weighted_accuracy: 0.8774\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8879 - weighted_accuracy: 0.8802 - val_loss: 0.2529 - val_acc: 0.8900 - val_weighted_accuracy: 0.8775\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8881 - weighted_accuracy: 0.8805 - val_loss: 0.2551 - val_acc: 0.8891 - val_weighted_accuracy: 0.8781\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.0185 - acc: 0.8883 - weighted_accuracy: 0.8806 - val_loss: 0.2563 - val_acc: 0.8877 - val_weighted_accuracy: 0.8770\n"
     ]
    }
   ],
   "source": [
    "# Train a bagged ensemble of the dense model over 10 CV folds on the stacked\n",
    "# meta-features. NOTE(review): depends on globals from earlier cells\n",
    "# (ensemble_trains, labels, _agent_get_model, to_categorical) -- run top-to-bottom.\n",
    "trainer = KerasModelTrainer(model_stamp=\"Ensemble-DenseNet\", epoch_num=500)\n",
    "# class_weight gives class 2 ('disagreed' per the column order used later) a larger\n",
    "# weight (1/5) than classes 0/1 (1/16, 1/15); early stopping monitors the\n",
    "# competition's weighted accuracy with a patience of 20 epochs.\n",
    "models, score, folds_preds = trainer.train_folds(features=ensemble_trains, y=to_categorical(labels), augments=None, fold_count=10,\n",
    "    batch_size=1024, early_stop_on='val_weighted_accuracy',\n",
    "    scale_sample_weight=None, class_weight={0: 1/16, 1:1/15, 2:1/5},\n",
    "    get_model_func=_agent_get_model, \n",
    "    patience=20)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8793773095658487\n"
     ]
    }
   ],
   "source": [
    "# Cross-validation weighted-accuracy score returned by train_folds above.\n",
    "print(\"score\", score)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.8793773095658487\n",
      "Predicting training results...\n",
      "Predicting testing results...\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
    "# Persist out-of-fold train predictions, fold-averaged test predictions, and a\n",
    "# hard-label submission CSV for this ensemble model.\n",
    "print(\"score\", score)\n",
    "oofs_dir = \"../data/p_ensemble/oofs/\"\n",
    "output_dir = \"../data/p_ensemble/preds/\"\n",
    "onehot_pred_dir = \"../data/p_ensemble/nn_one_hot/\"\n",
    "\n",
    "model_submit_prefix = \"AddNN-Ensemble-Weighted-StopOnAcc\"\n",
    "\n",
    "oofs_path = oofs_dir + model_submit_prefix\n",
    "output_path = output_dir + model_submit_prefix\n",
    "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
    "\n",
    "print(\"Predicting training results...\")\n",
    "# Concatenate the per-fold prediction arrays (presumably out-of-fold, aligned with\n",
    "# `labels` -- confirm against train_folds) and re-score; this overwrites the\n",
    "# fold-mean `score` printed above with the OOF metric used in the filenames below.\n",
    "train_predicts = np.concatenate(folds_preds, axis=0)\n",
    "score = np_weighted_accuracy(to_categorical(labels), train_predicts)\n",
    "\n",
    "# Column order assumes class indices 0/1/2 = unrelated/agreed/disagreed.\n",
    "oofs = pd.DataFrame({\"unrelated\": train_predicts[:, 0], \"agreed\": train_predicts[:, 1], \"disagreed\": train_predicts[:, 2]})\n",
    "# NOTE(review): '{:4f}' is field-width 4 with default 6 decimals; '{:.4f}' was\n",
    "# probably intended (same in the two format strings below).\n",
    "submit_path = oofs_path + \"-Train-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "oofs.to_csv(submit_path, index=False)\n",
    "\n",
    "print(\"Predicting testing results...\")\n",
    "# Simple bagging: average the softmax outputs of all fold models over the test set.\n",
    "test_predicts_list = []\n",
    "for fold_id, model in enumerate(models):\n",
    "    # Input dict key 'mata-features' (sic) must match the model's InputLayer name\n",
    "    # shown in the summaries above -- do not \"fix\" the spelling here alone.\n",
    "    test_predicts = model.predict({\"mata-features\": ensemble_tests}, batch_size=128, verbose=1)\n",
    "    test_predicts_list.append(test_predicts)\n",
    "\n",
    "# Element-wise mean of the fold predictions.\n",
    "test_predicts = np.zeros(test_predicts_list[0].shape)\n",
    "for fold_predict in test_predicts_list:\n",
    "    test_predicts += fold_predict\n",
    "test_predicts /= len(test_predicts_list)\n",
    "\n",
    "test_predicts = pd.DataFrame({\"unrelated\": test_predicts[:, 0], \"agreed\": test_predicts[:, 1], \"disagreed\": test_predicts[:, 2]})\n",
    "submit_path = output_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
    "\n",
    "print(\"Predicting labeled testing results...\")\n",
    "# Hard labels: per row, the column name with the highest probability, joined with\n",
    "# the test-set ids to form the submission frame.\n",
    "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
    "pred_labels = test_predicts.idxmax(axis=1)\n",
    "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
    "submit_path = one_hot_pred_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "sub.to_csv(submit_path, index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Recompute the OOF weighted accuracy; identical to the assignment already made in\n",
    "# the previous cell, kept so the metric can be refreshed independently.\n",
    "score = np_weighted_accuracy(to_categorical(labels), train_predicts)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.8774185929693885"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Display the out-of-fold weighted accuracy.\n",
    "score"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# No Scaling, Early Stopping on Loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\keras\\legacy\\layers.py:198: UserWarning: The `Highway` layer is deprecated and will be removed after 06/2017.\n",
      "  warnings.warn('The `Highway` layer is deprecated '\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_61 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_51 (Dropout)            (None, 24)           0           dense_61[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_51 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_51[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_62 (Dense)                (None, 24)           1968        concatenate_51[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_52 (Dropout)            (None, 24)           0           dense_62[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_52 (Concatenate)    (None, 105)          0           concatenate_51[0][0]             \n",
      "                                                                 dropout_52[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_63 (Dense)                (None, 24)           2544        concatenate_52[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_53 (Dropout)            (None, 24)           0           dense_63[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_53 (Concatenate)    (None, 129)          0           concatenate_52[0][0]             \n",
      "                                                                 dropout_53[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_64 (Dense)                (None, 24)           3120        concatenate_53[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_54 (Dropout)            (None, 24)           0           dense_64[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_54 (Concatenate)    (None, 153)          0           concatenate_53[0][0]             \n",
      "                                                                 dropout_54[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_65 (Dense)                (None, 24)           3696        concatenate_54[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_55 (Dropout)            (None, 24)           0           dense_65[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_55 (Concatenate)    (None, 177)          0           concatenate_54[0][0]             \n",
      "                                                                 dropout_55[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_11 (Highway)            (None, 177)          63012       concatenate_55[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_66 (Dense)                (None, 3)            534         highway_11[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2696 - acc: 0.8850 - weighted_accuracy: 0.8631 - val_loss: 0.2492 - val_acc: 0.8933 - val_weighted_accuracy: 0.8692\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2512 - acc: 0.8909 - weighted_accuracy: 0.8695 - val_loss: 0.2482 - val_acc: 0.8936 - val_weighted_accuracy: 0.8695\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2501 - acc: 0.8912 - weighted_accuracy: 0.8699 - val_loss: 0.2481 - val_acc: 0.8936 - val_weighted_accuracy: 0.8731\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2496 - acc: 0.8913 - weighted_accuracy: 0.8705 - val_loss: 0.2485 - val_acc: 0.8930 - val_weighted_accuracy: 0.8730\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2491 - acc: 0.8914 - weighted_accuracy: 0.8707 - val_loss: 0.2516 - val_acc: 0.8920 - val_weighted_accuracy: 0.8712\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2490 - acc: 0.8914 - weighted_accuracy: 0.8707 - val_loss: 0.2486 - val_acc: 0.8930 - val_weighted_accuracy: 0.8735\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2487 - acc: 0.8916 - weighted_accuracy: 0.8708 - val_loss: 0.2481 - val_acc: 0.8937 - val_weighted_accuracy: 0.8712\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2485 - acc: 0.8919 - weighted_accuracy: 0.8713 - val_loss: 0.2485 - val_acc: 0.8937 - val_weighted_accuracy: 0.8726\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2484 - acc: 0.8918 - weighted_accuracy: 0.8713 - val_loss: 0.2492 - val_acc: 0.8926 - val_weighted_accuracy: 0.8709\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2484 - acc: 0.8922 - weighted_accuracy: 0.8716 - val_loss: 0.2497 - val_acc: 0.8919 - val_weighted_accuracy: 0.8736\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2481 - acc: 0.8920 - weighted_accuracy: 0.8716 - val_loss: 0.2500 - val_acc: 0.8932 - val_weighted_accuracy: 0.8745\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2481 - acc: 0.8918 - weighted_accuracy: 0.8711 - val_loss: 0.2495 - val_acc: 0.8928 - val_weighted_accuracy: 0.8720\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2479 - acc: 0.8918 - weighted_accuracy: 0.8712 - val_loss: 0.2477 - val_acc: 0.8936 - val_weighted_accuracy: 0.8729\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2478 - acc: 0.8920 - weighted_accuracy: 0.8716 - val_loss: 0.2518 - val_acc: 0.8922 - val_weighted_accuracy: 0.8711\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2476 - acc: 0.8921 - weighted_accuracy: 0.8714 - val_loss: 0.2494 - val_acc: 0.8939 - val_weighted_accuracy: 0.8744\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2473 - acc: 0.8926 - weighted_accuracy: 0.8721 - val_loss: 0.2527 - val_acc: 0.8919 - val_weighted_accuracy: 0.8733\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8923 - weighted_accuracy: 0.8718 - val_loss: 0.2498 - val_acc: 0.8928 - val_weighted_accuracy: 0.8711\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2472 - acc: 0.8924 - weighted_accuracy: 0.8720 - val_loss: 0.2518 - val_acc: 0.8918 - val_weighted_accuracy: 0.8718\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2470 - acc: 0.8926 - weighted_accuracy: 0.8724 - val_loss: 0.2486 - val_acc: 0.8942 - val_weighted_accuracy: 0.8737\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8927 - weighted_accuracy: 0.8721 - val_loss: 0.2494 - val_acc: 0.8929 - val_weighted_accuracy: 0.8724\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8926 - weighted_accuracy: 0.8721 - val_loss: 0.2513 - val_acc: 0.8918 - val_weighted_accuracy: 0.8730\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.2465 - acc: 0.8927 - weighted_accuracy: 0.8726 - val_loss: 0.2485 - val_acc: 0.8930 - val_weighted_accuracy: 0.8724\n",
      "Epoch 23/500\n",
      " - 3s - loss: 0.2463 - acc: 0.8928 - weighted_accuracy: 0.8725 - val_loss: 0.2506 - val_acc: 0.8928 - val_weighted_accuracy: 0.8723\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_67 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_56 (Dropout)            (None, 24)           0           dense_67[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_56 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_56[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_68 (Dense)                (None, 24)           1968        concatenate_56[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_57 (Dropout)            (None, 24)           0           dense_68[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_57 (Concatenate)    (None, 105)          0           concatenate_56[0][0]             \n",
      "                                                                 dropout_57[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_69 (Dense)                (None, 24)           2544        concatenate_57[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_58 (Dropout)            (None, 24)           0           dense_69[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_58 (Concatenate)    (None, 129)          0           concatenate_57[0][0]             \n",
      "                                                                 dropout_58[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_70 (Dense)                (None, 24)           3120        concatenate_58[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_59 (Dropout)            (None, 24)           0           dense_70[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_59 (Concatenate)    (None, 153)          0           concatenate_58[0][0]             \n",
      "                                                                 dropout_59[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_71 (Dense)                (None, 24)           3696        concatenate_59[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_60 (Dropout)            (None, 24)           0           dense_71[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_60 (Concatenate)    (None, 177)          0           concatenate_59[0][0]             \n",
      "                                                                 dropout_60[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_12 (Highway)            (None, 177)          63012       concatenate_60[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_72 (Dense)                (None, 3)            534         highway_12[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2745 - acc: 0.8832 - weighted_accuracy: 0.8610 - val_loss: 0.2295 - val_acc: 0.8993 - val_weighted_accuracy: 0.8847\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2539 - acc: 0.8899 - weighted_accuracy: 0.8683 - val_loss: 0.2273 - val_acc: 0.8999 - val_weighted_accuracy: 0.8841\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2528 - acc: 0.8902 - weighted_accuracy: 0.8687 - val_loss: 0.2278 - val_acc: 0.9001 - val_weighted_accuracy: 0.8834\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2527 - acc: 0.8903 - weighted_accuracy: 0.8692 - val_loss: 0.2265 - val_acc: 0.9001 - val_weighted_accuracy: 0.8832\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2518 - acc: 0.8906 - weighted_accuracy: 0.8693 - val_loss: 0.2266 - val_acc: 0.8999 - val_weighted_accuracy: 0.8832\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2515 - acc: 0.8907 - weighted_accuracy: 0.8694 - val_loss: 0.2278 - val_acc: 0.8998 - val_weighted_accuracy: 0.8848\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2514 - acc: 0.8904 - weighted_accuracy: 0.8692 - val_loss: 0.2266 - val_acc: 0.8994 - val_weighted_accuracy: 0.8841\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2511 - acc: 0.8911 - weighted_accuracy: 0.8700 - val_loss: 0.2265 - val_acc: 0.8995 - val_weighted_accuracy: 0.8845\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2510 - acc: 0.8909 - weighted_accuracy: 0.8699 - val_loss: 0.2269 - val_acc: 0.8993 - val_weighted_accuracy: 0.8842\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2507 - acc: 0.8909 - weighted_accuracy: 0.8699 - val_loss: 0.2273 - val_acc: 0.8996 - val_weighted_accuracy: 0.8835\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2505 - acc: 0.8913 - weighted_accuracy: 0.8702 - val_loss: 0.2265 - val_acc: 0.8997 - val_weighted_accuracy: 0.8812\n",
      "Epoch 12/500\n",
      " - 3s - loss: 0.2506 - acc: 0.8912 - weighted_accuracy: 0.8700 - val_loss: 0.2277 - val_acc: 0.8987 - val_weighted_accuracy: 0.8851\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2505 - acc: 0.8911 - weighted_accuracy: 0.8703 - val_loss: 0.2277 - val_acc: 0.8994 - val_weighted_accuracy: 0.8853\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2503 - acc: 0.8913 - weighted_accuracy: 0.8706 - val_loss: 0.2273 - val_acc: 0.8999 - val_weighted_accuracy: 0.8828\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_73 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_61 (Dropout)            (None, 24)           0           dense_73[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_61 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_61[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_74 (Dense)                (None, 24)           1968        concatenate_61[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_62 (Dropout)            (None, 24)           0           dense_74[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_62 (Concatenate)    (None, 105)          0           concatenate_61[0][0]             \n",
      "                                                                 dropout_62[0][0]                 \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "dense_75 (Dense)                (None, 24)           2544        concatenate_62[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_63 (Dropout)            (None, 24)           0           dense_75[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_63 (Concatenate)    (None, 129)          0           concatenate_62[0][0]             \n",
      "                                                                 dropout_63[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_76 (Dense)                (None, 24)           3120        concatenate_63[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_64 (Dropout)            (None, 24)           0           dense_76[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_64 (Concatenate)    (None, 153)          0           concatenate_63[0][0]             \n",
      "                                                                 dropout_64[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_77 (Dense)                (None, 24)           3696        concatenate_64[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_65 (Dropout)            (None, 24)           0           dense_77[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_65 (Concatenate)    (None, 177)          0           concatenate_64[0][0]             \n",
      "                                                                 dropout_65[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_13 (Highway)            (None, 177)          63012       concatenate_65[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_78 (Dense)                (None, 3)            534         highway_13[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2655 - acc: 0.8885 - weighted_accuracy: 0.8666 - val_loss: 0.2506 - val_acc: 0.8918 - val_weighted_accuracy: 0.8737\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2510 - acc: 0.8909 - weighted_accuracy: 0.8694 - val_loss: 0.2494 - val_acc: 0.8914 - val_weighted_accuracy: 0.8725\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2501 - acc: 0.8910 - weighted_accuracy: 0.8698 - val_loss: 0.2500 - val_acc: 0.8910 - val_weighted_accuracy: 0.8721\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2497 - acc: 0.8916 - weighted_accuracy: 0.8707 - val_loss: 0.2493 - val_acc: 0.8911 - val_weighted_accuracy: 0.8721\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2492 - acc: 0.8915 - weighted_accuracy: 0.8706 - val_loss: 0.2490 - val_acc: 0.8913 - val_weighted_accuracy: 0.8716\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2490 - acc: 0.8915 - weighted_accuracy: 0.8706 - val_loss: 0.2492 - val_acc: 0.8926 - val_weighted_accuracy: 0.8717\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2488 - acc: 0.8916 - weighted_accuracy: 0.8709 - val_loss: 0.2492 - val_acc: 0.8917 - val_weighted_accuracy: 0.8699\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2486 - acc: 0.8919 - weighted_accuracy: 0.8711 - val_loss: 0.2492 - val_acc: 0.8913 - val_weighted_accuracy: 0.8707\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2482 - acc: 0.8916 - weighted_accuracy: 0.8709 - val_loss: 0.2489 - val_acc: 0.8912 - val_weighted_accuracy: 0.8705\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2480 - acc: 0.8920 - weighted_accuracy: 0.8714 - val_loss: 0.2491 - val_acc: 0.8914 - val_weighted_accuracy: 0.8716\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2480 - acc: 0.8921 - weighted_accuracy: 0.8714 - val_loss: 0.2489 - val_acc: 0.8919 - val_weighted_accuracy: 0.8697\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2477 - acc: 0.8924 - weighted_accuracy: 0.8717 - val_loss: 0.2506 - val_acc: 0.8910 - val_weighted_accuracy: 0.8699\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2475 - acc: 0.8924 - weighted_accuracy: 0.8719 - val_loss: 0.2501 - val_acc: 0.8915 - val_weighted_accuracy: 0.8697\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2475 - acc: 0.8922 - weighted_accuracy: 0.8716 - val_loss: 0.2494 - val_acc: 0.8909 - val_weighted_accuracy: 0.8701\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2475 - acc: 0.8927 - weighted_accuracy: 0.8724 - val_loss: 0.2492 - val_acc: 0.8914 - val_weighted_accuracy: 0.8719\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2470 - acc: 0.8928 - weighted_accuracy: 0.8724 - val_loss: 0.2495 - val_acc: 0.8905 - val_weighted_accuracy: 0.8720\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8926 - weighted_accuracy: 0.8723 - val_loss: 0.2499 - val_acc: 0.8922 - val_weighted_accuracy: 0.8715\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8924 - weighted_accuracy: 0.8720 - val_loss: 0.2497 - val_acc: 0.8912 - val_weighted_accuracy: 0.8695\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2466 - acc: 0.8929 - weighted_accuracy: 0.8724 - val_loss: 0.2499 - val_acc: 0.8912 - val_weighted_accuracy: 0.8710\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_79 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_66 (Dropout)            (None, 24)           0           dense_79[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_66 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_66[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_80 (Dense)                (None, 24)           1968        concatenate_66[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_67 (Dropout)            (None, 24)           0           dense_80[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_67 (Concatenate)    (None, 105)          0           concatenate_66[0][0]             \n",
      "                                                                 dropout_67[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_81 (Dense)                (None, 24)           2544        concatenate_67[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_68 (Dropout)            (None, 24)           0           dense_81[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_68 (Concatenate)    (None, 129)          0           concatenate_67[0][0]             \n",
      "                                                                 dropout_68[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_82 (Dense)                (None, 24)           3120        concatenate_68[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_69 (Dropout)            (None, 24)           0           dense_82[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_69 (Concatenate)    (None, 153)          0           concatenate_68[0][0]             \n",
      "                                                                 dropout_69[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_83 (Dense)                (None, 24)           3696        concatenate_69[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_70 (Dropout)            (None, 24)           0           dense_83[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_70 (Concatenate)    (None, 177)          0           concatenate_69[0][0]             \n",
      "                                                                 dropout_70[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_14 (Highway)            (None, 177)          63012       concatenate_70[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_84 (Dense)                (None, 3)            534         highway_14[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2647 - acc: 0.8881 - weighted_accuracy: 0.8660 - val_loss: 0.2615 - val_acc: 0.8862 - val_weighted_accuracy: 0.8616\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2495 - acc: 0.8918 - weighted_accuracy: 0.8710 - val_loss: 0.2620 - val_acc: 0.8865 - val_weighted_accuracy: 0.8663\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2486 - acc: 0.8920 - weighted_accuracy: 0.8714 - val_loss: 0.2627 - val_acc: 0.8859 - val_weighted_accuracy: 0.8672\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2481 - acc: 0.8922 - weighted_accuracy: 0.8716 - val_loss: 0.2600 - val_acc: 0.8867 - val_weighted_accuracy: 0.8653\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2480 - acc: 0.8921 - weighted_accuracy: 0.8717 - val_loss: 0.2602 - val_acc: 0.8874 - val_weighted_accuracy: 0.8659\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2477 - acc: 0.8921 - weighted_accuracy: 0.8718 - val_loss: 0.2613 - val_acc: 0.8857 - val_weighted_accuracy: 0.8687\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8923 - weighted_accuracy: 0.8719 - val_loss: 0.2600 - val_acc: 0.8865 - val_weighted_accuracy: 0.8605\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2473 - acc: 0.8922 - weighted_accuracy: 0.8718 - val_loss: 0.2597 - val_acc: 0.8872 - val_weighted_accuracy: 0.8637\n",
      "Epoch 9/500\n",
      " - 3s - loss: 0.2472 - acc: 0.8925 - weighted_accuracy: 0.8722 - val_loss: 0.2601 - val_acc: 0.8870 - val_weighted_accuracy: 0.8673\n",
      "Epoch 10/500\n",
      " - 3s - loss: 0.2472 - acc: 0.8923 - weighted_accuracy: 0.8720 - val_loss: 0.2605 - val_acc: 0.8863 - val_weighted_accuracy: 0.8605\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2467 - acc: 0.8926 - weighted_accuracy: 0.8722 - val_loss: 0.2604 - val_acc: 0.8866 - val_weighted_accuracy: 0.8677\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2467 - acc: 0.8925 - weighted_accuracy: 0.8723 - val_loss: 0.2601 - val_acc: 0.8871 - val_weighted_accuracy: 0.8649\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2465 - acc: 0.8927 - weighted_accuracy: 0.8724 - val_loss: 0.2601 - val_acc: 0.8870 - val_weighted_accuracy: 0.8648\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2464 - acc: 0.8930 - weighted_accuracy: 0.8728 - val_loss: 0.2594 - val_acc: 0.8878 - val_weighted_accuracy: 0.8665\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2462 - acc: 0.8927 - weighted_accuracy: 0.8725 - val_loss: 0.2603 - val_acc: 0.8871 - val_weighted_accuracy: 0.8672\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2461 - acc: 0.8930 - weighted_accuracy: 0.8729 - val_loss: 0.2603 - val_acc: 0.8860 - val_weighted_accuracy: 0.8621\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2459 - acc: 0.8930 - weighted_accuracy: 0.8729 - val_loss: 0.2601 - val_acc: 0.8870 - val_weighted_accuracy: 0.8628\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2457 - acc: 0.8932 - weighted_accuracy: 0.8732 - val_loss: 0.2606 - val_acc: 0.8858 - val_weighted_accuracy: 0.8590\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2455 - acc: 0.8929 - weighted_accuracy: 0.8728 - val_loss: 0.2606 - val_acc: 0.8871 - val_weighted_accuracy: 0.8630\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.2453 - acc: 0.8935 - weighted_accuracy: 0.8735 - val_loss: 0.2604 - val_acc: 0.8864 - val_weighted_accuracy: 0.8608\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.2452 - acc: 0.8935 - weighted_accuracy: 0.8733 - val_loss: 0.2618 - val_acc: 0.8860 - val_weighted_accuracy: 0.8667\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.2450 - acc: 0.8935 - weighted_accuracy: 0.8738 - val_loss: 0.2613 - val_acc: 0.8867 - val_weighted_accuracy: 0.8674\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.2447 - acc: 0.8935 - weighted_accuracy: 0.8734 - val_loss: 0.2610 - val_acc: 0.8869 - val_weighted_accuracy: 0.8647\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.2445 - acc: 0.8938 - weighted_accuracy: 0.8742 - val_loss: 0.2610 - val_acc: 0.8863 - val_weighted_accuracy: 0.8618\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_85 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_71 (Dropout)            (None, 24)           0           dense_85[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_71 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_71[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_86 (Dense)                (None, 24)           1968        concatenate_71[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_72 (Dropout)            (None, 24)           0           dense_86[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_72 (Concatenate)    (None, 105)          0           concatenate_71[0][0]             \n",
      "                                                                 dropout_72[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_87 (Dense)                (None, 24)           2544        concatenate_72[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_73 (Dropout)            (None, 24)           0           dense_87[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_73 (Concatenate)    (None, 129)          0           concatenate_72[0][0]             \n",
      "                                                                 dropout_73[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_88 (Dense)                (None, 24)           3120        concatenate_73[0][0]             \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "dropout_74 (Dropout)            (None, 24)           0           dense_88[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_74 (Concatenate)    (None, 153)          0           concatenate_73[0][0]             \n",
      "                                                                 dropout_74[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_89 (Dense)                (None, 24)           3696        concatenate_74[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_75 (Dropout)            (None, 24)           0           dense_89[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_75 (Concatenate)    (None, 177)          0           concatenate_74[0][0]             \n",
      "                                                                 dropout_75[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_15 (Highway)            (None, 177)          63012       concatenate_75[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_90 (Dense)                (None, 3)            534         highway_15[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2666 - acc: 0.8870 - weighted_accuracy: 0.8650 - val_loss: 0.2370 - val_acc: 0.9010 - val_weighted_accuracy: 0.8880\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2522 - acc: 0.8902 - weighted_accuracy: 0.8682 - val_loss: 0.2395 - val_acc: 0.8996 - val_weighted_accuracy: 0.8856\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2513 - acc: 0.8903 - weighted_accuracy: 0.8687 - val_loss: 0.2362 - val_acc: 0.9006 - val_weighted_accuracy: 0.8870\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2513 - acc: 0.8905 - weighted_accuracy: 0.8689 - val_loss: 0.2358 - val_acc: 0.9006 - val_weighted_accuracy: 0.8859\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2507 - acc: 0.8905 - weighted_accuracy: 0.8687 - val_loss: 0.2358 - val_acc: 0.9004 - val_weighted_accuracy: 0.8869\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2502 - acc: 0.8908 - weighted_accuracy: 0.8694 - val_loss: 0.2362 - val_acc: 0.9001 - val_weighted_accuracy: 0.8869\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2502 - acc: 0.8907 - weighted_accuracy: 0.8696 - val_loss: 0.2360 - val_acc: 0.9008 - val_weighted_accuracy: 0.8886\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2500 - acc: 0.8907 - weighted_accuracy: 0.8692 - val_loss: 0.2367 - val_acc: 0.8998 - val_weighted_accuracy: 0.8871\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2498 - acc: 0.8908 - weighted_accuracy: 0.8696 - val_loss: 0.2372 - val_acc: 0.9008 - val_weighted_accuracy: 0.8883\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2497 - acc: 0.8910 - weighted_accuracy: 0.8698 - val_loss: 0.2356 - val_acc: 0.9000 - val_weighted_accuracy: 0.8869\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2495 - acc: 0.8909 - weighted_accuracy: 0.8697 - val_loss: 0.2376 - val_acc: 0.8995 - val_weighted_accuracy: 0.8870\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2494 - acc: 0.8913 - weighted_accuracy: 0.8700 - val_loss: 0.2364 - val_acc: 0.8998 - val_weighted_accuracy: 0.8860\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2493 - acc: 0.8910 - weighted_accuracy: 0.8699 - val_loss: 0.2359 - val_acc: 0.9000 - val_weighted_accuracy: 0.8844\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2492 - acc: 0.8915 - weighted_accuracy: 0.8704 - val_loss: 0.2359 - val_acc: 0.9002 - val_weighted_accuracy: 0.8872\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2489 - acc: 0.8914 - weighted_accuracy: 0.8703 - val_loss: 0.2365 - val_acc: 0.9003 - val_weighted_accuracy: 0.8878\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2488 - acc: 0.8913 - weighted_accuracy: 0.8701 - val_loss: 0.2364 - val_acc: 0.8999 - val_weighted_accuracy: 0.8883\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2486 - acc: 0.8913 - weighted_accuracy: 0.8704 - val_loss: 0.2356 - val_acc: 0.9006 - val_weighted_accuracy: 0.8839\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2487 - acc: 0.8915 - weighted_accuracy: 0.8705 - val_loss: 0.2355 - val_acc: 0.9005 - val_weighted_accuracy: 0.8857\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2482 - acc: 0.8916 - weighted_accuracy: 0.8706 - val_loss: 0.2358 - val_acc: 0.9008 - val_weighted_accuracy: 0.8872\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.2482 - acc: 0.8916 - weighted_accuracy: 0.8707 - val_loss: 0.2356 - val_acc: 0.9008 - val_weighted_accuracy: 0.8874\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.2480 - acc: 0.8916 - weighted_accuracy: 0.8708 - val_loss: 0.2360 - val_acc: 0.9003 - val_weighted_accuracy: 0.8858\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.2480 - acc: 0.8919 - weighted_accuracy: 0.8709 - val_loss: 0.2363 - val_acc: 0.9007 - val_weighted_accuracy: 0.8873\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.2478 - acc: 0.8922 - weighted_accuracy: 0.8716 - val_loss: 0.2371 - val_acc: 0.8998 - val_weighted_accuracy: 0.8871\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.2475 - acc: 0.8921 - weighted_accuracy: 0.8714 - val_loss: 0.2359 - val_acc: 0.8995 - val_weighted_accuracy: 0.8855\n",
      "Epoch 25/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8924 - weighted_accuracy: 0.8719 - val_loss: 0.2365 - val_acc: 0.9006 - val_weighted_accuracy: 0.8860\n",
      "Epoch 26/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8921 - weighted_accuracy: 0.8715 - val_loss: 0.2365 - val_acc: 0.8995 - val_weighted_accuracy: 0.8863\n",
      "Epoch 27/500\n",
      " - 2s - loss: 0.2470 - acc: 0.8923 - weighted_accuracy: 0.8718 - val_loss: 0.2365 - val_acc: 0.9006 - val_weighted_accuracy: 0.8868\n",
      "Epoch 28/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8925 - weighted_accuracy: 0.8719 - val_loss: 0.2362 - val_acc: 0.8999 - val_weighted_accuracy: 0.8858\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_91 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_76 (Dropout)            (None, 24)           0           dense_91[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_76 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_76[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_92 (Dense)                (None, 24)           1968        concatenate_76[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_77 (Dropout)            (None, 24)           0           dense_92[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_77 (Concatenate)    (None, 105)          0           concatenate_76[0][0]             \n",
      "                                                                 dropout_77[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_93 (Dense)                (None, 24)           2544        concatenate_77[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_78 (Dropout)            (None, 24)           0           dense_93[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_78 (Concatenate)    (None, 129)          0           concatenate_77[0][0]             \n",
      "                                                                 dropout_78[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_94 (Dense)                (None, 24)           3120        concatenate_78[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_79 (Dropout)            (None, 24)           0           dense_94[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_79 (Concatenate)    (None, 153)          0           concatenate_78[0][0]             \n",
      "                                                                 dropout_79[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_95 (Dense)                (None, 24)           3696        concatenate_79[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_80 (Dropout)            (None, 24)           0           dense_95[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_80 (Concatenate)    (None, 177)          0           concatenate_79[0][0]             \n",
      "                                                                 dropout_80[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_16 (Highway)            (None, 177)          63012       concatenate_80[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_96 (Dense)                (None, 3)            534         highway_16[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2706 - acc: 0.8846 - weighted_accuracy: 0.8624 - val_loss: 0.2570 - val_acc: 0.8872 - val_weighted_accuracy: 0.8725\n",
      "Epoch 2/500\n",
      " - 3s - loss: 0.2499 - acc: 0.8918 - weighted_accuracy: 0.8701 - val_loss: 0.2565 - val_acc: 0.8862 - val_weighted_accuracy: 0.8688\n",
      "Epoch 3/500\n",
      " - 3s - loss: 0.2494 - acc: 0.8916 - weighted_accuracy: 0.8703 - val_loss: 0.2570 - val_acc: 0.8866 - val_weighted_accuracy: 0.8712\n",
      "Epoch 4/500\n",
      " - 3s - loss: 0.2492 - acc: 0.8917 - weighted_accuracy: 0.8705 - val_loss: 0.2556 - val_acc: 0.8872 - val_weighted_accuracy: 0.8680\n",
      "Epoch 5/500\n",
      " - 3s - loss: 0.2487 - acc: 0.8921 - weighted_accuracy: 0.8712 - val_loss: 0.2566 - val_acc: 0.8876 - val_weighted_accuracy: 0.8679\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2483 - acc: 0.8919 - weighted_accuracy: 0.8708 - val_loss: 0.2553 - val_acc: 0.8872 - val_weighted_accuracy: 0.8669\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2484 - acc: 0.8922 - weighted_accuracy: 0.8712 - val_loss: 0.2554 - val_acc: 0.8869 - val_weighted_accuracy: 0.8658\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2478 - acc: 0.8924 - weighted_accuracy: 0.8713 - val_loss: 0.2553 - val_acc: 0.8869 - val_weighted_accuracy: 0.8660\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2477 - acc: 0.8925 - weighted_accuracy: 0.8716 - val_loss: 0.2552 - val_acc: 0.8871 - val_weighted_accuracy: 0.8684\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2475 - acc: 0.8926 - weighted_accuracy: 0.8717 - val_loss: 0.2554 - val_acc: 0.8873 - val_weighted_accuracy: 0.8684\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2472 - acc: 0.8925 - weighted_accuracy: 0.8715 - val_loss: 0.2549 - val_acc: 0.8873 - val_weighted_accuracy: 0.8688\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2472 - acc: 0.8928 - weighted_accuracy: 0.8718 - val_loss: 0.2553 - val_acc: 0.8877 - val_weighted_accuracy: 0.8697\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2471 - acc: 0.8927 - weighted_accuracy: 0.8719 - val_loss: 0.2561 - val_acc: 0.8867 - val_weighted_accuracy: 0.8641\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2472 - acc: 0.8927 - weighted_accuracy: 0.8715 - val_loss: 0.2550 - val_acc: 0.8879 - val_weighted_accuracy: 0.8680\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8930 - weighted_accuracy: 0.8722 - val_loss: 0.2554 - val_acc: 0.8878 - val_weighted_accuracy: 0.8702\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2467 - acc: 0.8932 - weighted_accuracy: 0.8723 - val_loss: 0.2557 - val_acc: 0.8874 - val_weighted_accuracy: 0.8712\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2465 - acc: 0.8933 - weighted_accuracy: 0.8728 - val_loss: 0.2549 - val_acc: 0.8879 - val_weighted_accuracy: 0.8669\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2465 - acc: 0.8930 - weighted_accuracy: 0.8722 - val_loss: 0.2551 - val_acc: 0.8877 - val_weighted_accuracy: 0.8680\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2462 - acc: 0.8934 - weighted_accuracy: 0.8727 - val_loss: 0.2559 - val_acc: 0.8877 - val_weighted_accuracy: 0.8693\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.2459 - acc: 0.8933 - weighted_accuracy: 0.8726 - val_loss: 0.2551 - val_acc: 0.8875 - val_weighted_accuracy: 0.8682\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.2458 - acc: 0.8932 - weighted_accuracy: 0.8727 - val_loss: 0.2551 - val_acc: 0.8874 - val_weighted_accuracy: 0.8705\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.2455 - acc: 0.8939 - weighted_accuracy: 0.8735 - val_loss: 0.2559 - val_acc: 0.8875 - val_weighted_accuracy: 0.8682\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.2454 - acc: 0.8939 - weighted_accuracy: 0.8735 - val_loss: 0.2559 - val_acc: 0.8873 - val_weighted_accuracy: 0.8666\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.2452 - acc: 0.8935 - weighted_accuracy: 0.8730 - val_loss: 0.2554 - val_acc: 0.8870 - val_weighted_accuracy: 0.8653\n",
      "Epoch 25/500\n",
      " - 2s - loss: 0.2451 - acc: 0.8939 - weighted_accuracy: 0.8733 - val_loss: 0.2557 - val_acc: 0.8877 - val_weighted_accuracy: 0.8699\n",
      "Epoch 26/500\n",
      " - 2s - loss: 0.2449 - acc: 0.8938 - weighted_accuracy: 0.8734 - val_loss: 0.2561 - val_acc: 0.8869 - val_weighted_accuracy: 0.8685\n",
      "Epoch 27/500\n",
      " - 2s - loss: 0.2446 - acc: 0.8942 - weighted_accuracy: 0.8740 - val_loss: 0.2551 - val_acc: 0.8885 - val_weighted_accuracy: 0.8696\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_97 (Dense)                (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_81 (Dropout)            (None, 24)           0           dense_97[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_81 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_81[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_98 (Dense)                (None, 24)           1968        concatenate_81[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_82 (Dropout)            (None, 24)           0           dense_98[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_82 (Concatenate)    (None, 105)          0           concatenate_81[0][0]             \n",
      "                                                                 dropout_82[0][0]                 \n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "dense_99 (Dense)                (None, 24)           2544        concatenate_82[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_83 (Dropout)            (None, 24)           0           dense_99[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_83 (Concatenate)    (None, 129)          0           concatenate_82[0][0]             \n",
      "                                                                 dropout_83[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_100 (Dense)               (None, 24)           3120        concatenate_83[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_84 (Dropout)            (None, 24)           0           dense_100[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_84 (Concatenate)    (None, 153)          0           concatenate_83[0][0]             \n",
      "                                                                 dropout_84[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_101 (Dense)               (None, 24)           3696        concatenate_84[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_85 (Dropout)            (None, 24)           0           dense_101[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_85 (Concatenate)    (None, 177)          0           concatenate_84[0][0]             \n",
      "                                                                 dropout_85[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_17 (Highway)            (None, 177)          63012       concatenate_85[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_102 (Dense)               (None, 3)            534         highway_17[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 4s - loss: 0.2654 - acc: 0.8872 - weighted_accuracy: 0.8669 - val_loss: 0.2774 - val_acc: 0.8750 - val_weighted_accuracy: 0.8410\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2484 - acc: 0.8927 - weighted_accuracy: 0.8719 - val_loss: 0.2741 - val_acc: 0.8765 - val_weighted_accuracy: 0.8512\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8930 - weighted_accuracy: 0.8726 - val_loss: 0.2751 - val_acc: 0.8756 - val_weighted_accuracy: 0.8452\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2470 - acc: 0.8932 - weighted_accuracy: 0.8725 - val_loss: 0.2757 - val_acc: 0.8751 - val_weighted_accuracy: 0.8414\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2466 - acc: 0.8933 - weighted_accuracy: 0.8728 - val_loss: 0.2748 - val_acc: 0.8752 - val_weighted_accuracy: 0.8439\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2464 - acc: 0.8932 - weighted_accuracy: 0.8727 - val_loss: 0.2749 - val_acc: 0.8752 - val_weighted_accuracy: 0.8452\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2462 - acc: 0.8931 - weighted_accuracy: 0.8723 - val_loss: 0.2748 - val_acc: 0.8762 - val_weighted_accuracy: 0.8476\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2462 - acc: 0.8931 - weighted_accuracy: 0.8729 - val_loss: 0.2764 - val_acc: 0.8752 - val_weighted_accuracy: 0.8434\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2460 - acc: 0.8934 - weighted_accuracy: 0.8731 - val_loss: 0.2751 - val_acc: 0.8750 - val_weighted_accuracy: 0.8432\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2457 - acc: 0.8935 - weighted_accuracy: 0.8731 - val_loss: 0.2742 - val_acc: 0.8757 - val_weighted_accuracy: 0.8472\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2457 - acc: 0.8936 - weighted_accuracy: 0.8734 - val_loss: 0.2759 - val_acc: 0.8747 - val_weighted_accuracy: 0.8411\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2457 - acc: 0.8933 - weighted_accuracy: 0.8730 - val_loss: 0.2738 - val_acc: 0.8759 - val_weighted_accuracy: 0.8467\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2453 - acc: 0.8939 - weighted_accuracy: 0.8738 - val_loss: 0.2749 - val_acc: 0.8757 - val_weighted_accuracy: 0.8447\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2454 - acc: 0.8936 - weighted_accuracy: 0.8733 - val_loss: 0.2753 - val_acc: 0.8752 - val_weighted_accuracy: 0.8432\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2453 - acc: 0.8938 - weighted_accuracy: 0.8736 - val_loss: 0.2766 - val_acc: 0.8741 - val_weighted_accuracy: 0.8398\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2449 - acc: 0.8939 - weighted_accuracy: 0.8736 - val_loss: 0.2750 - val_acc: 0.8770 - val_weighted_accuracy: 0.8488\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2448 - acc: 0.8938 - weighted_accuracy: 0.8737 - val_loss: 0.2748 - val_acc: 0.8750 - val_weighted_accuracy: 0.8432\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2446 - acc: 0.8940 - weighted_accuracy: 0.8738 - val_loss: 0.2757 - val_acc: 0.8756 - val_weighted_accuracy: 0.8436\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2446 - acc: 0.8940 - weighted_accuracy: 0.8739 - val_loss: 0.2751 - val_acc: 0.8760 - val_weighted_accuracy: 0.8465\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.2444 - acc: 0.8939 - weighted_accuracy: 0.8739 - val_loss: 0.2744 - val_acc: 0.8757 - val_weighted_accuracy: 0.8458\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.2443 - acc: 0.8942 - weighted_accuracy: 0.8745 - val_loss: 0.2758 - val_acc: 0.8748 - val_weighted_accuracy: 0.8412\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.2443 - acc: 0.8943 - weighted_accuracy: 0.8744 - val_loss: 0.2737 - val_acc: 0.8757 - val_weighted_accuracy: 0.8472\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.2440 - acc: 0.8942 - weighted_accuracy: 0.8741 - val_loss: 0.2783 - val_acc: 0.8749 - val_weighted_accuracy: 0.8424\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.2436 - acc: 0.8948 - weighted_accuracy: 0.8751 - val_loss: 0.2742 - val_acc: 0.8754 - val_weighted_accuracy: 0.8446\n",
      "Epoch 25/500\n",
      " - 2s - loss: 0.2437 - acc: 0.8947 - weighted_accuracy: 0.8751 - val_loss: 0.2796 - val_acc: 0.8735 - val_weighted_accuracy: 0.8407\n",
      "Epoch 26/500\n",
      " - 2s - loss: 0.2436 - acc: 0.8945 - weighted_accuracy: 0.8748 - val_loss: 0.2754 - val_acc: 0.8752 - val_weighted_accuracy: 0.8437\n",
      "Epoch 27/500\n",
      " - 2s - loss: 0.2432 - acc: 0.8948 - weighted_accuracy: 0.8752 - val_loss: 0.2779 - val_acc: 0.8739 - val_weighted_accuracy: 0.8419\n",
      "Epoch 28/500\n",
      " - 2s - loss: 0.2432 - acc: 0.8950 - weighted_accuracy: 0.8752 - val_loss: 0.2778 - val_acc: 0.8735 - val_weighted_accuracy: 0.8413\n",
      "Epoch 29/500\n",
      " - 2s - loss: 0.2430 - acc: 0.8950 - weighted_accuracy: 0.8753 - val_loss: 0.2766 - val_acc: 0.8738 - val_weighted_accuracy: 0.8426\n",
      "Epoch 30/500\n",
      " - 2s - loss: 0.2428 - acc: 0.8950 - weighted_accuracy: 0.8754 - val_loss: 0.2751 - val_acc: 0.8756 - val_weighted_accuracy: 0.8445\n",
      "Epoch 31/500\n",
      " - 2s - loss: 0.2424 - acc: 0.8953 - weighted_accuracy: 0.8755 - val_loss: 0.2747 - val_acc: 0.8751 - val_weighted_accuracy: 0.8476\n",
      "Epoch 32/500\n",
      " - 2s - loss: 0.2423 - acc: 0.8956 - weighted_accuracy: 0.8762 - val_loss: 0.2765 - val_acc: 0.8734 - val_weighted_accuracy: 0.8398\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_103 (Dense)               (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_86 (Dropout)            (None, 24)           0           dense_103[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_86 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_86[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_104 (Dense)               (None, 24)           1968        concatenate_86[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_87 (Dropout)            (None, 24)           0           dense_104[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_87 (Concatenate)    (None, 105)          0           concatenate_86[0][0]             \n",
      "                                                                 dropout_87[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_105 (Dense)               (None, 24)           2544        concatenate_87[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_88 (Dropout)            (None, 24)           0           dense_105[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_88 (Concatenate)    (None, 129)          0           concatenate_87[0][0]             \n",
      "                                                                 dropout_88[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_106 (Dense)               (None, 24)           3120        concatenate_88[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_89 (Dropout)            (None, 24)           0           dense_106[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_89 (Concatenate)    (None, 153)          0           concatenate_88[0][0]             \n",
      "                                                                 dropout_89[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_107 (Dense)               (None, 24)           3696        concatenate_89[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_90 (Dropout)            (None, 24)           0           dense_107[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_90 (Concatenate)    (None, 177)          0           concatenate_89[0][0]             \n",
      "                                                                 dropout_90[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_18 (Highway)            (None, 177)          63012       concatenate_90[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_108 (Dense)               (None, 3)            534         highway_18[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 5s - loss: 0.2695 - acc: 0.8860 - weighted_accuracy: 0.8652 - val_loss: 0.2561 - val_acc: 0.8864 - val_weighted_accuracy: 0.8586\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2511 - acc: 0.8912 - weighted_accuracy: 0.8699 - val_loss: 0.2516 - val_acc: 0.8882 - val_weighted_accuracy: 0.8646\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2503 - acc: 0.8914 - weighted_accuracy: 0.8706 - val_loss: 0.2522 - val_acc: 0.8877 - val_weighted_accuracy: 0.8688\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2498 - acc: 0.8918 - weighted_accuracy: 0.8713 - val_loss: 0.2522 - val_acc: 0.8881 - val_weighted_accuracy: 0.8685\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2489 - acc: 0.8919 - weighted_accuracy: 0.8713 - val_loss: 0.2516 - val_acc: 0.8884 - val_weighted_accuracy: 0.8674\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2488 - acc: 0.8920 - weighted_accuracy: 0.8713 - val_loss: 0.2515 - val_acc: 0.8882 - val_weighted_accuracy: 0.8653\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2486 - acc: 0.8922 - weighted_accuracy: 0.8718 - val_loss: 0.2515 - val_acc: 0.8873 - val_weighted_accuracy: 0.8662\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2481 - acc: 0.8918 - weighted_accuracy: 0.8712 - val_loss: 0.2517 - val_acc: 0.8883 - val_weighted_accuracy: 0.8672\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2482 - acc: 0.8922 - weighted_accuracy: 0.8718 - val_loss: 0.2513 - val_acc: 0.8881 - val_weighted_accuracy: 0.8655\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2480 - acc: 0.8924 - weighted_accuracy: 0.8720 - val_loss: 0.2517 - val_acc: 0.8874 - val_weighted_accuracy: 0.8666\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2477 - acc: 0.8925 - weighted_accuracy: 0.8722 - val_loss: 0.2525 - val_acc: 0.8874 - val_weighted_accuracy: 0.8686\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2477 - acc: 0.8928 - weighted_accuracy: 0.8724 - val_loss: 0.2517 - val_acc: 0.8881 - val_weighted_accuracy: 0.8653\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2476 - acc: 0.8926 - weighted_accuracy: 0.8720 - val_loss: 0.2525 - val_acc: 0.8880 - val_weighted_accuracy: 0.8689\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2473 - acc: 0.8930 - weighted_accuracy: 0.8728 - val_loss: 0.2520 - val_acc: 0.8876 - val_weighted_accuracy: 0.8672\n",
      "Epoch 15/500\n",
      " - 2s - loss: 0.2471 - acc: 0.8928 - weighted_accuracy: 0.8726 - val_loss: 0.2518 - val_acc: 0.8883 - val_weighted_accuracy: 0.8658\n",
      "Epoch 16/500\n",
      " - 2s - loss: 0.2472 - acc: 0.8928 - weighted_accuracy: 0.8725 - val_loss: 0.2535 - val_acc: 0.8882 - val_weighted_accuracy: 0.8697\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2470 - acc: 0.8931 - weighted_accuracy: 0.8731 - val_loss: 0.2530 - val_acc: 0.8868 - val_weighted_accuracy: 0.8678\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2467 - acc: 0.8933 - weighted_accuracy: 0.8734 - val_loss: 0.2526 - val_acc: 0.8872 - val_weighted_accuracy: 0.8627\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2466 - acc: 0.8932 - weighted_accuracy: 0.8732 - val_loss: 0.2519 - val_acc: 0.8876 - val_weighted_accuracy: 0.8662\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_109 (Dense)               (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_91 (Dropout)            (None, 24)           0           dense_109[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_91 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_91[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_110 (Dense)               (None, 24)           1968        concatenate_91[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_92 (Dropout)            (None, 24)           0           dense_110[0][0]                  \n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "concatenate_92 (Concatenate)    (None, 105)          0           concatenate_91[0][0]             \n",
      "                                                                 dropout_92[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_111 (Dense)               (None, 24)           2544        concatenate_92[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_93 (Dropout)            (None, 24)           0           dense_111[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_93 (Concatenate)    (None, 129)          0           concatenate_92[0][0]             \n",
      "                                                                 dropout_93[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_112 (Dense)               (None, 24)           3120        concatenate_93[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_94 (Dropout)            (None, 24)           0           dense_112[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_94 (Concatenate)    (None, 153)          0           concatenate_93[0][0]             \n",
      "                                                                 dropout_94[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_113 (Dense)               (None, 24)           3696        concatenate_94[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_95 (Dropout)            (None, 24)           0           dense_113[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_95 (Concatenate)    (None, 177)          0           concatenate_94[0][0]             \n",
      "                                                                 dropout_95[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "highway_19 (Highway)            (None, 177)          63012       concatenate_95[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_114 (Dense)               (None, 3)            534         highway_19[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288497 samples, validate on 32055 samples\n",
      "Epoch 1/500\n",
      " - 5s - loss: 0.2682 - acc: 0.8865 - weighted_accuracy: 0.8641 - val_loss: 0.2526 - val_acc: 0.8920 - val_weighted_accuracy: 0.8760\n",
      "Epoch 2/500\n",
      " - 2s - loss: 0.2508 - acc: 0.8910 - weighted_accuracy: 0.8692 - val_loss: 0.2520 - val_acc: 0.8931 - val_weighted_accuracy: 0.8754\n",
      "Epoch 3/500\n",
      " - 2s - loss: 0.2500 - acc: 0.8909 - weighted_accuracy: 0.8689 - val_loss: 0.2507 - val_acc: 0.8934 - val_weighted_accuracy: 0.8746\n",
      "Epoch 4/500\n",
      " - 2s - loss: 0.2497 - acc: 0.8911 - weighted_accuracy: 0.8696 - val_loss: 0.2518 - val_acc: 0.8928 - val_weighted_accuracy: 0.8786\n",
      "Epoch 5/500\n",
      " - 2s - loss: 0.2492 - acc: 0.8914 - weighted_accuracy: 0.8699 - val_loss: 0.2503 - val_acc: 0.8929 - val_weighted_accuracy: 0.8754\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2488 - acc: 0.8915 - weighted_accuracy: 0.8704 - val_loss: 0.2504 - val_acc: 0.8937 - val_weighted_accuracy: 0.8723\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2489 - acc: 0.8913 - weighted_accuracy: 0.8700 - val_loss: 0.2506 - val_acc: 0.8934 - val_weighted_accuracy: 0.8722\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2487 - acc: 0.8914 - weighted_accuracy: 0.8701 - val_loss: 0.2505 - val_acc: 0.8932 - val_weighted_accuracy: 0.8759\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2486 - acc: 0.8917 - weighted_accuracy: 0.8705 - val_loss: 0.2504 - val_acc: 0.8928 - val_weighted_accuracy: 0.8778\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2483 - acc: 0.8918 - weighted_accuracy: 0.8708 - val_loss: 0.2507 - val_acc: 0.8929 - val_weighted_accuracy: 0.8795\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2482 - acc: 0.8918 - weighted_accuracy: 0.8708 - val_loss: 0.2500 - val_acc: 0.8932 - val_weighted_accuracy: 0.8749\n",
      "Epoch 12/500\n",
      " - 3s - loss: 0.2479 - acc: 0.8917 - weighted_accuracy: 0.8708 - val_loss: 0.2503 - val_acc: 0.8935 - val_weighted_accuracy: 0.8780\n",
      "Epoch 13/500\n",
      " - 3s - loss: 0.2479 - acc: 0.8919 - weighted_accuracy: 0.8711 - val_loss: 0.2516 - val_acc: 0.8925 - val_weighted_accuracy: 0.8663\n",
      "Epoch 14/500\n",
      " - 3s - loss: 0.2478 - acc: 0.8921 - weighted_accuracy: 0.8712 - val_loss: 0.2497 - val_acc: 0.8939 - val_weighted_accuracy: 0.8755\n",
      "Epoch 15/500\n",
      " - 3s - loss: 0.2477 - acc: 0.8920 - weighted_accuracy: 0.8708 - val_loss: 0.2502 - val_acc: 0.8941 - val_weighted_accuracy: 0.8777\n",
      "Epoch 16/500\n",
      " - 3s - loss: 0.2476 - acc: 0.8918 - weighted_accuracy: 0.8707 - val_loss: 0.2499 - val_acc: 0.8938 - val_weighted_accuracy: 0.8745\n",
      "Epoch 17/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8920 - weighted_accuracy: 0.8711 - val_loss: 0.2499 - val_acc: 0.8933 - val_weighted_accuracy: 0.8756\n",
      "Epoch 18/500\n",
      " - 2s - loss: 0.2474 - acc: 0.8923 - weighted_accuracy: 0.8713 - val_loss: 0.2498 - val_acc: 0.8932 - val_weighted_accuracy: 0.8782\n",
      "Epoch 19/500\n",
      " - 2s - loss: 0.2471 - acc: 0.8922 - weighted_accuracy: 0.8713 - val_loss: 0.2496 - val_acc: 0.8943 - val_weighted_accuracy: 0.8762\n",
      "Epoch 20/500\n",
      " - 2s - loss: 0.2470 - acc: 0.8923 - weighted_accuracy: 0.8715 - val_loss: 0.2497 - val_acc: 0.8940 - val_weighted_accuracy: 0.8756\n",
      "Epoch 21/500\n",
      " - 2s - loss: 0.2468 - acc: 0.8924 - weighted_accuracy: 0.8715 - val_loss: 0.2507 - val_acc: 0.8935 - val_weighted_accuracy: 0.8724\n",
      "Epoch 22/500\n",
      " - 2s - loss: 0.2465 - acc: 0.8925 - weighted_accuracy: 0.8718 - val_loss: 0.2503 - val_acc: 0.8928 - val_weighted_accuracy: 0.8718\n",
      "Epoch 23/500\n",
      " - 2s - loss: 0.2464 - acc: 0.8929 - weighted_accuracy: 0.8722 - val_loss: 0.2502 - val_acc: 0.8938 - val_weighted_accuracy: 0.8748\n",
      "Epoch 24/500\n",
      " - 2s - loss: 0.2461 - acc: 0.8928 - weighted_accuracy: 0.8722 - val_loss: 0.2504 - val_acc: 0.8937 - val_weighted_accuracy: 0.8716\n",
      "Epoch 25/500\n",
      " - 2s - loss: 0.2460 - acc: 0.8929 - weighted_accuracy: 0.8722 - val_loss: 0.2498 - val_acc: 0.8928 - val_weighted_accuracy: 0.8766\n",
      "Epoch 26/500\n",
      " - 2s - loss: 0.2458 - acc: 0.8932 - weighted_accuracy: 0.8726 - val_loss: 0.2497 - val_acc: 0.8938 - val_weighted_accuracy: 0.8749\n",
      "Epoch 27/500\n",
      " - 2s - loss: 0.2455 - acc: 0.8936 - weighted_accuracy: 0.8731 - val_loss: 0.2498 - val_acc: 0.8925 - val_weighted_accuracy: 0.8744\n",
      "Epoch 28/500\n",
      " - 2s - loss: 0.2454 - acc: 0.8934 - weighted_accuracy: 0.8730 - val_loss: 0.2505 - val_acc: 0.8938 - val_weighted_accuracy: 0.8737\n",
      "Epoch 29/500\n",
      " - 2s - loss: 0.2452 - acc: 0.8933 - weighted_accuracy: 0.8729 - val_loss: 0.2510 - val_acc: 0.8926 - val_weighted_accuracy: 0.8768\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "mata-features (InputLayer)      (None, 57)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "dense_115 (Dense)               (None, 24)           1392        mata-features[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_96 (Dropout)            (None, 24)           0           dense_115[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_96 (Concatenate)    (None, 81)           0           mata-features[0][0]              \n",
      "                                                                 dropout_96[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_116 (Dense)               (None, 24)           1968        concatenate_96[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_97 (Dropout)            (None, 24)           0           dense_116[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_97 (Concatenate)    (None, 105)          0           concatenate_96[0][0]             \n",
      "                                                                 dropout_97[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_117 (Dense)               (None, 24)           2544        concatenate_97[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_98 (Dropout)            (None, 24)           0           dense_117[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_98 (Concatenate)    (None, 129)          0           concatenate_97[0][0]             \n",
      "                                                                 dropout_98[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_118 (Dense)               (None, 24)           3120        concatenate_98[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_99 (Dropout)            (None, 24)           0           dense_118[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_99 (Concatenate)    (None, 153)          0           concatenate_98[0][0]             \n",
      "                                                                 dropout_99[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_119 (Dense)               (None, 24)           3696        concatenate_99[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dropout_100 (Dropout)           (None, 24)           0           dense_119[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_100 (Concatenate)   (None, 177)          0           concatenate_99[0][0]             \n",
      "                                                                 dropout_100[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "highway_20 (Highway)            (None, 177)          63012       concatenate_100[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "dense_120 (Dense)               (None, 3)            534         highway_20[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 76,266\n",
      "Trainable params: 76,266\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "Train on 288495 samples, validate on 32057 samples\n",
      "Epoch 1/500\n",
      " - 5s - loss: 0.2666 - acc: 0.8871 - weighted_accuracy: 0.8659 - val_loss: 0.2448 - val_acc: 0.8948 - val_weighted_accuracy: 0.8698\n",
      "Epoch 2/500\n",
      " - 3s - loss: 0.2516 - acc: 0.8906 - weighted_accuracy: 0.8693 - val_loss: 0.2450 - val_acc: 0.8942 - val_weighted_accuracy: 0.8657\n",
      "Epoch 3/500\n",
      " - 3s - loss: 0.2505 - acc: 0.8910 - weighted_accuracy: 0.8700 - val_loss: 0.2454 - val_acc: 0.8946 - val_weighted_accuracy: 0.8717\n",
      "Epoch 4/500\n",
      " - 3s - loss: 0.2504 - acc: 0.8910 - weighted_accuracy: 0.8703 - val_loss: 0.2433 - val_acc: 0.8940 - val_weighted_accuracy: 0.8672\n",
      "Epoch 5/500\n",
      " - 3s - loss: 0.2499 - acc: 0.8914 - weighted_accuracy: 0.8705 - val_loss: 0.2444 - val_acc: 0.8932 - val_weighted_accuracy: 0.8652\n",
      "Epoch 6/500\n",
      " - 2s - loss: 0.2494 - acc: 0.8916 - weighted_accuracy: 0.8710 - val_loss: 0.2444 - val_acc: 0.8935 - val_weighted_accuracy: 0.8659\n",
      "Epoch 7/500\n",
      " - 2s - loss: 0.2493 - acc: 0.8917 - weighted_accuracy: 0.8712 - val_loss: 0.2436 - val_acc: 0.8930 - val_weighted_accuracy: 0.8643\n",
      "Epoch 8/500\n",
      " - 2s - loss: 0.2494 - acc: 0.8914 - weighted_accuracy: 0.8708 - val_loss: 0.2433 - val_acc: 0.8939 - val_weighted_accuracy: 0.8716\n",
      "Epoch 9/500\n",
      " - 2s - loss: 0.2490 - acc: 0.8917 - weighted_accuracy: 0.8711 - val_loss: 0.2441 - val_acc: 0.8943 - val_weighted_accuracy: 0.8694\n",
      "Epoch 10/500\n",
      " - 2s - loss: 0.2490 - acc: 0.8916 - weighted_accuracy: 0.8711 - val_loss: 0.2444 - val_acc: 0.8935 - val_weighted_accuracy: 0.8643\n",
      "Epoch 11/500\n",
      " - 2s - loss: 0.2487 - acc: 0.8919 - weighted_accuracy: 0.8713 - val_loss: 0.2440 - val_acc: 0.8936 - val_weighted_accuracy: 0.8702\n",
      "Epoch 12/500\n",
      " - 2s - loss: 0.2484 - acc: 0.8918 - weighted_accuracy: 0.8715 - val_loss: 0.2436 - val_acc: 0.8934 - val_weighted_accuracy: 0.8672\n",
      "Epoch 13/500\n",
      " - 2s - loss: 0.2484 - acc: 0.8918 - weighted_accuracy: 0.8715 - val_loss: 0.2451 - val_acc: 0.8933 - val_weighted_accuracy: 0.8636\n",
      "Epoch 14/500\n",
      " - 2s - loss: 0.2483 - acc: 0.8921 - weighted_accuracy: 0.8716 - val_loss: 0.2445 - val_acc: 0.8928 - val_weighted_accuracy: 0.8657\n"
     ]
    }
   ],
   "source": [
    "trainer = KerasModelTrainer(model_stamp=\"Ensemble-DenseNet\", epoch_num=500)\n",
    "models, score, folds_preds = trainer.train_folds(features=ensemble_trains, y=to_categorical(labels), augments=None, fold_count=10,\n",
    "    batch_size=1024, early_stop_on='val_loss',\n",
    "    scale_sample_weight=None, class_weight=None,\n",
    "    get_model_func=_agent_get_model, \n",
    "    patience=20)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "score 0.873541072919297\n",
      "Predicting training results...\n",
      "Predicting testing results...\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 11us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 15us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 14us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 12us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 14us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 16us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 15us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 15us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 14us/step\n",
      "80126/80126 [==============================] - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - ETA:  - 1s 14us/step\n",
      "Predicting labeled testing results...\n"
     ]
    }
   ],
   "source": [
    "print(\"score\", score)\n",
    "oofs_dir = \"../data/p_ensemble/oofs/\"\n",
    "output_dir = \"../data/p_ensemble/preds/\"\n",
    "onehot_pred_dir = \"../data/p_ensemble/nn_one_hot/\"\n",
    "\n",
    "model_submit_prefix = \"AddNN-Ensemble-Weighted-StopOnLoss\"\n",
    "\n",
    "oofs_path = oofs_dir + model_submit_prefix\n",
    "output_path = output_dir + model_submit_prefix\n",
    "one_hot_pred_path = onehot_pred_dir + \"One-Hot\" + model_submit_prefix\n",
    "\n",
    "print(\"Predicting training results...\")\n",
    "train_predicts = np.concatenate(folds_preds, axis=0)\n",
    "score = np_weighted_accuracy(to_categorical(labels), train_predicts)\n",
    "\n",
    "oofs = pd.DataFrame({\"unrelated\": train_predicts[:, 0], \"agreed\": train_predicts[:, 1], \"disagreed\": train_predicts[:, 2]})\n",
    "submit_path = oofs_path + \"-Train-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "oofs.to_csv(submit_path, index=False)\n",
    "\n",
    "print(\"Predicting testing results...\")\n",
    "test_predicts_list = []\n",
    "for fold_id, model in enumerate(models):\n",
    "    test_predicts = model.predict({\"mata-features\": ensemble_tests}, batch_size=128, verbose=1)\n",
    "    test_predicts_list.append(test_predicts)\n",
    "\n",
    "test_predicts = np.zeros(test_predicts_list[0].shape)\n",
    "for fold_predict in test_predicts_list:\n",
    "    test_predicts += fold_predict\n",
    "test_predicts /= len(test_predicts_list)\n",
    "\n",
    "test_predicts = pd.DataFrame({\"unrelated\": test_predicts[:, 0], \"agreed\": test_predicts[:, 1], \"disagreed\": test_predicts[:, 2]})\n",
    "submit_path = output_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "test_predicts.to_csv(submit_path, index=False) # 0.3343\n",
    "\n",
    "print(\"Predicting labeled testing results...\")\n",
    "ids = pd.read_csv(\"../data/dataset/test.csv\")\n",
    "pred_labels = test_predicts.idxmax(axis=1)\n",
    "sub = pd.DataFrame({\"Id\": ids['id'].values, \"Category\": pred_labels})\n",
    "submit_path = one_hot_pred_path + \"-L{:4f}-NB{:d}.csv\".format(score, NB_WORDS)\n",
    "sub.to_csv(submit_path, index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "score = np_weighted_accuracy(to_categorical(labels), train_predicts)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.8670897024003137"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "score"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  },
  "widgets": {
   "application/vnd.jupyter.widget-state+json": {
    "state": {},
    "version_major": 1,
    "version_minor": 0
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
