{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Use %pip (not !pip) so the package installs into the active kernel's environment\n",
    "%pip install import_ipynb"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Use %pip (not !pip) so the package installs into the active kernel's environment\n",
    "%pip install xgboost"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Display every expression result in a cell, not just the last one,\n",
    "# so cells below can show several values (e.g. len(x_train) and\n",
    "# pca.n_components_) without explicit print calls.\n",
    "from IPython.core.interactiveshell import InteractiveShell\n",
    "InteractiveShell.ast_node_interactivity = \"all\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import json\n",
    "import csv\n",
    "import urllib.request\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import pymysql\n",
    "from io import BufferedReader\n",
    "import import_ipynb\n",
    "import ML_Portfolio_All_statemens\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "\n",
    "# THIS CODE NEEDS TO BE RUN BEFORE MAKING THE SQL CONNECTION\n",
    "\n",
    "# Register an escaper for numpy.float64 so pymysql can serialize values\n",
    "# coming from numpy/pandas, then rebuild its conversion table from the\n",
    "# updated encoders plus the existing decoders.\n",
    "pymysql.converters.encoders[np.float64] = pymysql.converters.escape_float\n",
    "pymysql.converters.conversions = pymysql.converters.encoders.copy()\n",
    "pymysql.converters.conversions.update(pymysql.converters.decoders)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As we saw in the previous notebook, the DNN suffered from overfitting, and the large number of features may also be hurting the random forest's accuracy. So we will run a PCA analysis to determine how many components are needed to explain 95% of the variance in the data."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "21816"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "18"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.decomposition import PCA\n",
    "\n",
    "# PCA(0.95) keeps just enough components to explain 95% of the variance\n",
    "# of the scaled training data; the test split is projected with the same\n",
    "# train-fitted transform. Both values below display (interactivity=\"all\").\n",
    "pca=PCA(0.95).fit(ML_Portfolio_All_statemens.X_scaledtr)\n",
    "x_train=pca.transform(ML_Portfolio_All_statemens.X_scaledtr)\n",
    "x_test=pca.transform(ML_Portfolio_All_statemens.X_scaledte)\n",
    "len(x_train)\n",
    "pca.n_components_"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Since the PCA analysis reports that 18 components are sufficient, we will take the top 18 most important features from the random forest feature-importance ranking calculated and shown in the ML_Portfolio_All_statemens.ipynb file."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "On the basis of these top 18 features, we will rebuild our random forest model and recheck its accuracy. \n",
    "We will also re-run the DNN to check whether we can reduce the overfitting while improving the DNN model's accuracy."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>EPS_DILUTED</th>\n",
       "      <th>EBITDA</th>\n",
       "      <th>NET_CASH_FLOW</th>\n",
       "      <th>NET_CASH_MARKET_CAP</th>\n",
       "      <th>EBIT</th>\n",
       "      <th>CASH_CASH_EQUIVALENT</th>\n",
       "      <th>OPERATING_INCOME</th>\n",
       "      <th>EPS</th>\n",
       "      <th>REVENUE_GROWTH</th>\n",
       "      <th>FREE_CASH_FLOW</th>\n",
       "      <th>...</th>\n",
       "      <th>CONSOLIDATED_INCOME</th>\n",
       "      <th>EBIT_MARGIN</th>\n",
       "      <th>OPERATING_CASH_FLOW</th>\n",
       "      <th>TOTAL_SHAREHOLDERS_EQUITY</th>\n",
       "      <th>FINANCING_CASH_FLOW</th>\n",
       "      <th>FCF_MARGIN</th>\n",
       "      <th>NET_INCOME_COM</th>\n",
       "      <th>RETAINED_EARNINGS_DEFICIT</th>\n",
       "      <th>recession_prob</th>\n",
       "      <th>treasury_rate</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>-0.180000</td>\n",
       "      <td>-1,018,041.000000</td>\n",
       "      <td>241,076.000000</td>\n",
       "      <td>1.482400</td>\n",
       "      <td>-1,093,626.000000</td>\n",
       "      <td>20,103,502.000000</td>\n",
       "      <td>-1,349,454.000000</td>\n",
       "      <td>-0.180000</td>\n",
       "      <td>-0.126800</td>\n",
       "      <td>227,128.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>-1,394,125.000000</td>\n",
       "      <td>-0.062700</td>\n",
       "      <td>352,086.000000</td>\n",
       "      <td>22,522,439.000000</td>\n",
       "      <td>13,948.000000</td>\n",
       "      <td>0.013000</td>\n",
       "      <td>-1,394,125.000000</td>\n",
       "      <td>7,594,492.000000</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1.440000</td>\n",
       "      <td>42,217,000.000000</td>\n",
       "      <td>-10,397,000.000000</td>\n",
       "      <td>-0.660900</td>\n",
       "      <td>27,612,000.000000</td>\n",
       "      <td>100,205,000.000000</td>\n",
       "      <td>37,023,000.000000</td>\n",
       "      <td>1.440000</td>\n",
       "      <td>-0.067300</td>\n",
       "      <td>51,633,000.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>33,030,000.000000</td>\n",
       "      <td>0.058800</td>\n",
       "      <td>55,640,000.000000</td>\n",
       "      <td>188,041,000.000000</td>\n",
       "      <td>-52,960,000.000000</td>\n",
       "      <td>0.110000</td>\n",
       "      <td>19,076,000.000000</td>\n",
       "      <td>60,048,000.000000</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>-0.050000</td>\n",
       "      <td>238,725.000000</td>\n",
       "      <td>232,105.000000</td>\n",
       "      <td>-0.006800</td>\n",
       "      <td>-163,390.000000</td>\n",
       "      <td>2,126,150.000000</td>\n",
       "      <td>-401,390.000000</td>\n",
       "      <td>-0.050000</td>\n",
       "      <td>-0.090500</td>\n",
       "      <td>-487,406.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>-743,441.000000</td>\n",
       "      <td>-0.016700</td>\n",
       "      <td>788,313.000000</td>\n",
       "      <td>11,067,483.000000</td>\n",
       "      <td>462,155.000000</td>\n",
       "      <td>-0.049800</td>\n",
       "      <td>-743,441.000000</td>\n",
       "      <td>-5,734,591.000000</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>6.720000</td>\n",
       "      <td>24,044,419.000000</td>\n",
       "      <td>-798,360.000000</td>\n",
       "      <td>0.124300</td>\n",
       "      <td>22,032,207.000000</td>\n",
       "      <td>14,404,500.000000</td>\n",
       "      <td>20,926,323.000000</td>\n",
       "      <td>6.720000</td>\n",
       "      <td>0.422600</td>\n",
       "      <td>2,886,297.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>16,404,600.000000</td>\n",
       "      <td>0.371900</td>\n",
       "      <td>3,341,294.000000</td>\n",
       "      <td>61,445,968.000000</td>\n",
       "      <td>1,963,035.000000</td>\n",
       "      <td>0.048700</td>\n",
       "      <td>15,188,773.000000</td>\n",
       "      <td>30,237,707.000000</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>-1.140000</td>\n",
       "      <td>-1,728,000.000000</td>\n",
       "      <td>-35,000.000000</td>\n",
       "      <td>-0.104000</td>\n",
       "      <td>-2,087,000.000000</td>\n",
       "      <td>155,000.000000</td>\n",
       "      <td>-2,729,000.000000</td>\n",
       "      <td>-1.140000</td>\n",
       "      <td>0.151100</td>\n",
       "      <td>285,000.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>-2,210,000.000000</td>\n",
       "      <td>-0.509300</td>\n",
       "      <td>736,000.000000</td>\n",
       "      <td>18,282,000.000000</td>\n",
       "      <td>52,000.000000</td>\n",
       "      <td>0.069500</td>\n",
       "      <td>-2,210,000.000000</td>\n",
       "      <td>-40,577,000.000000</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 23 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   EPS_DILUTED            EBITDA      NET_CASH_FLOW  NET_CASH_MARKET_CAP  \\\n",
       "0    -0.180000 -1,018,041.000000     241,076.000000             1.482400   \n",
       "1     1.440000 42,217,000.000000 -10,397,000.000000            -0.660900   \n",
       "2    -0.050000    238,725.000000     232,105.000000            -0.006800   \n",
       "3     6.720000 24,044,419.000000    -798,360.000000             0.124300   \n",
       "4    -1.140000 -1,728,000.000000     -35,000.000000            -0.104000   \n",
       "\n",
       "               EBIT  CASH_CASH_EQUIVALENT  OPERATING_INCOME       EPS  \\\n",
       "0 -1,093,626.000000     20,103,502.000000 -1,349,454.000000 -0.180000   \n",
       "1 27,612,000.000000    100,205,000.000000 37,023,000.000000  1.440000   \n",
       "2   -163,390.000000      2,126,150.000000   -401,390.000000 -0.050000   \n",
       "3 22,032,207.000000     14,404,500.000000 20,926,323.000000  6.720000   \n",
       "4 -2,087,000.000000        155,000.000000 -2,729,000.000000 -1.140000   \n",
       "\n",
       "   REVENUE_GROWTH    FREE_CASH_FLOW      ...        CONSOLIDATED_INCOME  \\\n",
       "0       -0.126800    227,128.000000      ...          -1,394,125.000000   \n",
       "1       -0.067300 51,633,000.000000      ...          33,030,000.000000   \n",
       "2       -0.090500   -487,406.000000      ...            -743,441.000000   \n",
       "3        0.422600  2,886,297.000000      ...          16,404,600.000000   \n",
       "4        0.151100    285,000.000000      ...          -2,210,000.000000   \n",
       "\n",
       "   EBIT_MARGIN  OPERATING_CASH_FLOW  TOTAL_SHAREHOLDERS_EQUITY  \\\n",
       "0    -0.062700       352,086.000000          22,522,439.000000   \n",
       "1     0.058800    55,640,000.000000         188,041,000.000000   \n",
       "2    -0.016700       788,313.000000          11,067,483.000000   \n",
       "3     0.371900     3,341,294.000000          61,445,968.000000   \n",
       "4    -0.509300       736,000.000000          18,282,000.000000   \n",
       "\n",
       "   FINANCING_CASH_FLOW  FCF_MARGIN    NET_INCOME_COM  \\\n",
       "0        13,948.000000    0.013000 -1,394,125.000000   \n",
       "1   -52,960,000.000000    0.110000 19,076,000.000000   \n",
       "2       462,155.000000   -0.049800   -743,441.000000   \n",
       "3     1,963,035.000000    0.048700 15,188,773.000000   \n",
       "4        52,000.000000    0.069500 -2,210,000.000000   \n",
       "\n",
       "   RETAINED_EARNINGS_DEFICIT  recession_prob  treasury_rate  \n",
       "0           7,594,492.000000        0.120000       3.266284  \n",
       "1          60,048,000.000000        0.120000       3.266284  \n",
       "2          -5,734,591.000000        0.120000       3.266284  \n",
       "3          30,237,707.000000        0.120000       3.266284  \n",
       "4         -40,577,000.000000        0.120000       3.266284  \n",
       "\n",
       "[5 rows x 23 columns]"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# Selected fundamental features plus the macro indicators\n",
    "# (recession_prob, treasury_rate) from the merged parameter frame.\n",
    "X = ML_Portfolio_All_statemens.dfmerge_param[['EPS_DILUTED','EBITDA','NET_CASH_FLOW','NET_CASH_MARKET_CAP','EBIT','CASH_CASH_EQUIVALENT',\n",
    "                                        'OPERATING_INCOME','EPS','REVENUE_GROWTH','FREE_CASH_FLOW','ISSUANCE_SHARES_BUYBACKS'\n",
    "                                        ,'PROFIT_MARGIN','NET_INCOME','CONSOLIDATED_INCOME','EBIT_MARGIN', 'OPERATING_CASH_FLOW',\n",
    "                                         'TOTAL_SHAREHOLDERS_EQUITY','FINANCING_CASH_FLOW','FCF_MARGIN','NET_INCOME_COM',\n",
    "                                              'RETAINED_EARNINGS_DEFICIT','recession_prob','treasury_rate']].copy()\n",
    "X.head()\n",
    "Y = ML_Portfolio_All_statemens.dfmerge_param['TREND'] #target\n",
    "# random_state pins the split so accuracy numbers are reproducible across\n",
    "# kernel restarts (matches random_state=1 used by the models below).\n",
    "X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.30, random_state=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',\n",
       "            max_depth=None, max_features='auto', max_leaf_nodes=None,\n",
       "            min_impurity_decrease=0.0, min_impurity_split=None,\n",
       "            min_samples_leaf=1, min_samples_split=10,\n",
       "            min_weight_fraction_leaf=0.0, n_estimators=100, n_jobs=-1,\n",
       "            oob_score=True, random_state=1, verbose=0, warm_start=False)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "0.9786853685368537"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "oob score: 72.37 %\n"
     ]
    }
   ],
   "source": [
    "from sklearn.ensemble import RandomForestClassifier\n",
    "\n",
    "# max_features='sqrt' is exactly what 'auto' meant for classifiers;\n",
    "# 'auto' was deprecated and later removed from scikit-learn.\n",
    "random_forest = RandomForestClassifier(criterion = \"gini\", \n",
    "                                       min_samples_leaf = 1, \n",
    "                                       min_samples_split = 10,   \n",
    "                                       n_estimators=100, \n",
    "                                       max_features='sqrt', \n",
    "                                       oob_score=True, \n",
    "                                       random_state=1, \n",
    "                                       n_jobs=-1)\n",
    "\n",
    "random_forest.fit(X_train, Y_train)\n",
    "\n",
    "Y_prediction = random_forest.predict(X_test)\n",
    "\n",
    "# Train accuracy; compare with the out-of-bag score below — a large gap\n",
    "# indicates overfitting on the training split.\n",
    "random_forest.score(X_train, Y_train)\n",
    "\n",
    "# Multiply before rounding so the printed percentage has no float\n",
    "# artifacts (round(x, 4)*100 can yield e.g. 72.37000000000001).\n",
    "print(\"oob score:\", round(random_forest.oob_score_ * 100, 2), \"%\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.7315795102128114\n",
      "             precision    recall  f1-score   support\n",
      "\n",
      "          0       0.74      0.75      0.74      4842\n",
      "          1       0.73      0.71      0.72      4509\n",
      "\n",
      "avg / total       0.73      0.73      0.73      9351\n",
      "\n",
      "[[3621 1221]\n",
      " [1289 3220]]\n"
     ]
    }
   ],
   "source": [
    "from sklearn.metrics import precision_recall_fscore_support,classification_report,confusion_matrix, accuracy_score\n",
    "\n",
    "# Held-out performance of the random forest: accuracy, per-class\n",
    "# precision/recall/F1, and the confusion matrix (rows = true class).\n",
    "print(accuracy_score(Y_test,Y_prediction))\n",
    "print(classification_report(Y_test,Y_prediction))\n",
    "print(confusion_matrix(Y_test,Y_prediction))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\Sudip\\Anaconda3\\lib\\site-packages\\sklearn\\preprocessing\\label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty.\n",
      "  if diff:\n",
      "C:\\Users\\Sudip\\Anaconda3\\lib\\site-packages\\sklearn\\preprocessing\\label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty.\n",
      "  if diff:\n"
     ]
    }
   ],
   "source": [
    "import xgboost as xgb\n",
    "from sklearn.metrics import mean_squared_error\n",
    "\n",
    "# 'binary:logistic' is the binary-classification objective;\n",
    "# 'reg:logistic' is the regression form and is not appropriate\n",
    "# for XGBClassifier.\n",
    "xgbclassifier = xgb.XGBClassifier(objective ='binary:logistic', learning_rate = 0.009,\n",
    "                max_depth = 10, alpha = 10, n_estimators = 50, random_state=1)\n",
    "\n",
    "model_xgb = xgbclassifier.fit(X_train,Y_train)\n",
    "xgb_pred = xgbclassifier.predict(X_test)\n",
    "\n",
    "# Train accuracy in percent; display it so the value isn't silently discarded.\n",
    "acc_xgb = round(xgbclassifier.score(X_train, Y_train) * 100, 2)\n",
    "acc_xgb"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.722703454176024\n",
      "             precision    recall  f1-score   support\n",
      "\n",
      "          0       0.72      0.75      0.74      4842\n",
      "          1       0.72      0.69      0.71      4509\n",
      "\n",
      "avg / total       0.72      0.72      0.72      9351\n",
      "\n",
      "[[3655 1187]\n",
      " [1406 3103]]\n"
     ]
    }
   ],
   "source": [
    "from sklearn.metrics import precision_recall_fscore_support,classification_report,confusion_matrix, accuracy_score\n",
    "\n",
    "# Held-out performance of the XGBoost model, reported the same way as the\n",
    "# random forest above so the two models are directly comparable.\n",
    "print(accuracy_score(Y_test,xgb_pred))\n",
    "print(classification_report(Y_test,xgb_pred))\n",
    "print(confusion_matrix(Y_test,xgb_pred))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "# Fit the scaler on the training split ONLY, then apply that same\n",
    "# transformation to both splits. Fitting a second scaler on X_test\n",
    "# leaks test-set statistics and puts the two splits in inconsistent\n",
    "# feature spaces.\n",
    "scaler_tr = StandardScaler().fit(X_train)\n",
    "X_scaledtr = scaler_tr.transform(X_train)\n",
    "\n",
    "# Keep the old name as an alias for any downstream cells that reference it.\n",
    "scaler_te = scaler_tr\n",
    "X_scaledte = scaler_te.transform(X_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 21816 samples, validate on 9351 samples\n",
      "Epoch 1/200\n",
      "21816/21816 [==============================] - 3s 140us/step - loss: 4.0188 - acc: 0.5750 - val_loss: 3.5046 - val_acc: 0.6073\n",
      "Epoch 2/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 3.1113 - acc: 0.6190 - val_loss: 2.7154 - val_acc: 0.6113\n",
      "Epoch 3/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 2.4196 - acc: 0.6399 - val_loss: 2.1243 - val_acc: 0.6365\n",
      "Epoch 4/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.9039 - acc: 0.6645 - val_loss: 1.6846 - val_acc: 0.6629\n",
      "Epoch 5/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.5192 - acc: 0.6718 - val_loss: 1.3609 - val_acc: 0.6654\n",
      "Epoch 6/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.2389 - acc: 0.6746 - val_loss: 1.1294 - val_acc: 0.6729\n",
      "Epoch 7/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.0402 - acc: 0.6767 - val_loss: 0.9679 - val_acc: 0.6729\n",
      "Epoch 8/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.9003 - acc: 0.6778 - val_loss: 0.8534 - val_acc: 0.6715\n",
      "Epoch 9/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.8057 - acc: 0.6780 - val_loss: 0.7757 - val_acc: 0.6724\n",
      "Epoch 10/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.7391 - acc: 0.6819 - val_loss: 0.7237 - val_acc: 0.6839\n",
      "Epoch 11/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6936 - acc: 0.6833 - val_loss: 0.6834 - val_acc: 0.6801\n",
      "Epoch 12/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6629 - acc: 0.6867 - val_loss: 0.6685 - val_acc: 0.6790\n",
      "Epoch 13/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6432 - acc: 0.6867 - val_loss: 0.6485 - val_acc: 0.6812\n",
      "Epoch 14/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6299 - acc: 0.6890 - val_loss: 0.6407 - val_acc: 0.6806\n",
      "Epoch 15/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6222 - acc: 0.6879 - val_loss: 0.6354 - val_acc: 0.6799\n",
      "Epoch 16/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6224 - acc: 0.6820 - val_loss: 0.6264 - val_acc: 0.6747\n",
      "Epoch 17/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6139 - acc: 0.6865 - val_loss: 0.6287 - val_acc: 0.6804\n",
      "Epoch 18/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6100 - acc: 0.6883 - val_loss: 0.6218 - val_acc: 0.6823\n",
      "Epoch 19/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6089 - acc: 0.6887 - val_loss: 0.6404 - val_acc: 0.6769\n",
      "Epoch 20/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6075 - acc: 0.6876 - val_loss: 0.6318 - val_acc: 0.6802\n",
      "Epoch 21/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6069 - acc: 0.6902 - val_loss: 0.6245 - val_acc: 0.6823\n",
      "Epoch 22/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6033 - acc: 0.6927 - val_loss: 0.6203 - val_acc: 0.6833\n",
      "Epoch 23/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6036 - acc: 0.6924 - val_loss: 0.6184 - val_acc: 0.6801\n",
      "Epoch 24/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6058 - acc: 0.6893 - val_loss: 0.6280 - val_acc: 0.6784\n",
      "Epoch 25/200\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.6034 - acc: 0.6937 - val_loss: 0.6192 - val_acc: 0.6822\n",
      "Epoch 26/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6041 - acc: 0.6918 - val_loss: 0.6238 - val_acc: 0.6795\n",
      "Epoch 27/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6026 - acc: 0.6891 - val_loss: 0.6179 - val_acc: 0.6829\n",
      "Epoch 28/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6010 - acc: 0.6950 - val_loss: 0.6229 - val_acc: 0.6839\n",
      "Epoch 29/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6021 - acc: 0.6936 - val_loss: 0.6156 - val_acc: 0.6842\n",
      "Epoch 30/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6001 - acc: 0.6923 - val_loss: 0.6338 - val_acc: 0.6799\n",
      "Epoch 31/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5997 - acc: 0.6911 - val_loss: 0.6308 - val_acc: 0.6820\n",
      "Epoch 32/200\n",
      "21816/21816 [==============================] - 0s 21us/step - loss: 0.6001 - acc: 0.6941 - val_loss: 0.6293 - val_acc: 0.6838\n",
      "Epoch 33/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5971 - acc: 0.6970 - val_loss: 0.6140 - val_acc: 0.6851\n",
      "Epoch 34/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5987 - acc: 0.6937 - val_loss: 0.6336 - val_acc: 0.6836\n",
      "Epoch 35/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5984 - acc: 0.6948 - val_loss: 0.6316 - val_acc: 0.6827\n",
      "Epoch 36/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5984 - acc: 0.6954 - val_loss: 0.6636 - val_acc: 0.6804\n",
      "Epoch 37/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5995 - acc: 0.6931 - val_loss: 0.6421 - val_acc: 0.6833\n",
      "Epoch 38/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5991 - acc: 0.6941 - val_loss: 0.6225 - val_acc: 0.6851\n",
      "Epoch 39/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6003 - acc: 0.6926 - val_loss: 0.6187 - val_acc: 0.6852\n",
      "Epoch 40/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5983 - acc: 0.6938 - val_loss: 0.6304 - val_acc: 0.6843\n",
      "Epoch 41/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5986 - acc: 0.6924 - val_loss: 0.6499 - val_acc: 0.6831\n",
      "Epoch 42/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5998 - acc: 0.6930 - val_loss: 0.6434 - val_acc: 0.6835\n",
      "Epoch 43/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6000 - acc: 0.6915 - val_loss: 0.6458 - val_acc: 0.6825\n",
      "Epoch 44/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5975 - acc: 0.6964 - val_loss: 0.6207 - val_acc: 0.6845\n",
      "Epoch 45/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5972 - acc: 0.6978 - val_loss: 0.6296 - val_acc: 0.6832\n",
      "Epoch 46/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5945 - acc: 0.7006 - val_loss: 0.6317 - val_acc: 0.6840\n",
      "Epoch 47/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5955 - acc: 0.6975 - val_loss: 0.6522 - val_acc: 0.6822\n",
      "Epoch 48/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5975 - acc: 0.6944 - val_loss: 0.6204 - val_acc: 0.6811\n",
      "Epoch 49/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6089 - acc: 0.6940 - val_loss: 0.6309 - val_acc: 0.6736\n",
      "Epoch 50/200\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.6038 - acc: 0.6964 - val_loss: 0.6393 - val_acc: 0.6773\n",
      "Epoch 51/200\n",
      "21816/21816 [==============================] - 0s 17us/step - loss: 0.5979 - acc: 0.6957 - val_loss: 0.6417 - val_acc: 0.6802\n",
      "Epoch 52/200\n",
      "21816/21816 [==============================] - 0s 16us/step - loss: 0.5945 - acc: 0.7006 - val_loss: 0.6340 - val_acc: 0.6820\n",
      "Epoch 53/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5940 - acc: 0.6998 - val_loss: 0.6377 - val_acc: 0.6827\n",
      "Epoch 54/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5939 - acc: 0.6986 - val_loss: 0.6460 - val_acc: 0.6826\n",
      "Epoch 55/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5943 - acc: 0.6994 - val_loss: 0.6356 - val_acc: 0.6836\n",
      "Epoch 56/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5953 - acc: 0.6965 - val_loss: 0.6287 - val_acc: 0.6842\n",
      "Epoch 57/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5938 - acc: 0.6991 - val_loss: 0.6301 - val_acc: 0.6846\n",
      "Epoch 58/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5922 - acc: 0.6998 - val_loss: 0.6423 - val_acc: 0.6837\n",
      "Epoch 59/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5927 - acc: 0.7020 - val_loss: 0.6336 - val_acc: 0.6844\n",
      "Epoch 60/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5937 - acc: 0.6971 - val_loss: 0.6424 - val_acc: 0.6837\n",
      "Epoch 61/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5946 - acc: 0.6991 - val_loss: 0.6537 - val_acc: 0.6835\n",
      "Epoch 62/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5935 - acc: 0.6987 - val_loss: 0.6521 - val_acc: 0.6827\n",
      "Epoch 63/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5908 - acc: 0.7010 - val_loss: 0.6658 - val_acc: 0.6822\n",
      "Epoch 64/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5948 - acc: 0.6965 - val_loss: 0.6504 - val_acc: 0.6763\n",
      "Epoch 65/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5949 - acc: 0.7018 - val_loss: 0.6326 - val_acc: 0.6771\n",
      "Epoch 66/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5922 - acc: 0.7009 - val_loss: 0.6310 - val_acc: 0.6814\n",
      "Epoch 67/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5946 - acc: 0.6994 - val_loss: 0.6411 - val_acc: 0.6823\n",
      "Epoch 68/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5912 - acc: 0.7031 - val_loss: 0.6575 - val_acc: 0.6831\n",
      "Epoch 69/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5939 - acc: 0.6972 - val_loss: 0.6273 - val_acc: 0.6852\n",
      "Epoch 70/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5938 - acc: 0.6999 - val_loss: 0.6328 - val_acc: 0.6854\n",
      "Epoch 71/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5925 - acc: 0.6983 - val_loss: 0.6329 - val_acc: 0.6842\n",
      "Epoch 72/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5950 - acc: 0.6973 - val_loss: 0.6377 - val_acc: 0.6841\n",
      "Epoch 73/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5889 - acc: 0.7057 - val_loss: 0.6472 - val_acc: 0.6837\n",
      "Epoch 74/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5925 - acc: 0.7001 - val_loss: 0.6405 - val_acc: 0.6839\n",
      "Epoch 75/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5907 - acc: 0.7023 - val_loss: 0.6609 - val_acc: 0.6825\n",
      "Epoch 76/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5927 - acc: 0.7010 - val_loss: 0.6422 - val_acc: 0.6835\n",
      "Epoch 77/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5913 - acc: 0.7015 - val_loss: 0.6609 - val_acc: 0.6825\n",
      "Epoch 78/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5941 - acc: 0.6961 - val_loss: 0.6611 - val_acc: 0.6830\n",
      "Epoch 79/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5906 - acc: 0.7025 - val_loss: 0.6390 - val_acc: 0.6837\n",
      "Epoch 80/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5878 - acc: 0.7044 - val_loss: 0.6622 - val_acc: 0.6829\n",
      "Epoch 81/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5908 - acc: 0.7021 - val_loss: 0.6510 - val_acc: 0.6850\n",
      "Epoch 82/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5951 - acc: 0.7045 - val_loss: 0.6422 - val_acc: 0.6838\n",
      "Epoch 83/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5927 - acc: 0.7026 - val_loss: 0.6578 - val_acc: 0.6837\n",
      "Epoch 84/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5929 - acc: 0.7006 - val_loss: 0.6369 - val_acc: 0.6839\n",
      "Epoch 85/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5893 - acc: 0.7035 - val_loss: 0.6405 - val_acc: 0.6846\n",
      "Epoch 86/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5890 - acc: 0.7041 - val_loss: 0.6515 - val_acc: 0.6837\n",
      "Epoch 87/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5901 - acc: 0.7022 - val_loss: 0.6487 - val_acc: 0.6839\n",
      "Epoch 88/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5871 - acc: 0.7043 - val_loss: 0.6483 - val_acc: 0.6845\n",
      "Epoch 89/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5870 - acc: 0.7071 - val_loss: 0.6516 - val_acc: 0.6835\n",
      "Epoch 90/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5891 - acc: 0.7023 - val_loss: 0.6393 - val_acc: 0.6841\n",
      "Epoch 91/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5912 - acc: 0.7037 - val_loss: 0.6486 - val_acc: 0.6835\n",
      "Epoch 92/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5897 - acc: 0.7028 - val_loss: 0.6313 - val_acc: 0.6848\n",
      "Epoch 93/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5867 - acc: 0.7057 - val_loss: 0.6475 - val_acc: 0.6851\n",
      "Epoch 94/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5896 - acc: 0.7007 - val_loss: 0.6659 - val_acc: 0.6508\n",
      "Epoch 95/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5898 - acc: 0.7005 - val_loss: 0.6470 - val_acc: 0.6847\n",
      "Epoch 96/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5875 - acc: 0.7044 - val_loss: 0.6449 - val_acc: 0.6841\n",
      "Epoch 97/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5857 - acc: 0.7053 - val_loss: 0.6444 - val_acc: 0.6847\n",
      "Epoch 98/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5861 - acc: 0.7040 - val_loss: 0.6490 - val_acc: 0.6842\n",
      "Epoch 99/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5849 - acc: 0.7060 - val_loss: 0.6583 - val_acc: 0.6839\n",
      "Epoch 100/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5861 - acc: 0.7056 - val_loss: 0.6556 - val_acc: 0.6832\n",
      "Epoch 101/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5882 - acc: 0.7043 - val_loss: 0.6521 - val_acc: 0.6845\n",
      "Epoch 102/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5884 - acc: 0.7059 - val_loss: 0.6311 - val_acc: 0.6847\n",
      "Epoch 103/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5871 - acc: 0.7062 - val_loss: 0.6401 - val_acc: 0.6854\n",
      "Epoch 104/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5903 - acc: 0.7004 - val_loss: 0.6459 - val_acc: 0.6846\n",
      "Epoch 105/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5900 - acc: 0.7029 - val_loss: 0.6493 - val_acc: 0.6850\n",
      "Epoch 106/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5882 - acc: 0.7032 - val_loss: 0.6413 - val_acc: 0.6850\n",
      "Epoch 107/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5874 - acc: 0.7048 - val_loss: 0.6429 - val_acc: 0.6851\n",
      "Epoch 108/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5868 - acc: 0.7041 - val_loss: 0.6308 - val_acc: 0.6847\n",
      "Epoch 109/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5849 - acc: 0.7076 - val_loss: 0.6332 - val_acc: 0.6852\n",
      "Epoch 110/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5859 - acc: 0.7021 - val_loss: 0.6237 - val_acc: 0.6860\n",
      "Epoch 111/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5903 - acc: 0.7013 - val_loss: 0.6395 - val_acc: 0.6850\n",
      "Epoch 112/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5902 - acc: 0.7057 - val_loss: 0.6436 - val_acc: 0.6851\n",
      "Epoch 113/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5855 - acc: 0.7049 - val_loss: 0.6542 - val_acc: 0.6850\n",
      "Epoch 114/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5841 - acc: 0.7073 - val_loss: 0.6451 - val_acc: 0.6851\n",
      "Epoch 115/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5873 - acc: 0.7059 - val_loss: 0.6396 - val_acc: 0.6851\n",
      "Epoch 116/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5895 - acc: 0.7014 - val_loss: 0.6273 - val_acc: 0.6855\n",
      "Epoch 117/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5918 - acc: 0.6996 - val_loss: 0.6357 - val_acc: 0.6843\n",
      "Epoch 118/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5869 - acc: 0.7043 - val_loss: 0.6453 - val_acc: 0.6846\n",
      "Epoch 119/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5853 - acc: 0.7044 - val_loss: 0.6562 - val_acc: 0.6838\n",
      "Epoch 120/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5878 - acc: 0.7004 - val_loss: 0.6596 - val_acc: 0.6845\n",
      "Epoch 121/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5858 - acc: 0.7073 - val_loss: 0.6465 - val_acc: 0.6844\n",
      "Epoch 122/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5843 - acc: 0.7054 - val_loss: 0.6438 - val_acc: 0.6853\n",
      "Epoch 123/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5837 - acc: 0.7084 - val_loss: 0.6553 - val_acc: 0.6856\n",
      "Epoch 124/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5854 - acc: 0.7059 - val_loss: 0.6687 - val_acc: 0.6504\n",
      "Epoch 125/200\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5849 - acc: 0.7048 - val_loss: 0.6486 - val_acc: 0.6847\n",
      "Epoch 126/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5869 - acc: 0.7043 - val_loss: 0.6572 - val_acc: 0.6830\n",
      "Epoch 127/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5849 - acc: 0.7065 - val_loss: 0.6410 - val_acc: 0.6830\n",
      "Epoch 128/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5879 - acc: 0.7018 - val_loss: 0.6687 - val_acc: 0.6823\n",
      "Epoch 129/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5838 - acc: 0.7058 - val_loss: 0.6499 - val_acc: 0.6839\n",
      "Epoch 130/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5823 - acc: 0.7085 - val_loss: 0.6703 - val_acc: 0.6766\n",
      "Epoch 131/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5894 - acc: 0.7086 - val_loss: 0.6679 - val_acc: 0.6763\n",
      "Epoch 132/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5890 - acc: 0.7027 - val_loss: 0.6836 - val_acc: 0.6792\n",
      "Epoch 133/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5882 - acc: 0.7034 - val_loss: 0.6540 - val_acc: 0.6838\n",
      "Epoch 134/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5838 - acc: 0.7078 - val_loss: 0.6278 - val_acc: 0.6854\n",
      "Epoch 135/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5854 - acc: 0.7070 - val_loss: 0.6459 - val_acc: 0.6846\n",
      "Epoch 136/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5897 - acc: 0.6993 - val_loss: 0.6440 - val_acc: 0.6856\n",
      "Epoch 137/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5859 - acc: 0.7028 - val_loss: 0.6393 - val_acc: 0.6854\n",
      "Epoch 138/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5825 - acc: 0.7084 - val_loss: 0.6337 - val_acc: 0.6859\n",
      "Epoch 139/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5851 - acc: 0.7063 - val_loss: 0.6288 - val_acc: 0.6856\n",
      "Epoch 140/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5835 - acc: 0.7052 - val_loss: 0.6475 - val_acc: 0.6853\n",
      "Epoch 141/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5841 - acc: 0.7049 - val_loss: 0.6557 - val_acc: 0.6857\n",
      "Epoch 142/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5833 - acc: 0.7078 - val_loss: 0.6419 - val_acc: 0.6852\n",
      "Epoch 143/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5831 - acc: 0.7066 - val_loss: 0.6678 - val_acc: 0.6846\n",
      "Epoch 144/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5819 - acc: 0.7096 - val_loss: 0.6684 - val_acc: 0.6838\n",
      "Epoch 145/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5829 - acc: 0.7065 - val_loss: 0.6435 - val_acc: 0.6856\n",
      "Epoch 146/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5809 - acc: 0.7099 - val_loss: 0.6407 - val_acc: 0.6853\n",
      "Epoch 147/200\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5830 - acc: 0.7081 - val_loss: 0.6565 - val_acc: 0.6852\n",
      "Epoch 148/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5813 - acc: 0.7082 - val_loss: 0.6508 - val_acc: 0.6853\n",
      "Epoch 149/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5814 - acc: 0.7068 - val_loss: 0.6745 - val_acc: 0.6527\n",
      "Epoch 150/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5835 - acc: 0.7045 - val_loss: 0.6325 - val_acc: 0.6844\n",
      "Epoch 151/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5813 - acc: 0.7103 - val_loss: 0.6495 - val_acc: 0.6852\n",
      "Epoch 152/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5858 - acc: 0.7018 - val_loss: 0.6313 - val_acc: 0.6859\n",
      "Epoch 153/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5848 - acc: 0.7038 - val_loss: 0.6448 - val_acc: 0.6852\n",
      "Epoch 154/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5817 - acc: 0.7091 - val_loss: 0.6501 - val_acc: 0.6860\n",
      "Epoch 155/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5832 - acc: 0.7067 - val_loss: 0.6477 - val_acc: 0.6862\n",
      "Epoch 156/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5813 - acc: 0.7070 - val_loss: 0.6536 - val_acc: 0.6856\n",
      "Epoch 157/200\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5828 - acc: 0.7082 - val_loss: 0.6369 - val_acc: 0.6855\n",
      "Epoch 158/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5834 - acc: 0.7051 - val_loss: 0.6286 - val_acc: 0.6861\n",
      "Epoch 159/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5821 - acc: 0.7059 - val_loss: 0.6458 - val_acc: 0.6855\n",
      "Epoch 160/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5800 - acc: 0.7094 - val_loss: 0.6524 - val_acc: 0.6855\n",
      "Epoch 161/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5793 - acc: 0.7107 - val_loss: 0.6545 - val_acc: 0.6853\n",
      "Epoch 162/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5826 - acc: 0.7067 - val_loss: 0.6448 - val_acc: 0.6854\n",
      "Epoch 163/200\n",
      "21816/21816 [==============================] - 0s 16us/step - loss: 0.5815 - acc: 0.7081 - val_loss: 0.6364 - val_acc: 0.6850\n",
      "Epoch 164/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5791 - acc: 0.7092 - val_loss: 0.6501 - val_acc: 0.6851\n",
      "Epoch 165/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5791 - acc: 0.7107 - val_loss: 0.6367 - val_acc: 0.6855\n",
      "Epoch 166/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5793 - acc: 0.7071 - val_loss: 0.6378 - val_acc: 0.6856\n",
      "Epoch 167/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5788 - acc: 0.7100 - val_loss: 0.6593 - val_acc: 0.6854\n",
      "Epoch 168/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5811 - acc: 0.7079 - val_loss: 0.6590 - val_acc: 0.6853\n",
      "Epoch 169/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5817 - acc: 0.7091 - val_loss: 0.6322 - val_acc: 0.6851\n",
      "Epoch 170/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5813 - acc: 0.7091 - val_loss: 0.6282 - val_acc: 0.6861\n",
      "Epoch 171/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5851 - acc: 0.7037 - val_loss: 0.6428 - val_acc: 0.6862\n",
      "Epoch 172/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5790 - acc: 0.7098 - val_loss: 0.6437 - val_acc: 0.6852\n",
      "Epoch 173/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5803 - acc: 0.7075 - val_loss: 0.6455 - val_acc: 0.6856\n",
      "Epoch 174/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5819 - acc: 0.7081 - val_loss: 0.6473 - val_acc: 0.6853\n",
      "Epoch 175/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5806 - acc: 0.7083 - val_loss: 0.6715 - val_acc: 0.6836\n",
      "Epoch 176/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5860 - acc: 0.7014 - val_loss: 0.6816 - val_acc: 0.6482\n",
      "Epoch 177/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5804 - acc: 0.7098 - val_loss: 0.6672 - val_acc: 0.6851\n",
      "Epoch 178/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5816 - acc: 0.7064 - val_loss: 0.6556 - val_acc: 0.6857\n",
      "Epoch 179/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5829 - acc: 0.7054 - val_loss: 0.6509 - val_acc: 0.6850\n",
      "Epoch 180/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5815 - acc: 0.7072 - val_loss: 0.6643 - val_acc: 0.6854\n",
      "Epoch 181/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5789 - acc: 0.7104 - val_loss: 0.6476 - val_acc: 0.6858\n",
      "Epoch 182/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5838 - acc: 0.7043 - val_loss: 0.6742 - val_acc: 0.6855\n",
      "Epoch 183/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5845 - acc: 0.7051 - val_loss: 0.6396 - val_acc: 0.6858\n",
      "Epoch 184/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5793 - acc: 0.7084 - val_loss: 0.6362 - val_acc: 0.6856\n",
      "Epoch 185/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5791 - acc: 0.7094 - val_loss: 0.6663 - val_acc: 0.6852\n",
      "Epoch 186/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5774 - acc: 0.7110 - val_loss: 0.6713 - val_acc: 0.6852\n",
      "Epoch 187/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5823 - acc: 0.7091 - val_loss: 0.6590 - val_acc: 0.6851\n",
      "Epoch 188/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5792 - acc: 0.7099 - val_loss: 0.6446 - val_acc: 0.6850\n",
      "Epoch 189/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5815 - acc: 0.7065 - val_loss: 0.6360 - val_acc: 0.6851\n",
      "Epoch 190/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5801 - acc: 0.7090 - val_loss: 0.6507 - val_acc: 0.6852\n",
      "Epoch 191/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5797 - acc: 0.7084 - val_loss: 0.6400 - val_acc: 0.6861\n",
      "Epoch 192/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5775 - acc: 0.7087 - val_loss: 0.6711 - val_acc: 0.6838\n",
      "Epoch 193/200\n",
      "21816/21816 [==============================] - 0s 17us/step - loss: 0.5788 - acc: 0.7106 - val_loss: 0.6459 - val_acc: 0.6860\n",
      "Epoch 194/200\n",
      "21816/21816 [==============================] - 0s 17us/step - loss: 0.5802 - acc: 0.7083 - val_loss: 0.6641 - val_acc: 0.6860\n",
      "Epoch 195/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5780 - acc: 0.7121 - val_loss: 0.6421 - val_acc: 0.6868\n",
      "Epoch 196/200\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5781 - acc: 0.7100 - val_loss: 0.6584 - val_acc: 0.6853\n",
      "Epoch 197/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5787 - acc: 0.7103 - val_loss: 0.6553 - val_acc: 0.6855\n",
      "Epoch 198/200\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5776 - acc: 0.7104 - val_loss: 0.6440 - val_acc: 0.6862\n",
      "Epoch 199/200\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5768 - acc: 0.7115 - val_loss: 0.6724 - val_acc: 0.6857\n",
      "Epoch 200/200\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5772 - acc: 0.7104 - val_loss: 0.6428 - val_acc: 0.6866\n"
     ]
    }
   ],
   "source": [
    "# Deep neural network for binary classification on 23 input features\n",
    "# (presumably the components retained by the PCA analysis above -- TODO confirm).\n",
    "import keras\n",
    "from keras import layers\n",
    "from keras import regularizers\n",
    "from sklearn.model_selection import KFold  # NOTE(review): imported but not used in this cell\n",
    "\n",
    "# Funnel-shaped network: 100 -> 50 -> 25 -> 12 -> 6 -> 3 -> 1 units.\n",
    "# L2 weight regularization (lambda=0.1) is applied on the first layer only,\n",
    "# to counter the overfitting seen with the earlier unregularized model.\n",
    "model3=keras.Sequential()\n",
    "model3.add(keras.layers.Dense(100, input_dim=23, activation='relu',kernel_regularizer=regularizers.l2(0.1)))\n",
    "model3.add(keras.layers.Dense(50, activation='relu'))\n",
    "model3.add(keras.layers.Dense(25, activation='relu'))\n",
    "model3.add(keras.layers.Dense(12, activation='relu'))\n",
    "model3.add(keras.layers.Dense(6, activation='relu'))\n",
    "model3.add(keras.layers.Dense(3, activation='relu'))\n",
    "# Single sigmoid unit for the binary target; trained with binary\n",
    "# cross-entropy and the Adam optimizer, reporting accuracy.\n",
    "model3.add(keras.layers.Dense(1, activation='sigmoid'))\n",
    "model3.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
    "# X_scaledtr / X_scaledte and Y_train / Y_test are defined in earlier cells;\n",
    "# the held-out test split doubles as the validation set during fitting.\n",
    "history=model3.fit(X_scaledtr, Y_train, epochs=200, batch_size=1000, validation_data=(X_scaledte, Y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<Figure size 1080x432 with 0 Axes>"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.axes._subplots.AxesSubplot at 0x182844aac50>"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x182804f4e48>]"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x1828721a048>]"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,1,'Model accuracy')"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0,0.5,'Accuracy')"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,0,'Epoch')"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x182872019b0>"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.axes._subplots.AxesSubplot at 0x18287201a90>"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x182891c2940>]"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x182891eca90>]"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,1,'Model loss')"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0,0.5,'Loss')"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,0,'Epoch')"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x182891db4e0>"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4IAAAGDCAYAAAB+yq7tAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzs3Xl4VNX5wPHvmclk3/eErAQIWyBAWEQUtO7V4oKK1rpUa63V2trW2l8XrW3VLlrX1lqLW913RXAHZZGdsCcQIAnZyL6vM3N+f5yZyR7WAIb38zx5mLn33HvPTKJ33nnfc47SWiOEEEIIIYQQ4uRhOd4dEEIIIYQQQghxbEkgKIQQQgghhBAnGQkEhRBCCCGEEOIkI4GgEEIIIYQQQpxkJBAUQgghhBBCiJOMBIJCCCGEEEIIcZKRQFCIQaSUSlFKaaWU10G0vV4ptfxY9EsIIYT4Jjta99dDOY8QQ40EgkK4KKXylVLtSqnIHtuzXTeJlOPTMyGEEOKbS+6vQpyYJBAUoru9wFXuJ0qpDMDv+HXnxCDflAohhDhCcn8V4gQjgaAQ3b0IXNvl+XXAC10bKKVClFIvKKUqlFIFSqnfKqUsrn1WpdTflVKVSqk9wLf7OPa/SqlSpVSxUupPSinrwXRMKfWGUqpMKVWnlPpKKTWuyz4/pdRDrv7UKaWWK6X8XPtmKaVWKqVqlVL7lFLXu7YvVUrd1OUc3UpnXN/S/lgptQvY5dr2qOsc9Uqp9Uqp07q0tyql/k8ptVsp1eDan6iUelIp9VCP1/KBUuqnB/O6hRBCDAkn7P21x3nilVLvK6WqlVJ5SqkfdNk3TSm1znUP3K+Ueti13Vcp9T+lVJXrXrtWKRVzqNcW4liTQFCI7lYBwUqpMa4byJXA/3q0eRwIAYYDszE3thtc+34AXAhMArKAeT2OfR6wAyNcbc4BbuLgLAZGAtHABuClLvv+DkwBZgLhwF2AUymV5DrucSAKyASyD/J6ABcD04GxrudrXecIB14G3lBK+br23Yn5tvcCIBj4PtDses1XdbmZRwLfAl45hH4IIYT4ZjuR769dvQIUAfGua9yvlPqWa9+jwKNa62AgDXjdtf06V78TgQjgFqDlMK4txDElgaAQvbm/tTwbyAGK3Tu63Lx+rbVu0FrnAw8B33M1uQJ4RGu9T2tdDTzQ5dgY4Hzgp1rrJq11OfAPYP7BdEprvcB1zTbgXmCi6xtQCyboukNrXay1dmitV7rafRf4TGv9ita6Q2tdpbU+lEDwAa11tda6xdWH/7nOYddaPwT4AOmutjcBv9Va52pjk6vtGqAOE/zher1Ltdb7D6EfQgghvvlOyPtrl/MkArOAX2mtW133y2e69KEDGKGUitRaN2qtV3XZHgGMcN2D12ut6w/l2kIcDzLuR4jeXgS+AlLpUbYCRALeQEGXbQXAMNfjeGBfj31uyYANKFVKubdZerTvk+sG+Wfgckxmz9mlPz6AL7C7j0MT+9l+sLr1TSn1c0zAFw9oTObPPfh/oGs9D1wDfOr699Ej6JMQQohvphPu/tpDPFCttW7ocZ0s1+MbgfuAHKXUXuAPWuuFrteVCLyqlArFZDp/o7XuOMTrC3FMSUZQiB601gWYQe0XAG/32F2J+eYvucu2JDq/1SzF3Ay67nPbB7QBkVrrUNdPsNZ6HAd2NTAXOAtTfpLi2q5cfWrFlKn0tK+f7QBNgH+X57F9tNHuB67xgL/CfCsbprUOxWT63Hfdga71P2CuUmoiMAZ4t592QgghhqgT9P7aVQkQrpQK6qsPWutdWuurMEM0/gK8qZQKcFXc/EFrPRYzRONCuo+HFOKEJIGgEH27EThTa93UdaPW2oEZE/BnpVSQUioZMzbOPc7hdeAnSqkEpVQYcHeXY0uBT4CHlFLBSimLUipNKTX7IPoThLnJVWGCt/u7nNcJLAAedg1ytyql
TlFK+WDGEZ6llLpCKeWllIpQSmW6Ds0GLlVK+SulRrhe84H6YAcqAC+l1O8xGUG3Z4A/KqVGKmOCUirC1ccizPjCF4G33KWmQgghTjon2v21ax/2ASuBB1wTwExw9fclAKXUNUqpKNd9t9Z1mEMpdYZSKsNVvVOPCWgdh3JtIY4HCQSF6IPWerfWel0/u2/HZNP2AMsxk6YscO37D/AxsAkzoUvPbzyvxZS+bAdqgDeBuIPo0guY8pRi17Greuz/BbAFE2xVY76ptGitCzHfvP7ctT0bmOg65h9AO7AfU7r5EgP7GDPxzE5XX1rpXnbzMOZG/QnmRvhfuk8N/jyQgQkGhRBCnIROwPtrT1dhqm5KgHeAe7TWn7r2nQdsU0o1YoY4zNdat2Iqat7E3Pt2AF/SeyIcIU44Smt94FZCCHGElFKnY26MKa5vU4UQQgghxHEiGUEhxKBTStmAO4BnJAgUQgghhDj+JBAUQgwqpdQYzFiKOOCR49wdIYQQQgiBlIYKIYQQQgghxElHMoJCCCGEEEIIcZKRQFAIIYQQQgghTjJex7sDR0tkZKROSUk53t0QQghxDKxfv75Sax11vPvxTSH3SCGEODkcyv1xyASCKSkprFvX37I0QgghhhKlVMHx7sM3idwjhRDi5HAo90cpDRVCCCGEEEKIk4wEgkIIIYQQQghxkpFAUAghhBBCCCFOMkNmjKAQQgghhBDi5NTR0UFRURGtra3HuyvHhK+vLwkJCdhstsM+hwSCQgghhBBCiG+0oqIigoKCSElJQSl1vLszqLTWVFVVUVRURGpq6mGfR0pDhRBCCCGEEN9ora2tREREDPkgEEApRURExBFnPyUQFEIIIYQQQnzjnQxBoNvReK0SCAohhBBCCCHEEaiqqiIzM5PMzExiY2MZNmyY53l7e/tBneOGG24gNzd3kHvaScYICiGEEEIIIcQRiIiIIDs7G4B7772XwMBAfvGLX3Rro7VGa43F0ncu7tlnnx30fnYlGUEhhBBCCCGEGAR5eXmMHz+eW265hcmTJ1NaWsrNN99MVlYW48aN47777vO0nTVrFtnZ2djtdkJDQ7n77ruZOHEip5xyCuXl5Ue9b5IRFEIIIYQQQgwZf/hgG9tL6o/qOcfGB3PPReMO69jt27fz7LPP8tRTTwHw4IMPEh4ejt1u54wzzmDevHmMHTu22zF1dXXMnj2bBx98kDvvvJMFCxZw9913H/Hr6EoygkIIIY6K0roWapsPbhyEGJhSyqqU2qiUWtjHPh+l1GtKqTyl1GqlVMpg96ewqpkvcvbjdOrBvpQQQgw5aWlpTJ061fP8lVdeYfLkyUyePJkdO3awffv2Xsf4+flx/vnnAzBlyhTy8/OPer8kIyiEEOKouPa/a4gN8eXFG6f32tfhcOJlUSfVjG5H6A5gBxDcx74bgRqt9Qil1HzgL8CVg9mZRVtLeXBxDjvuOw8/b+tgXkoIIY7Y4WbuBktAQIDn8a5du3j00UdZs2YNoaGhXHPNNX0uA+Ht7e15bLVasdvtR71fkhEUQghxxGqb29lV3siyXZXsq27utq+uuYNz/vEVt72yEa0lo3QgSqkE4NvAM/00mQs873r8JvAtNcgRtpfFnL7d4RzMywghxJBXX19PUFAQwcHBlJaW8vHHHx+3vkggKIQQg6iqsY3WDkef+/LKG2hsO/rf8B2uRVtKufyplf32ty/utpuK6jzb3tpQ5HmsteZXb21mb2UTH24u5cVVBUevw0PXI8BdQH9R1zBgH4DW2g7UARE9GymlblZKrVNKrauoqDiiDnl7mY8LHRIICiHEEZk8eTJjx45l/Pjx/OAHP+DUU089bn2R0lAhhOiD1pqtxfVkJIQc9jnK61s56+EvuSIrkd9e2H0Q+L7qZi54dDnXn5rC/10w5ki7e0BOp+b1dfu4aGI8AT69/9ffZnfwp4XbKalr5cPNpVw2JeGA51yaW87NL6zng9tnsWlfLUpBZmIo
b6wr4nszkvlyZwVf7qzgo21l3H3+aFbvqeJPH+4gKzmcsfF9VTwKpdSFQLnWer1Sak5/zfrY1ivVqrV+GngaICsr64hSsTarCQTtDsnoCiHEgdx7772exyNGjPAsKwFmIfgXX3yxz+OWL1/ueVxbW+t5PH/+fObPn3/U+ykZQSHESemZZXvYUdr/jGJLcsu56Inl5JQd/qxjf1i4nfpWO2sLanrte/yLXbQ7nKzeWw3A8l2V3P7KRhwDTMaxr7qZ8obe4wi6Kqlt6bP8ctXeKu5+ewsfbint87hX1+yjpK6VYF8vXlxVgN3h5PHPdw34+p9Ztpd2h5N3NhazaV8taVGBXD8zheLaFrL+/Bl3vr6JL3aUc/X0JG4+bTh/v3wiKRH+VDS2DfgaTnKnAt9RSuUDrwJnKqX+16NNEZAIoJTyAkKA6sHslLs0VDKCQggxdEggKIQ4KXy2fT/z/rWSdruTsrpW/vThDv64sPcsXW65ZY0AFFW3HNb1luSU8+HmUiICvNlRWt/tA/Teyibe2lBMgLeV7SV1tHY4eOHrfD7YVMLyvMp+z3nrSxu47aWN/e6vbGxj9t+W8PaG4l77VuZVAfQavwfQ3G7niSV5TEsN52dnjyJ7Xy03PLeWhz7dybPL8/u81u6KRpbnVWKzKj7YVMKmolomJoRy7rhYLpwQxw9PT+OD22aRfc853H9JBhaLIiLQh4/uOJ3Zo6L6fQ0nO631r7XWCVrrFGA+8IXW+poezd4HrnM9nudqM6ipOndpqIwRFEKIoUMCQSHESeHN9UWsK6hhU1Eta/NN8mTl7qp+s4KF1U0A3bJXbXYHNz63lo2FvTN8PS1YsZfEcD9+fcEY2u1Odu5vAEzJ6QOLduBttfC7C8fS4dBsKKxh5W4TqL2xbh/tdid3v7WZuU+u4JJ/rqCysQ2tNbsrGlmTX01RTe9gDkyQ1+HQfQaTK3ebbQVV3Y/VWnP3W1uobGzjrnPTuWxKAn42K8t2VRLk48W6gmrPuV9aXUC73QQCL35dgM2q+MU56RTXtlDZ2E5mYgi+NitPXD2Zu88fTUZCCFZL9ypGi0VmDT0cSqn7lFLfcT39LxChlMoD7gSO7sJSfZDSUCGEGHokEBRCDIqS2pZDmnRkXX41f/0oh7vf2syeisaDPm5/fSvN7QNPuOJwak8gtCKvkrX51fh7W/GzWVmwfG+fx+RXmoCpvL4zEMwrb+TznHKeXZE/4PXsDicbCmqYMyqarOQwALYWm8lUnluZzyfb9/Ozs0dy1tgYAP67bC+NbXZSIvz5ZNt+7v1gG6+u3YdFwcbCWtYX1FDV1E5zu3k/P9jUWd7Z2uGgyTXhTEWD6euavd2rBBvb7J7JXAp7ZAT/u3wv728q4RfnpJOVEk6wr43fXTiWX56bzi1z0thd0URNUzt//TiX37yzlblPruB3727l1bWFnD8+jqumJ3myRRMTQwd8X8Sh0Vov1Vpf6Hr8e631+67HrVrry7XWI7TW07TWewa7L+5AUEpDhRBi6JBAUAhx1LV2ODj3H1/xt49zD6q91prbXt7Iv7/aw6tr9/FedslBX+uyf63k/97eMmCbrcV11LfasShTIrlmbzVTksO4bMow3ssu4eInV3D9s2s82S6Agip3RrBzTN6eCrPtsx37aXEFZU6n5s7XsnllTaGnXU5ZA03tDrJSwkiO8CfI14vNRXVsLqrl/kU7OGtMNDfNGk5koA8pEf58nlOO1aJ48LIJtDucvLy6kPlTE3n++9MAU0rqDuC8vSy8l13M1uI6bnh2DRP/8AnfecIMLi93BYLFtS2U1HaWtK7dW43DqUmO8O9WGtra4eDvn+TyrdHR3DonzbP96ulJ/PiMEZ4g9us9VSzJKWdKchgVDa28vm4fZ46O5u7zRxPsa+OM9Ch8vCyMjpUJYIYqL6ssHyGEEEONBIJCnIRqmtr508Lth5SxOxRr86tpaLOzcHMJzgEmP3HbVlJPWX0rD16aQXyI
b6+s1a0vrecXb2zC3uNDaENrB0U1LSzaUkZtc3u3fVprLn9qJf9cmucplbxscgIbCmvI3d9AVnI4N5+Wxpi4IGxWxdLcCs/SBq0dDkrqTADozrJBZyDY3O5gaW45YEpA395YzItfdy6LsM5VepqVEo5SioxhIWwuquN3724lzN+bhy7P9JRITk4Kc/0byozhEUxICCElwp/fXTiWYF8bkYE+7K1o8gRwV2QlkFPWwCX/XMGW4npGxgSyu6KJNrvDEwi6fwduK3dX4m21cHHmMKqa2j1LVmwuqqO1w8n8aUl9LvQ+MTEUm1Xx+Bd5NLbZue2MEay8+1tk//4c/vndKcSH+gFm4d7nbpjmyQyKocdbSkOFEGLIkbu2EMeI1prnV+ZTWnd4k48cTQs3l/DM8r2sy+9/rNtLqwu4/ZWNBxXIAazeU8W1C9bQ0u5g2S4TeO2vb2PjvtoDHAlf5JSjFMxJjyY5IsCTjQNTivbp9v28ub6Iu97c3G1WTfd4t3aHk/c3dc8i5pQ1sDa/hoc+2clra/cxJi6YiybGY3dqtIapqWEkRfjz3m2zeP2Hp3DayEge+3wXtc3t3bJmXQPB3RWNxIX4EhnozcLNpeworedvH+fiZ7OyvbSe6iYTjK4tqCE+xJdhrkApIyGELcV1bCqq467zRhPib/Occ7Ir63b6SDOByvM3TOOdW0/1LPEwPDKAvZWdgeAts9MI8vVi1ohIPvnZ6Vx7SgoAZXWtVDS0EuZvI9DHq1sguDyvisnJoYyKCQI6J4xZvacKpWBqSlifvxdfm5Vx8SHsKK0n0MeLmSMi8Pay4Odt7dYuPtSPU9J6LWMnhhApDRVCiIFVVVWRmZlJZmYmsbGxDBs2zPO8vb39wCdwWbBgAWVlZYPY004SCApxjGwvreee97fxypp9x7srrHctZ7C3S8DVldaafy3dzQebSnh7Y+8ZKPvy5c4KvtpZwTsbi/lqZwUTEkKwWRUfbe29XEFNU3u37N7nOeVMTAglKsiH5Aj/bhOa7K5opMOhmZoSxtsbi7stf7C30vQ/xM/G6+u6v69f7jQLaIf5e1NY3cysERFMTQnH22rBy6KYlNgZ/Cil+O23x9LQ2sFjn+eR77p+amRAt8li9lQ2MjImiPPGx7J4aynnP7oMP28r/7hyIgBf765Ca826/GqyUsI9x2UMC/H8e+mkYd36ecboaEbHBnHhxHjT3wBvwgK8PftTIwPY4yoNjQ7yISHMn7W/OYsF108lPMDbE2yW1LZS0dBGbIgfk5JCWbvX/I63Ftexo7Ses8fGkhTuD3QG0Gvyq0mPCSLU35v+uMtDzxwdjY+Xtd92YmiT0lAhhBhYREQE2dnZZGdnc8stt/Czn/3M89zbu//7bE8SCAoxBC3NNYFJXnnDEZ3H4dR9rjVXXt/Km+uL2HAQM1puKDRZuvzKvgPB7H21FNW0EOBt5cHFOTS0dhzwnO4xaU8uySOnrIELMuI4dUQki7eWdVvXrqapndP+uoQXXKWUFQ1tbNpXy7dGRwOQHBHQrXzRPavnPReNA2BvRWef3f3/4ezhbC2uZ4trQhSAL3MrGB0bxMNXTMTLojhrTAx+3lZOSYsgKyWsV1YrPTaIiybG8+b6fexy/Y6yksMorzczdmqt2VvRxPDIAL43I4XpqRH8/OxRfPiT0zhrTAyBPl6s2F1JUU0L++vbumXZZgyPYGxcMH+YO67XrJnDQv346KenkxoZ0Of7mhoVQGVjG9tL60l0BXK+NqunlDPeEwi2UN7QRlSQD9NSwsnd30BhVTPPrczH39vKvCkJnkDQzC7qZH1BDdNTw/u8rttU1/7zxscO2E4MbVIaKoQQh+/5559n2rRpZGZmcuutt+J0OrHb7Xzve98jIyOD8ePH89hjj/Haa6+RnZ3NlVdeeciZxMPhNahnF+IEt72knrAAG3EhfoN+LfeYsl37+54Rc/muSrJSwvC1DZx1+eUbm1iSW84ts9O4bmYKvjYr
b64v4hdvbAJgbFwwi+44rd/jKxraPGPwegaCi7eUMjk5jIWbS/G2Wvj397K45r+r+c+yvdx59qgB+1VS24qXRVHsCghPGxlJmL+NX721hdV7q5kx3JQOfrC5hMY2u6dkdInrfTlzjDsQdGetmlxliQ14Wy2Mjg0iIsCbsvrOyVvyq5qJDfbl6mlJ/HfZXu58PZv3bjsVp4Z1BdXcOGs4p4+KYusfzvW8r09cPYn+PspeOjmB97JLeGlVIcG+XoyMCaTN7qShzU5zm4OmdgdpUQGkxwbxys0zuh07PTWclXmVRAb6ADAluTPAigz0GfB3MhB3gLi1uJ5LemQTAeJCfAFXIFjfxqiYIC6eNIynl+3h5hfXsaeyiSuyEgjxM+WoIX42Cqub2VpcR3O7g2mpA5d0nj0mhv9cm+UJ1MXJSUpDhRDfKIvvhrKBJ5I7ZLEZcP6Dh3zY1q1beeedd1i5ciVeXl7cfPPNvPrqq6SlpVFZWcmWLaaftbW1hIaG8vjjj/PEE0+QmZl5dPvfB8kIipPaD/+3jgcW5Qz6deqaO1hfUIOvzcLeyqZeH6Zyyuq55r+reWNd77LRpjY7j3++i/KGVsrrW3l/UwneXhYeWJzDz9/YRJvdwd8/zmViYigXZMSSX9XEQGtLuzOGscG+5Fd1z6796KUNXPrPlby/qYTZ6VHMGhnJ5KRQVu2pOuBrLK5t4dxxsUQF+RAR4M2Y2GAuyIhjWKgfv3xzE/WurOKb64sA2Flmsm6r91QTGejN2Dgz42TP8sUdpWZCFC+rhZhgX/Z3CwSbSIn0J9Tfm0fnTyKvopG73tzMos2ldDi0Z+HyrsF1kK+NYN/OMXpdnZoWQWSgN8W1LaREBhAdZIKsioY2z5IWw6MC+zx25ohI8quaeezzXVw4IY4xcUEHfM8OxvAumUJ3RrArX5uViABv11p+bUQH+ZAY7s+j8zPJ3d9Au93Jda5xhGDe38LqZs8SE9MOkBG0WBRnj42R9f9Ocu7SUAkEhRDi0Hz22WesXbuWrKwsMjMz+fLLL9m9ezcjRowgNzeXO+64g48//piQkJBj3jfJCIqTltOpKa1tJcjn4NesO1zL8ipwarh8SiIvriogv7KJkTGdgYK7bDR3f/eyUbvDyW0vb2BJbgWbi+vITAzF7tS88oMZvL+phEc+24XWmrL6Vv5++UR2VzSyaEsZFY1tniDGLaesHoViQ0ENNqvigow4/reqAIdTY7UotrtKMCsa22i3O7nINWZtdFwwH24uRWvd58ySYMpVy+pbSYn0Z/60RJrbHVgsiiBfG49dlckV/17FXW9s5tYz0thcVEd4gDd7KhvpcDjZUlzLhIRQz7k7M4LuQLCBOekmoIsN8aWsrksgWNnE2a61+GaNjOTnZ4/i75/sZOHmUgK8rUxJ7nsSlP54WS1cNDGeZ1fkkxwRQHSQye5VNLSx25U9HR7Vdwnn7FGR/FnB2WNj+MeVmf2+V4cqKcIfpUBrSAzrO3MdH+rHtpJ67E5NlKvPZ46O4f5LMiipben2t5YU7s/a/Goz3jE60NNeiIG4S0O7LrEihBAnrMPI3A0WrTXf//73+eMf/9hr3+bNm1m8eDGPPfYYb731Fk8//fQx7ZtkBMVJq6a5HbtTHzCD1tUXOfv5wDU7ZXl9K9cuWENhVXOfbbuec0lOBaH+Ni7PSgBgV3n34POrne7xg923/+nDHSzJreCU4RF8un0//1q6m1OGRzA8KpBb54xgRHQgi7aUkZkYyqkjInoFUW5NbXa++5/VXPT4ct7NLmb8sBDSYwNpdzg9Y/tySuuxKHj9h6fww9OHc44rwBodG0RdS0e3ksyyulaeWbaHO17dyLJdFVQ0tOFwauJD/ThtZBTnjuscTzYlOZy7zk3no21lXPrPlXhZFLfOSaPDodleUk9eeSPjh3V+CxbkayMiwJvC6iYqGtqobGxjjCtb2DUjWN/aQVVTOyldMma3nTmShbfP4kdz0vjNt8ce
1nIGF2ea8svkcH9PkFTuygj6e1uJDfbt87gR0UF8/vM5PHn1ZE8Z3dHg42UlwRUAJvWREQSID/X1jKXs+gXAVdOS+Pk56d3aJob7U97QRm1zB3+dN+Go9VMMbe6/aftBziIshBDCOOuss3j99deprDQzqldVVVFYWEhFRYVZ6uryy/nDH/7Ahg0bAAgKCqKh4cjmkzhYkhEUJy33bJDN7Q4qGtqI7ucDvlu73cldb26hrcPB2WNjeNs1O+bTy3bzp4szPO201jz15R6eWbaHl34wndTIAD7bsZ8z0qMYGR2EUmacYPsYJ/WtHfjZrJ6p/vPKm7pd7+XVhcybksD9l2RwwWPLyCtv5KrpSYBZWPyBSzO4+YV1/PLcdJRSpESYoKigqpmpXWatfG5lPlVN7YyNC2Z7aT0XTogn2dV2b2UTieH+7ChrIDUygMzEUDITQz3HuhcJzylr8IylvG/hNhZtKUMpaGpz8CPXYuTuiUt6+uHsNCYlhfGXj3JIjw3yLDXwzsZinBomDOteDpEc4U9+ZbMnuBkTazJascG+VDW102Z3UFBpgl33a3YbPyykW2B5qCYkhHDf3HGcOTqaQNcSDqY0tInUyIABM339TfhypFIjA9lX3UJSRN+BYFyIn+cDenTwwBm+6cPD+WhrKU9cPfmI3idxcpHSUCGEODwZGRncc889nHXWWTidTmw2G0899RRWq5Ubb7zRU3H1l7/8BYAbbriBm266CT8/P9asWXNIM44eKgkExUmrvL5zWYC9lU0HDAQ/2lZGpSt4XJJTzsLNJjP49oZi7jpvNMG+NrTW3Pn6Jt5xLbmwYPle5qRHU9fSwSWTE/DztpIY5s/O8gbueHUjX+6s4MdnjPCMZ/tyZwW1ze2E+nuTW9ZAu8PJnPQovL0s/G3eBJ5bmc+542I8fZqaEs66356N1TV+a1iYH1aL6rYOX11LB//+cjffGh3Nv66ZwsurCzg/I86zP7+qidOJIqesnokJnQGgW7qrrDC3rIEz0s2EIdtL6jl/fCzeXhbW7K32ZBWH9RMIghmL9taPZgJmwXaLwrP2X0ZCz0AwgDV7qz3lqu6MYGyIK0NX3+ZZ+uJoB19KKc/afFprbFZFUU0z2ftqOWtMzMAHD5JMBFpTAAAgAElEQVRR0YGsz68mJqjvv9Gu73tU4MCB4Bnp0ZzxS5n4RRwam5SGCiHEQbv33nu7Pb/66qu5+uqre7XbuHFjr21XXHEFV1xxxWB1rRspDRVD1vJdlfz949x+95d3WSg8v5/19Lp68et8klzlgv9cuputxfXMzYynud3BG+vMBCh7Kpt4Z2MxN81K5appibyXXcJzK/KJCfZh1ohIAEbFBPJVbgWLt5bRZnd6FiS/aloi0Fkeml1kZtV0B2eTksJ4dP6kXmu5WbtM4mGzWhgW6kd+VTN2h5Mnl+RxyZMrqG+187OzR+HtZeH6U1OJCfYlOsgHP5uV/MpmGlo72Ffd4gm4ugrxtxEX4kuOKyhraXdQUN3M6NhgMoaFUFrXypZis2yDewbLA/G1WUmJCKC6qZ3oIB9iegThSeH+lNS18Njnu0iPCfKsq+dut7++1TPjaXI/WbKjQSlFVKAP72wsNsF8H7N2Hgu3nzmSN26Z2e+ELV0zsQfKCApxOLylNFQIIYYcCQTFkPXcynyeXJpHm93R5/4KVyBotSj2Vjajtaamqe/1WnaU1rM2v4ZrZiTx7Yw4T+Bz13mjyUoO44Wv83E6tWes33UzU7j2lBTa7E7W5FdzyaQET8A2IjqIhjY7McE+vHrzDHxtFmamRTA2zmTF3IHg5n21hAd4e8aHHSyzIHsTCzeX8rePc4kM9OHxqyb1KgNUSpkSzKomcl0zeI6O7Xumy9GxQeS42uSVN6I1pMcGes758bYygn29COpnNs6+jHJlGick9C5PHB4VgNbmui/cOM2zPdYVaJbVt7K7opH4EN8DLrdxpKKCfalt7iAhzI+ZaQMvtTBYQvxt
jI3vHaS7xYea9yXQxwt/byn0EEefzV0aKhlBIYQYMiQQFCcsrTUr8yr7XDy9J6dT4+zSTmtN9r4atIaimpY+jylvaCXQx8s1Hq2JN9YVMe3+z9haXNer7Rc5Zq27eVMSmZtpZtOcnBTKsFA/rp2ZQkFVMyt2V/LVzgpSIwNIDPdnTFywZ1HxeVM6M0npsWb5gTvPHsXUlHA+uG0W91+awbAwP3y8LJ5AcFNRLRMTQg559smUiAD2VjaxaEupJ9h0zwDaU2pkALsrGjvH4vWRETR9DmZ3hZnl0z2z6ciYIMa5gpOCquZ+xwf2Z1SMeR8yhvUuR70gI46nrpnMKzfP6JYtdE/UUlbXyobCGiYm9j72aHOXWl6ZlXjCLqHgfu9lBlAxWNxfZMkYQSGEGDokEBQnrKW5FVz9zGo+27H/gG1vfWkD859eRWuHyf4V1bRQ2WiyewX9lH2WN5g111IjAsivauKVtYV0ODS/fXcrTqemsKqZxja763zNRAR4Ex7gTWZiKBdnxnPLbDNByrnjYgjzt/Hciny+3lPF6SMjPdf43YVj+c0FYxgR3ZlpuyAjjn99dzKXTzGloCNjgogJ9sVqUaRFBZJX0UhTm5288kYm9DFm70CSI/xpaLWzJLec88bFDhi8zEyLoKCqmYc+3Umwr1e/pZ2jY4PocGj2VDSxc38D3l4WksP9CfK1eda5O+RA0JV9zEjoHXzarBbOGx/Xqww2xM+Gj5eFzUV17KtuIStl4DXwjoboYB8sCua5Znw9EUUF+mCzKgkExaBRSuFttdAhpaFCiBPYwc4CPxQcjdc6qIGgUuo8pVSuUipPKXV3H/v/oZTKdv3sVErVdtl3nVJql+vnusHspzgxvbnBjLtzZ6u6cjo1X+6s8PxHsK6gmjX51dz91ma01p5F06H3UgpuFfVtRAb5kBIZQF55IxsLa5mUFEr2vlou//fXnP63JTz8yU7ABJbuEk2lFI/Mn8Q5riUSfLyszJuSwOc55bR2OJntWvMOYEJCKD84fXi36/p4WTk/I67PAG1EdCB55Y1sLa7DqWFi4qHP6uieRbPDobtNCtOXa2Ykc8vsNGqbOxgdF9xv9nG0a3H0jYU15JY1MCLKLPAOeMpD3eWJB+vssTH8ce44Th8ZdeDGLkopYkN8PV8OuDOug+kHpw3nn9+d7Jkx9URksSiSIwL6XV5CiKPBZlVSGiqEOGH5+vpSVVV1UgSDWmuqqqrw9T20z149DdpgEqWUFXgSOBsoAtYqpd7XWm93t9Fa/6xL+9uBSa7H4cA9QBaggfWuY2sQJ5x1+dX42qwHnIq+pd3Bqj1VzEmPOmC5Y11LB59uNx/2d+3vveD7oq2l3PbyRl68cRoTEkKpbGxneGQA72aXMCU5jN0VTfjaLFiV6j8QbGxjXHwwKZEB2J0ai4J/fXcKd7y6kY2FtYT42TxBaFFNy4BjtOZPS+I/y/bibbUwY/jhjyMbER3I+5tKWLBiL8BhZQRTIk0wEBno3W0Jib4opfjVeemMjA4kcYAgIj0miJHRgTz/dQE1Te2e5R8AMoaF8P6mkkPOCPp4Wfmea3bOQxET7EtBVTN+Nmu/paxHU2pkwKAtC3E0PXv9VAJ8ZHygGDxeVouUhgohTlgJCQkUFRVRUVFxvLtyTPj6+pKQcGTVSoP5qWEakKe13gOglHoVmAts76f9VZjgD+Bc4FOtdbXr2E+B84BXBrG/4jBorbnt5Y3EBPvw3m2zBmz7XnYxd7+9hetnpnDPRWMHDAYXbyml3e5kWKgfu8p7L6r5frZZdmBbSb1ngpJfnT+a51bk8/CnO4kK8mFCQiiNrfb+S0PrW5mTHuUpbTx1RCSxIb48e8NUWtod3L8oh+V5FTidmuKaFs4Z1//SAWlRgcxJj8JmtRzRZB2zRkby3+V7+XjbftKiAog8wFIAfUkI88dmVZwzLrbbjKL9UUpx2ZSB/0eilOKm01L51VtbgM6JXqBz6YeBlo44mtzj
BCclhR7Vhdu/6QYK5IU4GmxSGiqEOIHZbDZSU1OPdze+UQbzU9QwYF+X50Wubb0opZKBVOCLQz1WHF+5+xsoqzfLB9S1dAzY1j3r5HMr8/nLR/0v62B3OHlt3T6GRwXwncx49lY2dfsWur61g6W55tuenNJ69lSYjGFaVCD/d8EYapo72Lm/kUlJoWYGzepmWjsc3PLietYXmIXbm9rsNLU7iA7yZXRsEP7eVr47PRkAf28vIgJ9GB4VwH7XenXtDicJYQN/0H7m2iyeumbKAd6xgU1OCiP792ez8u4zefOWmYd1Dl+blZd/MIO7zk0/or70NDdzGJGBZhkH94Q3ANNSwvnrvAmc6yqVHWzumUOPxfhAIUQnbykNFUKIIWUwA8G+UhH9fZU4H3hTa+2e5/+gjlVK3ayUWqeUWneypIFPNO6AzKlhzd7qAduayU9CuGpaEk99uZslOeVsLa7j4idXsK3EzNTZ0NrBjc+vY2NhLTfNGs6omEA6HJr8yiYaWjuoaWrnk237aXc4iQ32JaesgT0VTVgtiqRwfzISQrjYNavnpMQwkiL8KapuYfmuSj7aVsbv39uG06k9S0dEB/kQEejDpnvO4bzx3QMZd6ZwmWtJiMQDLOPgZbUcVAbuQJRSxIf6edbOOxxTU8IJ9T/84/via7Nyw6mpWFT32UUtFsUVWYmDvoyDm3sW0azkwR8fKIToJKWhQggxtAxmaWgRkNjleQJQ0k/b+cCPexw7p8exS3sepLV+GngaICsrS+pVjoMvcytIiwqguLaFlbsrOXts/+WTu8obmDUiinu/M5aNhTX88s1N2J2a2uYO3t5QzLj4EH7zzlaW51XywKUZXDUtybOUw67yRn7z7lay99US4Vpb79sT4liwfC9J4f4khvnh7WW+1/j1BWMI9rNx2shIaprbaXc4eWFVAWBKSRduKfWUF7pnWeyrxDA1ygSCX+2qBDhgRvBkcMvsNM4aE3NcJ045Iz2KzUXxTEuVjKAQx5LNquhwyK1WCCGGisHMCK4FRiqlUpVS3phg7/2ejZRS6UAY8HWXzR8D5yilwpRSYcA5rm3iBNLYZmddQTVnjY1hako4X++u6rdtfWsH++vbGBEdiI+XlUfmZ1LfYsfXy8q4+GCW7aqgtcPBp9v3c9W0RK6algSYck+lzPjCNXurGRUTSHlDG/OmJDAmNpgOh2Z5XiXDozpLFWOCfblv7ngCfLxIdo2b+mpnBeePj2V0bBAPfZJLca2ZQCY6uP8xeCkRASgFq/aY13WoC7sPRVaLIr2fReePleFRgTw6f9Ixy0AKIQybZASFEGJIGbSMoNbarpS6DRPAWYEFWuttSqn7gHVaa3dQeBXwqu4y16vWulop9UdMMAlwn3viGHHiWJlXSYdDM2dUNCF+Nv76US6VjW19TnDiXiR9ZLQJ2EbHBvPuj08lMtCbd7OLuX9RDm+uL6Klw9FtrJmft5WkcH8+3rYfHy8L/7txOgBBvjbPJDLN7Y5+Z3VMiujM4p07LpZQfxvXP7uWBxblABAd1P+0u742K/EhfhTXthAV5COBhxDipCaBoBBCDC2DOte41noRsKjHtt/3eH5vP8cuABYMWufEEXt17T6Cfb2YkhyGn7cVyGXVniounBDfq22eawmIkTGdmTv3cgynjYwCcnjok1yCfL2Yntp9+YWR0UEUVDVz0cT4buPehkcG4mVR2J2a4VF9B4JxIX54Wy04tOaM9GhC/G3cMjuNp77cjZdFEepnG/A1DneVvUo2UAhxspPSUCGEGFpk7vWTRFVjG8+t2HvUFtlctquCL3LKufWMEXh7WRgfH0ygjxcr+ykP3VXegI+Xpc9xdqNjg4gK8qGmuYMzR0d7xvq5jXIFj9+dntRtu7eXhTRXSejwyED6YrUohkcFMGN4OCH+Juj75bnpzB4VxfCogD4Xde/KnWmU8YFCiJOdZASFEGJokdWHTxLPrczn8S/yyEwKIzPx0Bcp78rucPKn
hTtIDPfjhlNTADOb3PTU3uMEyxta8bVZ2VXeSFpUYJ+zaiqlOG1kJG9vKOacsb2XILhmRjIpEQF99js9Nojc/Q39ZgQB/v29Kfh1Keu0WhQLrp9Ka4ej32PchnsCQckICiFObjarheZ2+/HuhhBCiKNEAsGTxJeuJRBW76k64kBwxe4qcvc38NhVk/Dx6gywTkmL4POcckpqW4gP9cPp1Fz6z5V0OJy0252cPiqq33POm5xAXnkjs9N7t4kP9eOKqYl9HAXnjY+luqmd6KD+J31JjugdJFotigCfA//5p7oyjomSERRCnOSkNFQIIYYWKQ09CVQ2trG5yCzDsLrHWn8O56Hf1HPL6gE4fWRkt+0z08xzd1ZwQ2ENRTUt1DZ3UNPc4Zkopi8zR0Ty/m2zCDyI4KyrCzLi+N9N01HqyNfv68ukpFDOHB3NaT1eqxBCnGykNFQIIYYWCQSHiJyy+n5v0Mt2mWxgxrAQ1u6txuHUtNud/P3jXMb87iOWu9bJO1i79jcSFeTTa8Hy0bFBhPnbPOMEF24uxdvLwkc/PZ1rZiQxN3PYYbyy4yvY18aC66eSGC4ZQSHEyc3mJYGgEEIMJRIIDgFVjW18+7HlniURevoyt4LwAG++PyuFhjY72ftqmf/01zyxJA+N5pW1hYd0vZ3ljX1m9ywWxSlpEXy9uxKnU7NoSylnpEeRGhnAny7OkGBKCCG+wWwWKQ0VQoihRALBIWBPZRMOp+aFr/PZU9HYbZ/TqflqVyWnj4zklOGmvPH2lzewobCWR67M5KppSXy2fT+NbXaqm9opq2sd8Fpaa/L2NzAqpu9FxU9Ji6SkrpVbX9pAeUNbn0tJCCGE+OaxWS3YJSMohBBDhgSC3zCtHQ5eXFVAu73zZlxQ1ex5/MDi7lnBbSX1VDe1Mzs9itgQX1Ii/Cmpa+X7p6Zy8aRhzM2Mp83u5NU1hVz0+HLmPrmc5nY7+6qbueXF9eRXNnU7X2ldK03tDkb0M95v3uQE5k1J4JPtZfjZrJw5OvoovnohhBDHi83LQrtkBIUQYsiQWUO/YT7aWsbv3t2Kj9XimUmzsKoJi4LbzxzJPz7bydLccuakmwBsxW4z/u9U10Qu38kcxpq9Vfzq/HQAJieFkRDmx58+3IG31UK7w8l/vtrLmvwqVuRV0WZ38OwN0zzX31XuWhi+n0DQz9vK3y+fyI/PGEFTm/2gZuYUQghx4jOloZIRFEKIoUI+pZ/AGlo72FZSz+SkMM8i6ztKzYydL6zK5/KsBJRSFFQ3Exfixy1zhvP+pmJ+885WPvnZ6QT4eLEir5KR0YFEB/sCcOfZo7pdQynF3Mx4nlyymwcvy+DjbWU8+vlOnBqmJIexJLeCV9cU8vG2MuJD/TwLrPdXGurmbieEEGJokNJQIYQYWqQ09AT20Cc7mf/0Kqb++TNec03ost0VCG4trid7Xy1gSkOTI/zx8bLy4GUTKK5t4eFPd9Jmd7A2v5pTRwy89MFPvjWSd26dyaWTE7jrvNEopZieGs5LN00nOcKfu9/ewtKdFby0upA31xcRGehNWID3gOcUQghx6JRSvkqpNUqpTUqpbUqpP/TR5nqlVIVSKtv1c9Ox6JuZNVRKQ4UQYqiQQPAE5Z51c3JSKPGhfjz2eR5aa3aUNnD++FgCfbx48esCAAqrTSAIMDUlnKunJ/Hsir28sLKA1g4nM9MiBryWj5eVSUlhAKRFBbLw9lk8c10WvjYrf7lsApdOGsYnPz2dmGAfcsoa+h0fKIQQ4oi1AWdqrScCmcB5SqkZfbR7TWud6fp55lh0zKY07Q4HWkswKIQQQ4EEgieoDYU1lDe0cd3MFK6enkRxbQsbCmuobGwjKyWcSyYN48MtpZTXt1Ld1E5SeGcp5t3njyYy0Ic/L9qBRcH04QMHgj2NiQsmyNcGwIzhETx8ZSYjY4I8ZaUjowcuCxVCCHF4tOGe/tnm+jn+kdfyf3Dn1zPwoQO78/h3RwghxJGT
QPAE9dHWMrytFs4cHe3J6C1Yng/AmLggzh8fS5vdyUurTcmoOyMIZhH0++aOAyAjIZQQP9tR6dO8KYlce0oyl0z+5i0ML4QQ3xRKKatSKhsoBz7VWq/uo9llSqnNSqk3lVKJ/ZznZqXUOqXUuoqKiiPrlMXcR7yxY5fyUCGEGBJkspgTkNaaxVvLmDUykiBfG4E+XsQE+7B4aykAY2KDCfT1IsjXi5dWm/LQroEgwHnj4/jJt0YyNi74qPXLalHcN3f8UTufEEKI3rTWDiBTKRUKvKOUGq+13tqlyQfAK1rrNqXULcDzwJl9nOdp4GmArKysI4verGZcuA077Q4nfliP6HRCCCGOP8kInoC2FtdTXNvCeeNjATOz58y0SJwaYoN9CQvwxma1MCc9msrGdgCSI3rP0nnn2aM85xBCCPHNorWuBZYC5/XYXqW1bnM9/Q8wZdA742UCQW86ZAkJIYQYIiQQPAF9umM/FgVnjYnxbDvFVR46Oq5zfN5ZY8xagREB3gTKen1CCPGNp5SKcmUCUUr5AWcBOT3axHV5+h1gx6B3zOoDgE1JaagQQgwVEggeA41tdpbklB/0TGuf79jPlOQwwrss0eAeJ9i11HPOqGisFkVSj7JQIYQQ31hxwBKl1GZgLWaM4EKl1H1Kqe+42vzEtbTEJuAnwPWD3itr5xhByQgKIcTQIGmkY+DpL3fz2Bd5/HXeBK7I6j2mv7HNzn++2kOov43zxseyraSeu88f3a1NQpg/T149mampYZ5tIf42rpmeRGK4BIJCCDEUaK03A5P62P77Lo9/Dfz6WPYLL5MR9HaNERRCCPHNJ4HgINNa8/6mEgDueW8bU5LDSIvqXIcve18tNz2/jspGM9xjbX410Fn22dW3J8T12vYHmbxFCCHEYLN2jhGU0lAhhBgapDR0kG0trie/qpk7zx6Fn7eV3727tdv+Bxebtf5evXkGI6IDWbSljOQI/27BohBCCHFceQJBKQ0VQoihQgLBQfbB5hJsVsW1pyRz4YQ4thTVecYKbi2uY9Weam46LZUZwyN45MpMbFbFueNiUUod554LIYQQLu7lI5SUhgohxFAhpaGDZGVeJbsrGnk/u4TTR0YR6u9NUrg/DW12aps7CAvw5j/L9hDo48X8aUkAjB8Wwmd3ziY6yPc4914IIQ7Drk8hMAbiJvTfpqYAwpKPXZ/E0eEZIyiloUIIMVRIRnAQOJyaH764nt+9t42y+lYumTwMgCTXpC4F1c2U17fy4eZSrpyaSLCvzXNsckQAft6yUK8QJyytoXoP7F1mHnfV3nT0r1dXDLuXwP5tR//ch8PpBEdH7+3LH4GX5sErV0FHa+f2+lIo2Wge71kKT0yFDS8ek66Ko0hmDRVCiCFHMoKDIKesnoY2O3+cO44zx8QQH2IyfO5F3wurmymtbcHu1HxnYvzx7Ko42S1/BAq/hsufA5vf8e7NoWtvgq//abIVE6+CwCjX9mZAg3fAkV9j12dQvA5O+wXU7YNX5kOFa1m3MRfBxf8C70BY9U/45HcwcT58+2GwHYXM/qZX4Z1bAA3KCtcvhOSZBz7O6YSmcgiK7dzW3gQ7P4ZhkyEkCQqWQ3M1pJwGAREH15/WOnj2AogcBZc/a4LT166BjhZoKIXEGbBvFax9BmbeZva/eAk07oeMyyFnEUSkwehvH9bbIY4jq8waKoQQQ40EgoNgfUENAHPSoxkW2vnh2p0RLKxqot2hsSgYFRPU5zmOK61hKI1RdDpMRmLfGhh/GQTFHO8eDZ69X5nXm3bGgduufAI+u8c8Xvwr+M5j/bctWAnrnoVZP4OYsQOft2G/CQS2vQ1zfg0Z8w6+//3ZsxR8QyE+Eza/AZ/8BoZlwf6tUFtg2nx+H4y5ECLTYfVT5n04988w+druf88L7zR/C5OugdTTIDDWBDE2PxOkdJX9Crz3Y9AOKN5gAsC2ejj/b9DeCF/8CR4eB4HRULUL4iZC9ktQuApCEyEg
ClJPB4sNmipg7HcgLMWcuzwHlj8MpZtMoBSXCcmnQngqxGaAdsIHP4WkU2D2XfDhnfDm92HGj2D9cxA1Gqb9ANLO7N7nllp454ew8yPTzynXw4bn4cu/muAQZQLEhlLXAQrSz4czfmNev8XLk/3pxmGHN24w73n5DmiqgnULoL7E/I4jRsLM201WcNnfodm13+YHWTeax2Ep8L13wD/8CP8gxDHn+puwYafDLoGgEEIMBRIIDoJ1+TXEBPuQENY9w+LnbSU6yIeCqmbqWjpIiTyGZaBF62HVk3DhI+Ab3Hebtc/AikdNKdfka+HbDw2NgPB/l8GeJebxzsXwvffAcoRV0euehYSpEHsUlu/Yvx22vG4CgnP+bAItpxOcHZ5xObQ3g/cA60U6nfDVX2HpA4CCCx+GqDGQsxAc7SYbVFMAwXEw7lLY/h5sfhXGzoWwVFjxiAkMWmrNh3T/SKgvNh/mHe2dGbD9W+HmpZ396qm2EBacZ4IDtAnA3YGgva3/4wbSUAYvX2m+oDj9l+Z1hqVC+Taw+cMNi8Ev3AQ72S/Dtndg5DkmS/XBT8zz7zxuArOafFj/rGn/0a+6X8cnBH5d2Pl81b/go7shdbY53ye/Ae8guO49GDbFtEmaAZtfN697ynUw48cmAPv6SXP9vV/Bljc6z/n5fSboaqmBghVgC4DhsyFxGhStgyV/6myrrOZ3cfmzJnC7/Hl45iz49PeQMA2K18OLi+C6D0ywCWBvhwXnQlWe6ePiX8Kyh6CxDJJmwtwnzXFlW1y/+xTY9QmseRqeOtV9YQhJMPtCk0wWsKbAvHftDTD9R7D6XybQ3/o2pF9gzut21r3w9Bzz/5LkmTD3CXOu6T80f1cHm30UJxb3GEFlx+6UMYJCCDEUSCA4CNYX1JCVHN7nzJ9J4f4UVDdTVtdKRkLIsemQvQ3eudl8OAxPgzN/07tNUyV8/FuISjeZlnX/BS9fk1Hp+Tqaq82Hy9N/AX5hvc91uCp2wtL7TUbi4qcAbT5Up31r4CBoIKWbTBA483YIioOP/w/W/sd8KD1clbtg4U8hNBl+tAJ8umR1a/fBuz+CzKvND5ggbc3TJggbO7f7uZoqTeDU3mAyQPtWmUDws9/DptfgiufNB/dP7zHZpAv+DgGRnce31sGiu8yH+ZZqUx7ZXA0Lf2b2W33Me+flaz7U7/rEBCZevnDqHXDGb0FZTMlj8QYTANQVQ0k2hAyD4HhAmfOGp8Lr15rX194MXt5wyb+hrdH8PXj5wI73zfObl8ILczvHkq1/Dj7/I9y29tCzQcv/Yc4TPdYESuHD4fsf9T7PeQ/At35vAsfwVPO+r/uvee/+eQrMf8n8PSkL3LIMWuuhfLtpX7DCBM1OB1is8OXfzLVGXwjzFpjXFpthrhmb0XnN5Jm9SzVHX2B+wASvlbvMNa1esOxhyPvcvK8zb4eZd3QPjNoaTcBVvM5kFSdf11neGTcBrn0XnHZTzmlvhUcnmnO6A8GGEhO0n/sATLsZFv3cBH1zn4ARZ5n/lked072/SdPhlB+bv4uOZvO7rS00/dj9BfgEm/cz5VQT+I69GPI+NdnQ1lqYcGX388VNhNvWmWxo1y+dotIP5bcuTjSu0lCbjBEUQoghQwLBo8XphNp8Sq1xFNe2cOOs1D6bJUX480VOObXNHVyRlXD0rl+4ygQbZVvgho+6f7hc9rAJAqPHmUzF1Jt6l0eufsp8sLz0aTP+JzDaZBCjR5vsYFdrn4GvnzBByNwnzKQZvsHmA+DhcDrhy7+YTI+Xr/kw6hNkMpM7F5txR1e/Bn6hh37u9c+bc572c1NauHuJCQY3vGAyJqfdaUoDK3ZA7AQTBBzwnM+ZbE1toRkTdtEj5gN/aTa8+l2TSStYCcHDIHKkKUXcudiUB34/wQR8a58x5X4rHzclhtcthOcu6Ayc6opNGd+z55vnidNhx0LT/0nXmN9heCp8dq/JJk6Y7wpALjSBwrKHTbAx/tLu
4+Q6WszvK3a8K8hzmbfg4N7PydeZzFtAlAli25tMwFCTDyiTofvuG6aE0+ptsolgAuTmys7X3VVHqx+9focAACAASURBVHmtoUm9r1dfarKvmVfBeQ/CisfMGLz+gkmbn3lfwGR9p/0ARp4NL883k5goBeMuMa89ON78fYP5289ZaN5/Rzss+bN5Ly9/3gRwYDJ3h0opiBrV+Xyg8lsAn0Dzu4kdb0o6e+oadNr8YMatpry3ZCPETzLlm2B+P1YvuOjRg+unf/ihfTky7lLz36tfOIz4Vu/9PUtsxTefqzTUhw7apTRUCCGGBAkEj5achfDGdWw/azEAWSl9Z8qSwwOobTYf9kfH9lOi6aa1+VB6oHK67e+ZTI2Xr/lAW7TGlJ+B+QC+/GEzUcOcX5sZ+5be3/0DYluDCSJHf7vzW/tzHzDZko9+bUrj2hpMQBGabGb8s3rDRte/6xaYD/E/2XhwgVRXHS1m3NHOxSaYOffPJgP09ROAMh+GN74E/znTvIbxl3X/YD2Q9iaT5Rg7tzNzecm/YcU/TJZm82umlNDiBfYWM0Zq9l3QWG6yRZEjOs+16TWT7Tr3ftj0igm6wlJh5WOQ86F53Q2l/D97dx4mZ1nl//99autOOukkJJ0QspAAyUAIWwgoogiICm6oKILDoOiAOMOMy+gMzuKCP2fUma/jAuIwCooKihtEBBlUUEEQIgaQhEAMS5okZO9s3V3b+f1xV6WrK9XpStK1PJXP67r66q6qp6pOPekkdeqc+9x0TA6tird/CL7z1pCUWRzO+hQ8/I0waKR3c2j7XH5nqASe/L6BcfvFxCmXDu10s14e1oKdckVYl3Xvf4SkffENYRjH4utDO+LZ/z4QazwJp5e1PRYlR+1eEdobr/uvkIhNPykkxHd8JLRUvuv20N6Yz4VKIYTfjXxm8Ot68NpQfSpNTm//YKjU/eMzu1efH7ounMNXfCR8OFCpmj2cCbNCJe3614aE9aV/s/sxhT3SyPWH14CHqlu8yf+JXPiekPTf98VQPS6e51rHPf+8kAge/ZbK6wml9SQGKoJqDRURaQ1N/i4nQnpWgefZsPQ3jEoey1FTKyd5h04caHE8cuowg2Lu/KewvulDTwy8uS63egn8+H3hjfkFN8N/zQlVwWIi+ND/hDe2r/pEWCP1kveF6YaT5w1UAB77fqjuvfzDA48bi4V1P199GVx7akhYRh0EZ/4r9DwP534Vfv3Z0HrXdWRoR3vyZ6F9sVrucNsVIQk85/Ohlc0MXn1VeHN5yILwePPeDL/6dHjj+buvhBa/aoahPHZLGOyx4F0D13VMhNcU1mFtXR0GpnguxP/bL4S1YLf8Vah2vefn4bXd/fGQfAGs/HU4Fye+OyQKndPCurlsXxj0ceTrQzX1nd+HX30mVHbmnhOS11mvCBW+2aeFFsZFV4Sk8/R/KklEiolgJlQwS9deTZkH7/g29HSH5PnXn4PO6XDGP1d/zvdXIjVQlTr5Uhg3I7RqFpPz0g8C4smBCmfxe++msO5u2onwF68PrYWPfT9USXs3717pe2FxqDQfVLnCXrWxB4dK+ZpHw9TMcrvOfyYknsX4m117J8x7Iyz/ebi8KxEc4t+LkTL5SDj/xvA7LweGwu+Uto8QEWkdSgRHSm+YFJp+/iHefMI5JOOVh5HMKEwOHdueGDRRdDd/+lFI4iC0fc18SeXjfvr3MHoiXHBTGJ0/8XBY+1i4rX9baI2cd25IAgFe/enQ0njnP4aNn49+cxiPP2E2TD9x8GOPnxla2R68NiSW938pTC4cPTEMAJkyL6whOuWKUGl84JrqEsElN8PTd4U33Mt+Cmf+2+C2tFg8DJwoOvyM8NXzAnz37XDT+WE8/wkXDT3M5tn7Q8Ix/aShx+13HjJQSdv8XHgNXz8rPH/7+NBK2D4uVEZf8v5Q/bj5HaEqetiZIVl+6eWVH3vCLDjvfwdfN/1E+MjyUEGLxeDSe0MbbHtnaI+FksQp
PfSb+XHTwzYCD1wdqrVtYyofVw9/cfbQt8WTgyucoyfClKNDW+4jN4Z1hZPmhiQQQoJbngiuWwZzXzsysXZODV+VJEoS8WIiuC+DbRohNXbg92ZXElvjRBB2X+8qrS0Wxy1OytQaKiLSKrSh/Ajx3i0ALIj/mX987dBDEYoVwaMO7gzDZJ5/MOwTli/5j7WnGxZ9YGDN3bO/HbgtvSNUsbLpkHyueSxUp8ZMDrcffEyoCEJoqezfGlrxiuIJOO8bYaLk/V8KbyCfvW/oCtv8t8Jf3x3W0p1/Y2ijXPCu8Cb5kBPC2rtEG7zk8jDopHvxHk6ShyEct14e1tA9fXeI/RX/MPR9So2bFhKgGS8J1bSbzh+8cXXRC4+EKZPjD4ULv1/d5NMJh8LLPxhaGd98LfzlLeH89m6Gd94C53w2JON/83u45I59nzo6asLAfeOJgWEasVg4t6UVwT29mU+0hfM2feG+xVEP8dTuie1FP4F/eArefUcYjLLiFwNVpa2rB99/+/qw5cLko+sTK4TBStk6VdVGSnnCDeF3SWSkxVNqDRURaSFKBEfIiy+GPbmOsueZkMoNedzEjhSTxrRx/MzC4JNffjqsOespGVu/+HrI7AiDKqbMD4la0RM/CWPsl/8s7IWGh0l+RQcfE9ZB9W4JFcXpJ++eLCTbwzqv1Y+EymN6Gxx2+vAv8rBXwgcfD+2h5U64KIyGX/R3hc28K3jsljCJ8dgL4ENL4V/WhrWKe7NFxeiD4OJFIYan/29gW4iidU+G7SJGTwjrwvZmVP3pHwuvb/5bC5MPHw5fpRWpsVNCRa4WSoer5NLRaE3ck0EJSiZcjifCOZx1alhLufA98Lr/DMdsfWHw/dc9Eb5PPqoOsZa0htarvXKkDPq9yQxcJzLSEqnQGqqKoIhIS6hpImhmZ5vZcjNbYWZXDnHM+Wa21MyeMLObSq7/fOG6ZWb2Zau0F0MT2dmzAYCYZ0OVDkIF7E8/CpWNAjNj0RWn8sGz5oT9454rJHkbV4TvuWyo5B3x6sLI9pfDqt8PVCmKFbflP4fnHwif/E8raek8uDB05MFrYdPKMF2ykqPfEr7/37+F0fbF8fPD6Tyk8kCY9s4wcXTdsrB3Wbl8PmwyPeWYUHGLJ/Z9j8JYDE68JPy86ZmB63OZkATGk3DxbYOnYlbDbPDkynHTB28NUWuD1tTtoTU0KnZLbMtez+Qj4Q3/HdZhWnz3iuCLS8P3KXWsCObSYWBM6XXNLp4M61zzuZJEMOIfIkhziqe0RlBEpIXULBE0szhwDXAOMA+40MzmlR0zB/gYcKq7Hw18sHD9y4BTgWOB+cBJwD7Mbq+f/M5NrEwcFi68UEjW/vxL+OF7wubKLzyy69hDxo9idCoRRukXW7g2/jl8f/qusPnziYUBJ7NeHtaRrX5k8GM//X9hHdzU4wfvsVfc4+z+L4Z1bkOt2ZtwaFg/t2NdaPEcif0Aj3hV2Jvuj9+B9csH37b8Z7DhqdB+ub+buUNYb5YaC5tLEsH1y2Frd1gHedBh+/8c9VZe2Yn6m/lKraGVxOJhj8dKFcHRkwbanmtpUCJYiHmoAU3Npvh7kssMTGmN+u+ONCWLt9Eey5JRa6iISEuoZUXwZGCFu6909zTwPaB8usClwDXuvhnA3dcVrnegHUgBbUASeLGGse6XfN5JpXvYNvaIMMWxWLV77JYwGMRiYdPwJTcN3Klva5iWeMzbwzEbng7X/+FbYV+7OYV2xOL6qWd/G1ouX1waBmz0bgrbRBx6yuBgxkwJe4hl+0L7Z3IPA2mOfmv4flgVEzirdeQbwvfNzw1c5x62hJgwK0wAHQlmcNCswRXB4trIQ44fmeeot+EqaFFTPjV0T8nJuGkVEsFlYSBRPexKptJhnSBE5/wX48xHsK1VoiWepM3UGioi0ipqmQhOA1aVXO4uXFdqLjDXzO43swfN7GwAd38AuAdYU/i6y92X1TDW/fLMxh10sp3R4ybBjJPg
mV+HgS/Lbof5b4HL7g2DRm59P9z7uXCn1Y+EjcSPeXuY9LlxRdjCYcXdIYEr7gM2+qDQTrn852HDcs/BaR8dqCTOLEsEzQaqgqXbJlRyzNvD/Y95+0idioGpjKVv6tctgxf+EPa7G8n9zSbMHlwRXPs4JEbBxCOGvk8za7XW0Fiy+sS285DBraH5fFjvObleiWBpRTBiydSg9Y2F35+YKoJSA4m2kAiqNVREpCXUMhGstACsvJ8kAcwBTgcuBL5uZuPN7AjgKGA6IXk808x2W8RmZpeZ2WIzW7x+/frym+vm8VWb6GQnEyZOCXvx9fXA9eeEgS/HnB8Gllz0EzjirLDvHoT1exA2cJ80JySCqx4Ko/TLJ3guuDi0hP7u6nD5sDMGKoUzKmwrseBdYdPs4aopY7rCXnmTj9z3F7/bY04JFdBtawauKw50Ke5tOFIOmh0qj/nCcJ61j4XXvLeb2jeLlhwWU1oR3FMiOC1sD+KFfyK2PBv+/tQrESxuFRHJRLCkmqk1glJL8RRtllNrqIhIi6hlItgNzCi5PB1YXeGY29w94+7PAMsJieFbgAfdfbu7bwfuBF5adl/c/Tp3X+juC7u6umryIqrx1HMvEDNnwqQpMPVYeOWVYQrouBkDFbt4Iqz32/5iSBQ3rYREO4w9JFSwelbBil+GSt/0kwY/wQkXhTVxy38WhpmM6YLTPgKn/SN0TNo9oKPfDGf/R+1feCXxJHRMHlwRXHlveI3jZwx5t30yYXZoh9taSCDWPj5QDY2iatfURUVpYpsfpjW08xDI9u7aj3NXm289BsXAQGzZKCaClaqZSgSlBuIp2jQ1VESkZdQyEXwYmGNms80sBVwALCo75lbgDAAzm0RoFV0JPA+80swSZpYkDIpp2tbQ57tD0hMfXRi48vIPwVFv3H0wyqS54fuGFbBxZUhkYrHQGgphzeAhJ0CqY/ATpEaHDc0BphW2gph9Gpz5LzV6RfupcypsLVQEs+kw1GYk1yEWHTQ7fN/0TGjF7dsyMDU1inbbbiEiichQ9mbNY2eha7zYHrryXkiNqd+fZ2kyVZzQG5UN5WOVhsVE/HdHmlOijaRaQ0VEWkbNEkF3zwJXAHcRkrhb3P0JM7vKzIqjLO8CNprZUsKawI+6+0bgh8CfgceBR4FH3f2ntYp1X+XzzpJVW1i3bm24ojh5M56Ad3xn960bJs4J3zc+HSqCxcmWxet7N8GhL6v8ZCf/dXizXLqnXbPqnDbwhr77odDiN9SG9ftjQiER3PzMQAUp0ongAd4aCuH3xj1Ux2e9on6TO+PF1tBM9KpqpVNDd60R1IbyUgPxJG1kyOTUGioi0gpq+m7B3e8A7ii77uMlPzvw4cJX6TE54H21jG1/9aZzXHDdAzza3cPpsa0QZ/gtGCbMCvulbXgqJC9HvCpcX6wIwsDav3KjJsCHntj3vffqaezUMOUUQmXH4qEtdqSNmx6qIZuegW0vAla/KZO10MqtocMltsU9H7d2hw9JtjwHL/u72sdYtCuZ6i9JBCNSEYzyoBuJlngbKVUERURahj423kcfv+1PPPZCD1edezRvTWyFnzF8IphIhWTwmd+E7R2KCWCqY6CKNnO3pZADopAEQnhT39cD6R0hEZx2IrSPG/nnicXDmsnNz4bnmnjE7m21URJPQv+2MPzG89F/Mx9PDrQqDpfYFocMbV0dqoEw8EFJPVTaRzAq57/S1NCoxC7REk+S1IbyIiIto5ZrBFvWbUte4Ad/6OaKM47g4lNmMSa/LdxQzabsk+YO7DNYuun5lPm1S5jqrVjd2bQSVv9x6HbXkXDQbHjq52HbjaPeWLvnqYdiBa1Y1YnKhuZD2Zt9BOOJUEle8xg8dWdo+y39+1FriZLW0OI+glE5/5oaKvWSaCOl1lARkZahiuBecneuvffPzJvayQfPKgx/KU46bB8//ANMOgKeKvwnWvpG983Xhj0CW0ExEVx+J+SzMOPk2j3XhNmw4hcw
/zw4819r9zz1UEycWqW9b2+GxUAYKLTkO+Hn8vW1tTYomYrY+S+NPZ8J6wOj0j0g0RJPkVBFUESkZSgR3EtPrN7Kk2u38ek3zyceK7zZ6t0MbZ3VbZZeHAwTb4PO6QPXd0wc+WAbZWwhEVx6W/hevh3GSDrhL6G9E07/WHT3DyzaVRFskfa+eCp8EJDPV5cInns1HHcBLFsEJ19WnxiLirFl+yOYCBbizBc+RNBm8lIr8RQpz5JWIigi0hKUCO6lWxavoi0R403HHTJwZe9mGFVFNRAGtpCYMGvw1hKtpHNq+P7in2D8oTBmcu2e65ATwlcrKG8NjXp7XzH+fGb41lAIVazZrwhf9TZonV0hmYpKVW3Q1NBsdBJYiZ54igQZVQRFRFpEi2YitdGXyXHbktW89uiDGTeq5E1t7+bq1gcCTCpUBOu5/qneUh0Dax1r2RbaalqxNRQGkttmfj1mIfkr7iMYlT0EocJ51ud7UiOJNhKeoT+jRFBEpBUoEdwL9y5fR09vhrcvnD74hr1JBEdPDEng9BNHPsBmUtwXbroSwaq1WmtosUUxmw4tos3+ekorslGqxpZXM5v9PEt0xVMk1BoqItIy9NHxXvjlsnV0tic45bCy9Xy9m2HctOoexAze/0C03mjui7FTYd1SmL6w0ZFER3EfwVZrDc3sGHy5WSWKiWB/dPYQhIHN43PFhLvJz7NEV2FYTDqdbXQkIiIyApQIVimfd+5Zvp7T5naRiJcVUvu2VF8RBEi2j2xwzWj8DEiOhoOPaXQk0RFPRnNq5VCK8ad3DL7crEorss0ea6ny1lANi5FaKWypki92LYiISKQpEazSn1b3sG37Nt4z6nF48CGYehwcegq4711r6IHiFR+BY9+h6sTe2K01NOLnLpKJYGEfwajsIQi7byjf7OdZoqvwu+XZvgYHIiIiI0GJYJV+9eQ6zoo/woJHvwyPAhaH9/8u7JmXzyoRLDd+RviS6sVT4HnI9A5cjrJiIpvePvhys4onB7aPiNK5HzQ1NKNhMRFnZu3Ab4A2wv/RP3T3T5Qd0wbcCJwIbATe4e7P1jy4Qsu0FbsWREQk0jQspkr3LF/Pwgk7w4XL74PUGLjrY/Cbz4fruo5sXHDSGnZLnCKUjFQSuYpgWzQmnJYrbQ3NqyLYAvqBM939OOB44Gwze2nZMe8FNrv7EcB/A5+rS2QlHzrk816XpxQRkdpRIliF7f1ZHuvewnGdOyDZAVPmw+lXwp9/Bb/7Stj8es6rGx2mRN1uiVOTV9CGsyuxjUoimIzm5M1BFUGtEYw6DwqfBpEsfJVnXecC3yr8/EPgVWZ12PiysK1KyjKaHCoi0gKUCFZh9ZZe3GGKbQ6toGZw8qUw9XiYeza89j8aHaK0gl2JYKtUBKPWGpoKE0Ojto9g6dTQXKb5z7MMy8ziZrYEWAfc7e6/LztkGrAKwN2zQA9QNs4azOwyM1tsZovXr1+//4EV/k1KkdVegiIiLUCLSaqwpicsjB+XXgedU8OV8SRc+iuwWEgMRfZX1Cpow9mV2O4cfLlZJdoGqmrJzkZHUz2zcG7zhTWCydGNjkj2k7vngOPNbDzwEzOb7+5/Kjmk0n86u/Vquvt1wHUACxcu3P9eztJEMJcjFCtFRCSqVBGswtqeMLyjve/FgY3SAWJxJYEyclquNTRqawSTJfsINnms5Ur3oIz6743s4u5bgHuBs8tu6gZmAJhZAhgHbKp5QIW/F0lVBEVEWoISwSqs7enHyBPf8WLYKF2kFnZrDY1Qe2IlUWwNzfZHcwuGYhKrDeUjz8y6CpVAzGwUcBbwZNlhi4B3FX5+G/Ard6/99JZEsSKoNYIiIq1AraFVWLu1l7kdfVguG9YIitRCy7aGRuT1lO4j2Oyxltu1B6WGxbSAqcC3zCxO+LD2Fne/3cyuAha7+yLgG8C3zWwFoRJ4QV0iK3w4lbQc6awSQRGRqFMiWIU1PX0cNWZbWI6vRFBqpdVaQ2MR
S2x3JVOZaA2LgXCutaF8S3D3x4ATKlz/8ZKf+4C31zMuYNfvVhsZ+pUIiohEnlpDq7C2p48j2raFC2oNlVppuYpgBFtDd60RbPJYy8WT2lBeai8xsEZQFUERkehTIliFNT19HJrcEi6UDosRGUktt31ExFpDEyXtlVFbn1naGtrs51miq3RqaDbX4GBERGR/KREcxs50lp7eDIfENof9ujq6Gh2StKpBiZOFqbRRFrVEsJhMZSM4ebO4vjGv1lCpoXjJsBhVBEVEIk+J4DDWFvYQnOQbQ1toTKdMaqS0NTSeiv7WJMXXk9k5+HKzKt2CIWprBHdtfZEZ2GBeZKQVt4+wrNYIioi0AGU1wygmguOy67U+UGqrtDW0Fao6UWt1jSch0wuea/5Yy5UmglGLXaKj8AFJSmsERURaghLBYawpJIKj+17UxFCprdJWymavnlUjasNv4m2htRKaP9Zyg1pDW+B3R5pT4XdLawRFRFqDEsFhrN3aBzjJHUoEpcbKW0OjrtiiGJXtMErPedTOfzwZnRZcia54sSKoNYIiIq1AieAw1vb0Mad9G5bZAeOmNzocaWXF5CPbF71EpBKz8Dp2JShN/ppKE6jIrRFMDSTc2lBeaqW4RhCtERQRaQVKBIexpqePN49aEi4c/qrGBiOtbVBFqkXezJe+pmZPUEqTv6id/3gyOi24El3xBG4xUhoWIyLSEpQIDmP9tj7OyD8Ak/4CJh/Z6HCklZUmH63yZr74mmKJ5p+4O+j8R6wiGEtCJiItuBJt8TZVBEVEWkSTvzNrvFT/Jo7sfxzmndvoUKTVtXJFMAqJbZTPfzwFaa0RlNqzeIpRpqmhIiKtQIngME7qf4AYeZj3pkaHIq2uNBGJ2hq1oexKBCOQnJRWAaN2/uPJ6E48lWhJpGiP5TQ1VESkBSgRHMapmQfZkJoGU+Y3OhRpdaUbgbfKm/nia4rC64lya26U1mJKtMVTtMdUERQRaQVKBIcxyTeybtThYQKiSC0Vp2xCNCpo1Yhsa2gE4i0V5bZWiZZ4ina1hoqItAQlgsNIeRqP2uAIia4oJU7ViFJiO2hqaMTOf7y0mhyBcy3RFU/RZjkNixERaQFKBPfA3UmRUSIo9VN8Ex+1RGQoUXo9Ud9HsNLPIiMtkaJNFUERkZZQ00TQzM42s+VmtsLMrhzimPPNbKmZPWFmN5VcP9PM/s/MlhVun1XLWCtJ5/KkSONRe1Mo0RWlClo1olThjHJ7ZZRjl2iJpwr7CGpYjIhI1CWGP2TfmFkcuAZ4NdANPGxmi9x9ackxc4CPAae6+2Yzm1zyEDcCn3H3u81sDFD3jx/7s3nayECivd5PLQeqKCVO1dhVEYxAchKPcmtoyfnVsBippXgbbWwnnVNFUEQk6mpZETwZWOHuK909DXwPKN+M71LgGnffDODu6wDMbB6QcPe7C9dvd/edNYy1ov5MSARNFUGplyglTtWIamtoFOItpdZQqZd4kpRl6M8oERQRibpaJoLTgFUll7sL15WaC8w1s/vN7EEzO7vk+i1m9mMz+6OZ/WehwjiImV1mZovNbPH69etH/AX0pTO0WVaJoNRPy1UEI/R6opxMDYq9Zo0eIpAcRYqMKoIiIi2glolgpf0WvOxyApgDnA5cCHzdzMYXrn8F8BHgJOAw4N27PZj7de6+0N0XdnV1jVzkBen+XgAsqdZQqZMoVdCqEaUKZ5SHxbTiHpTSnBJtpDytiqCISAuoZSLYDcwouTwdWF3hmNvcPePuzwDLCYlhN/DHQltpFrgVWFDDWCtK94duVCWCUjcaFtM4g7aPiNj5j3I1U6IlMYo20qoIioi0gFomgg8Dc8xstpmlgAuARWXH3AqcAWBmkwgtoSsL951gZsUy35nAUuos3dcHQFyJoNRLlBKnakQpsR2UTEWsIlgae0ytoVJDyXaSnqY/o6mhIiJRV7NEsFDJuwK4C1gG3OLuT5jZVWb2psJhdwEbzWwpcA/wUXff
6O45QlvoL83scUKb6f/WKtahZAsVwVhqVL2fWg5ULdsaGoHXE+lhMRGOXaIlMYpUvl8VQRGRFlDTj47d/Q7gjrLrPl7yswMfLnyV3/du4NhaxjecbDqsEYynVBGUOolSBa0asSglgoUqoMWiN3BF+whKvSTaSGiNoIhIS6jphvJRly0Mi0moIij1sisRjFhr4lCilNhGuS13UEUwAudaois5ioRnyOSyjY5ERET2kxLBPchl+gGIKxGUeolSK2U1ovR6YnHAopmEa0N5qZdE6JCxbB+hqUdERKJKieAe5Aqtock2JYJSJ1GqoFUjSlU2szA5NIrnXlNDpV4KiWC7JoeKiESeEsE9yKXD1FAlglI3UUqcqhG1xDaeit4egqA1glI/hSnabWTozyoRFBGJMiWCe5DPKBGUOotSK2U1ovZ64sloJlLFmGOJUNkUqZVE+P+w3dKklQiKiESaEsE9yO9qDR3d4EjkgBG1CtpwIpcItkVzjWCUprNKtCWLraEZJYIiIhGnRHAPPBuGxSTbtH2E1IlaQxsrnozmuS/GrEExUmslawTVGioiEm1KBPckGyqCllBrqNRJ1Cpow4na64mnIBGRWEvtOs9KBKXGEgNrBFURFBGJNiWCe+CF7SMiOTxCoilqFbThRK3CGU9FJ9ZSrfZ7I80rObBGsD+ba3AwIiKyP4ZNBM3sCjObUI9gmk4uDIspfgIqUnNRq6ANJ2oJSkKJoDSWmc0ws3vMbJmZPWFmH6hwzOlm1mNmSwpfH69bgKXbR6giKCISaYkqjjkYeNjMHgGuB+7yA2QXWcuqIih1FrUK2nCiltged+GuikekxAv/lEflPMueZIF/cPdHzGws8Aczu9vdl5Yd91t3f0Pdo9vVGqo1giIiUTdsRdDd/xWYA3wDeDfwtJn9u5kdXuPYGs5y/aRJahy71E+rVXaiNs3yJe+DBRc3Ooq9p2ExLcPd17j7I4WftwHLgGmNjapEcR9B0xpBEZGoq2qNYKECuLbwlQUmROqkigAAIABJREFUAD80s8/XMLaGi+X6SVtE3sBKa4haBW04rZbYNiud55ZkZrOAE4DfV7j5FDN71MzuNLOjh7j/ZWa22MwWr1+/fmSCKu4jiNYIiohEXTVrBP/ezP4AfB64HzjG3d8PnAicV+P4GiqW6yerRFDqqdXe0LdaYtusYsXW0Bb5vRHMbAzwI+CD7r617OZHgEPd/TjgK8CtlR7D3a9z94XuvrCrq2tkAktq+wgRkVZRzRrBScBb3f250ivdPW9m9V+fUEexfJqMEkGpp8POgJP+GsYf2uhIRkarJbbNyiy6E09lN2aWJCSB33X3H5ffXpoYuvsdZvZVM5vk7htqHlzJ9hFKBEVEoq2a1tA7gE3FC2Y21sxeAuDuy2oVWDOI5/rJxvTGSupo/Ax4/f8bGP4RddNOhFM/CDNPaXQkrS+eGqgMSmSZmRHW5C9z9y8McczBheMws5MJ/5dvrEuA8SRucdpNU0NFRKKumncN1wILSi7vqHBdS0p4mpwSQZF9l2yHV3+q0VEcGGIJVQRbw6nAXwGPm9mSwnX/DMwEcPevAW8D3m9mWaAXuKCu07wT7bRn1BoqIhJ11SSCVvofTKEl9ID42DmeT5NLausIEYkAtYa2BHe/D9jjqGp3vxq4uj4RVZAcRXtvmm1KBEVEIq2a1tCVhYExycLXB4CVtQ6sGSTzqgiKSETEU63TUizNLdFeWCOoqaEiIlFWTSJ4OfAy4AWgG3gJcFktg2oWCdLk46oIikgEJNt3jfYXqSVLtjMqlqEvo4qgiEiUDfvxsbuvAy6oQyxNJ+VKBEUkIt7wReiY1Ogo5ECQGEVHLENfRhVBEZEoGzYRNLN24L3A0UB78Xp3f08N42q4fN5JeZqsEkERiYLZr2h0BFLGzA4Hut2938xOB44FbnT3LY2NbD8l2hhlSgRFRKKumtbQbwMHA68Ffg1MB7bVMqhm0J/N02YZUCIoIiL75kdA
zsyOIGwJMRu4qbEhjYDkKEZZhl4lgiIikVZNIniEu/8bsMPdvwW8HjimtmE1Xn82RxsZPKFEUERE9kne3bPAW4AvuvuHgKkNjmn/JdoZZWl600oERUSirJpEMFP4vsXM5gPjgFk1i6hJ9GfztJEBJYIiIrJvMmZ2IfAu4PbCdckGxjMykmFqqCqCIiLRVk0ieJ2ZTQD+FVgELAU+V9OomkBfJlQELdE+/MEiIiK7uwQ4BfiMuz9jZrOB7zQ4pv2XaKedtNYIiohE3B6HxZhZDNjq7puB3wCH1SWqJtCfydFmGSypRFBERPaeuy8F/h6g8IHqWHf/bGOjGgGJdlKkVREUEYm4PVYE3T0PXFGnWJpKur8PQImgiIjsEzO718w6zewg4FHgBjP7QqPj2m/JUaRcawRFRKKumtbQu83sI2Y2w8wOKn7VPLIGS/fvBCCmRFBERPbNOHffCrwVuMHdTwTOanBM+y/RTsr7taG8iEjEDbuPIFDcL/BvS65zWrxNNNMXKoLx5KgGRyIiIhGVMLOpwPnAvzQ6mBGTaCfpaXrT2UZHIiIi+2HYRNDdZ9cjkGaTTRcqgilVBEVEZJ9cBdwF3O/uD5vZYcDTDY5p/xU6ZXKZvgYHIiIi+2PYRNDMLq50vbvfOPLhNI9Mfy8AiZQqgiIisvfc/QfAD0ourwTOa1xEIyQR/l/0TC/ujpk1OCAREdkX1bSGnlTyczvwKuARoKUTwVxaiaCIiOw7M5sOfAU4lbCk4j7gA+7e3dDA9lehIthOhv5snvZkvMEBiYjIvqimNfTvSi+b2Tjg2zWLqEkUW14SbUoERURkn9wA3AS8vXD5osJ1r25YRCOhsL9uu4XJoUoERUSiqZqpoeV2AnOqOdDMzjaz5Wa2wsyuHOKY881sqZk9YWY3ld3WaWYvmNnV+xDnfskWWkOTSgRFRGTfdLn7De6eLXx9E+hqdFD7rZAItpHRXoIiIhFWzRrBnxJaWiAkjvOAW6q4Xxy4hvDJZzfwsJktKmywWzxmDvAx4FR332xmk8se5tPAr6t5ISMtX6gIKhEUEZF9tMHMLgJuLly+ENjYwHhGRmGadrs2lRcRibRq1gj+V8nPWeC5Ktc3nAysKCyOx8y+B5wLLC055lLgGnffDODu64o3mNmJwBTg58DCKp5vRLkSQRER2T/vAa4G/pvwgervgEsaGtFIKLaGok3lRUSirJrW0OeB37v7r939fmCjmc2q4n7TgFUll7sL15WaC8w1s/vN7EEzOxvAzGLA/wM+uqcnMLPLzGyxmS1ev359FSFVr1gRjGtYjIiI7AN3f97d3+TuXe4+2d3fTNhcPtpK1gj2qSIoIhJZ1SSCPwDyJZdzlIzD3oNK86S97HKCsN7wdELLzNfNbDzwN8Ad7r6KPXD369x9obsv7Ooa2WUXni3sj5RoG9HHFRGRA9qHGx3AfktqjaCISCuopjU04e7p4gV3T5tZqor7dQMzSi5PB1ZXOOZBd88Az5jZckJieArwCjP7G2AMkDKz7e5eceBMLRRbQ4uffIqIiIyA6G+6lyhZI6jWUBGRyKqmIrjezN5UvGBm5wIbqrjfw8AcM5tdSBwvABaVHXMrcEbhcScRWkVXuvtfuvtMd58FfAS4sZ5JIJQmgqoIiojIiCnvjImeZMn2EaoIiohEVjUVwcuB75Zs4dANXDzcndw9a2ZXAHcBceB6d3/CzK4CFrv7osJtrzGzpYSW04+6e3NMVMuqIigiInvPzLZROeEzIPoLz0uGxWiNoIhIdFWzofyfgZea2RjA3H1btQ/u7ncAd5Rd9/GSn52wXmLINROFfZe+We1zjphcoRs2roqgiIhUz93HNjqGmirdR1CtoSIikTVsa6iZ/buZjXf37e6+zcwmmNn/V4/gGsmyfWRIQqya7lkREZEDRGEfwTbS9GbywxwsIiLNqpos5xx331K8UNjz73W1C6k5xHL9ZCzZ6DBERESaSzyFY1ojKCIScdUkgnEz
29UfaWajgJbvl4zl+snGWv5lioiI7B0zLNVBZ0xrBEVEoqyaYTHfAX5pZjcULl8CfKt2ITWHWD5NNlbNLhkiIiIHmFQHY9P9WiMoIhJh1QyL+byZPQacRZh49nPg0FoH1miJfD+5hBJBERGR3SRHMyam1lARkSirdhLKWiAPnAe8ClhWs4iaRDyfJq/WUBERkd2lxjAm1q9EUEQkwoasCJrZXMIm8BcCG4HvE7aPOKNOsTWMu5PwNHm1hoqIiOwu1UEHO+lTa6iISGTtqTX0SeC3wBvdfQWAmX2oLlE1WDqXp80zeEIVQRERkd2kOhhtm1URFBGJsD21hp5HaAm9x8z+18xeRVgj2PL60nlSlsG1mbyIiMjuUqMZ7X1KBEVEImzIRNDdf+Lu7wCOBO4FPgRMMbNrzew1dYqvIXozOdrI4In2RociIiIHEDObYWb3mNkyM3vCzD5Q4Rgzsy+b2Qoze8zMFtQ90NQY2unT1FARkQgbdliMu+9w9++6+xuA6cAS4MqaR9ZAfYVEELWGiohIfWWBf3D3o4CXAn9rZvPKjjkHmFP4ugy4tr4hAqkO2r1X+wiKiERYtVNDAXD3Te7+P+5+Zq0CagahIpjGVBEUEZE6cvc17v5I4edthCnd08oOOxe40YMHgfFmNrWugaY6aMurNVREJMr2KhE8UPRmcrRZBksqERQRkcYws1nACcDvy26aBqwqudzN7skiZnaZmS02s8Xr168f2eCSHSQ9TTqdHtnHFRGRulEiWEFfOrSGxpQIiohIA5jZGOBHwAfdfWv5zRXu4rtd4X6duy9094VdXV0jG2CqIwSS6R3ZxxURkbpRIlhBcViMEkEREak3M0sSksDvuvuPKxzSDcwouTwdWF2P2HYpJILJ3E6yuXxdn1pEREaGEsEKetNZ2sgQTykRFBGR+jEzA74BLHP3Lwxx2CLg4sL00JcCPe6+pm5BAqTGADDa+unLKhEUEYmiPW0of8Dq6+8nZk48NarRoYiIyIHlVOCvgMfNbEnhun8GZgK4+9eAO4DXASuAncAldY8yNRqA0YUtJMa06e2EiEjU6F/uCrL9OwFIKBEUEZE6cvf7qLwGsPQYB/62PhENodAa2kGftpAQEYkotYZWkO4Pi98TbUoERUREdlPSGqotJEREokmJYAXZ/j4AkkoERUREdleoCBZbQ0VEJHqUCFaQTYeKYDypRFBERGQ3ybBGsMP62KlEUEQkkpQIVpArtIaSaGtsICIiIs2o2BpKPzv6sw0ORkRE9oUSwQpyxQ1yE9o+QkREZDe7WkP72ZFWIigiEkVKBCvIZ8IaQVUERUREKki04RZjtPWxXRVBEZFIUiJYQT5dTARVERQREdmNGaQ66KCP7X1KBEVEokiJYAX5rCqCIiIie5Qaw2jTGkERkahSIlhJRhVBERGRPbFUB52xfrYpERQRiSQlgpXk+sN3VQRFREQqS46mM5ZWRVBEJKKUCFZgWVUERURE9ig1hjGxPnb0ax9BEZEoUiJYSVYVQRERkT1KdTDa0moNFRGJKCWCFcR2tYaqIigiIlJRYWqoWkNFRKJJiWAFMa0RFBER2bNUB6Nc20eIiESVEsEy7k4s30/OEhCLNzocERGR5pTqoN17taG8iEhEKREs05/NkyJLLpZqdCgiIiLNK9VByvvYkVYiKCISRTVNBM3sbDNbbmYrzOzKIY4538yWmtkTZnZT4brjzeyBwnWPmdk7ahlnqb5MjjYy5GJqCxURERlSqoOEZ+jv68PdGx2NiIjspUStHtjM4sA1wKuBbuBhM1vk7ktLjpkDfAw41d03m9nkwk07gYvd/WkzOwT4g5nd5e5bahVvUW8hEczHVREUEREZUrIjfMv30p/N057UcgoRkSipZUXwZGCFu6909zTwPeDcsmMuBa5x980A7r6u8P0pd3+68PNqYB3QVcNYd+nL5GmzNB5XRVBERGRIqZAIjqZf6wRFRCKolongNGBVyeXuwnWl5gJz
zex+M3vQzM4ufxAzOxlIAX+uWaQletOhIuhxbR0hIiIypEIi2GHaQkJEJIpq1hoKWIXryhcRJIA5wOnAdOC3Zja/2AJqZlOBbwPvcvf8bk9gdhlwGcDMmTNHJOhia6hr6wgREZGhpcYAqgiKiERVLSuC3cCMksvTgdUVjrnN3TPu/gywnJAYYmadwM+Af3X3Bys9gbtf5+4L3X1hV9fIdI4Wh8VoD0EREZE9KFQEx1iv9hIUEYmgWiaCDwNzzGy2maWAC4BFZcfcCpwBYGaTCK2iKwvH/wS40d1/UMMYd5PO5WmzDCTUGioiIjKk9k4AxtCrLSRERCKoZomgu2eBK4C7gGXALe7+hJldZWZvKhx2F7DRzJYC9wAfdfeNwPnAacC7zWxJ4ev4WsVaKptz2kiDhsWIiIgMrX0cAJ3sZJsqgiIikVPLNYK4+x3AHWXXfbzkZwc+XPgqPeY7wHdqGdtQsrl8aA1NqiIoIiIypGIiaDvY0Z9rcDAiIrK3arqhfBRl8641giIiIsNpC62hnexke3+mwcGIiMjeUiJYJpsPawRNawRFRESGFovjbZ102k62qyIoIhI5SgTLZHLFiqASQRERkT2x9nEcFN+pfQRFRCJIiWCZXKE11JJqDRUREdmj9nFMiGn7CBGRKFIiWCabzdFGGkuOanQoIiIiza19HONiO9mu7SNERCJHiWCZbDZD3JyYKoIiIiJ71j6OcexURVBEJIKUCJaxbH/4rjWCIiIie9Y+jjHs0BpBEZEIUiJYJp/tAyCeUmuoiIjIHrWPo8N3sF2JoIhI5CgRLFdIBGPaUF5ERGTP2scxKr+DHX3pRkciIiJ7SYlguUJrqBJBERGpNzO73szWmdmfhrj9dDPrMbMlha+P1zvGQdo6ieHQv62hYYiIyN5LNDqAplOoCJoSQRERqb9vAlcDN+7hmN+6+xvqE84w2scBEEtvxd0xswYHJCIi1VJFsExxWIw2lBcRkXpz998AmxodR9UKieDo/A56M7kGByMiIntDiWCZgURQ20eIiEhTOsXMHjWzO83s6KEOMrPLzGyxmS1ev359bSIpJIKd7GTLzkxtnkNERGpCiWCZRHZ7+CE5urGBiIiI7O4R4FB3Pw74CnDrUAe6+3XuvtDdF3Z1ddUmmmIiaDvYvFMDY0REokSJYJlJO1cUfpjb2EBERETKuPtWd99e+PkOIGlmkxoWUElFsEcVQRGRSFEiWGbazmWsYiqMGt/oUERERAYxs4OtMJHFzE4m/D++sWEBlVQEt/QqERQRiRJNDS0zbedyHonPYUajAxERkQOOmd0MnA5MMrNu4BNAEsDdvwa8DXi/mWWBXuACd/cGhQttnUCoCKo1VEQkWpQIltq+noOyL/JU6nWc1ehYRETkgOPuFw5z+9WE7SWaQzyBp8bQmdWwGBGRqFFraKk1SwD4c2JOgwMRERGJBmsfz4TYTraoIigiEilKBEut/iN5jGeShzc6EhERkWhoH8fERK8qgiIiEaNEsNTqP7I2MZ1MYkyjIxEREYmG9nGMj/WyWYmgiEikKBEstfqPrEzNJRG3RkciIiISDe3jGMdOenrVGioiEiUaFlPUtxUS7fzZ5pCIKREUERGpSvs4xrBDFUERkYhRRbCovRM+sITbR72JREynRUREpCrt4+jw7VojKCISMcp4ymQdtYaKiIhUq30c7bkdbOvto5FbGoqIyN5RIlgmm3O1hoqIiFSrowvDGZPbyo50rtHRiIhIlZQIlsnk8iTiOi0iIiJV6ZgEwETbqr0ERUQiRBlPmVzeSao1VEREpDpjJgMwyXq0TlBEJEKUCJbJ5p24hsWIiIhUp6MLgElsVSIoIhIhynjKZHJ5klojKCIiUp1CIjjRetis1lARkchQIlgml3dNDRUREalW+3g8lgitob2qCIqIRIUSwTKZnFpDRUREqhaLwehJTGQrPaoIiohEhjKeMtl8XsNiRERE9oJ1dDE5vo3NWiMoIhIZSgTL
5HJOQhVBERGR6o3pYkpMw2JERKJEGU+ZTD6vNYIiIiJ7o6NL+wiKiERMTRNBMzvbzJab2Qozu3KIY843s6Vm9oSZ3VRy/bvM7OnC17tqGWepbM5JaGqoiIhI9Tq6mOBb2LC9v9GRiIhIlRK1emAziwPXAK8GuoGHzWyRuy8tOWYO8DHgVHffbGaTC9cfBHwCWAg48IfCfTfXKl4AdyebdxJxFUpFRESq1tFFm/eztWdLoyMREZEq1TLjORlY4e4r3T0NfA84t+yYS4Frigmeu68rXP9a4G5331S47W7g7BrGCoStIwBVBEVERPZGYS9B37F+1/+lIiLS3GqZCE4DVpVc7i5cV2ouMNfM7jezB83s7L24L2Z2mZktNrPF69ev3++As8VEUGsERUREqjdmMgATvIeNO9QeKiISBbVMBCtlU+UfEyaAOcDpwIXA181sfJX3xd2vc/eF7r6wq6trP8MdSASTmhoqIiJSvY5JAEyyHtZtVSIoIhIFtcx4uoEZJZenA6srHHObu2fc/RlgOSExrOa+Iy6bywMQV2uoiIhI9QqtoRNtKy9u7WtwMCIiUo1aJoIPA3PMbLaZpYALgEVlx9wKnAFgZpMIraIrgbuA15jZBDObALymcF1NZXKFiqBaQ0VERKpXTATZyouqCIqIRELNpoa6e9bMriAkcHHgend/wsyuAha7+yIGEr6lQA74qLtvBDCzTxOSSYCr3H1TrWIt2jUsRlNDRUREqpdow9s66cr2qCIoIhIRNUsEAdz9DuCOsus+XvKzAx8ufJXf93rg+lrGVy6j1lAREZF9Yh1dHJLezvJtSgRFRKJApa8Su4bFqDVURERk74yZwtR4j1pDRUQiQolgiVw+VAQTmhoqIiKyd8bPYKqvV2uoiEhEKOMpURwWow3lRURE9tL4mUzIbWBDz85GRyIiIlVQIlgim9OwGBERkX0yfiZxciR3rtm1HZOIiDQvZTwlssXWUK0RFBER2TvjZwIw3dazYXu6wcGIiMhwlAiWKA6LUWuoiIjIXipJBLVOUESk+SkRLFHcPkLDYkRERPZS53QcUyIoIhIRynhK5LR9hIiIyL5JpMiPmcp026BEUEQkApQIligOi9GG8iIi0ghmdr2ZrTOzPw1xu5nZl81shZk9ZmYL6h3jnsQmHMoMW0/35t5GhyIiIsNQIlii2Bqa1NRQERFpjG8CZ+/h9nOAOYWvy4Br6xBT1WzCTA6Nb+SZDTsaHYqIiAxDGU+JYmuopoaKiEgjuPtvgE17OORc4EYPHgTGm9nU+kRXhfEz6fINPL9ha6MjERGRYSgRLJHR1FAREWlu04BVJZe7C9c1h/EziZOnf1M3+cL/qSIi0pyUCJbIamqoiIg0t0qfVFbMuMzsMjNbbGaL169fX+OwCgpbSEzOrWONBsaIiDQ1ZTwlsmoNFRGR5tYNzCi5PB1YXelAd7/O3Re6+8Kurq66BFe6l+CzWicoItLUlAiWKE4NVUVQRESa1CLg4sL00JcCPe6+ptFB7dI5Hbc4s2JrNTBGRKTJJRodQDPJ5gutoaoIiohIA5jZzcDpwCQz6wY+ASQB3P1rwB3A64AVwE7gksZEOoRECibNYd6Lq3hAiaCISFNTIliiWBFMqiIoIiIN4O4XDnO7A39bp3D2iU2ex7wN93PzRiWCIiLNTBlPiWJFMK6KoIiIyL6ZMo+p/iJr6zWgRkRE9okSwRKZnLaPEBER2S+TjwZg1JYVu/bnFRGR5qNEsETxP6xkXKdFRERkn0yZB8Dh/hyrt/Q2OBgRERmKMp4SxX0EVRAUERHZR+Nmkkt0cKQ9z7I1WxsdjYiIDEGJYIlM3knGDTNlgiIiIvskFoMp8zgytopHu7c0OhoRERmCEsESubxrD0EREZH9FD/4aObFu1ny/OZGhyIiIkNQ1lMik8trUIyIiMj+mnw0nb6NNd3PktfAGBGRpqREsEQ259pMXkREZH9NPQ6AOZknWblhe4ODERGRSrSh
fIls3kloYqiINKlMJkN3dzd9fX2NDqVu2tvbmT59OslkstGhyN6YtoB8YjQvyz7BH5/fwhGTxzY6IhERKaNEsERWraEi0sS6u7sZO3Yss2bNOiCGWrk7GzdupLu7m9mzZzc6HNkb8SQ261Re8fRSru/ewtsXzmh0RCIiUkblrxKhItj6b65EJJr6+vqYOHHiAZEEApgZEydOPKAqoK3EDnslh9kLPP/cnxsdioiIVKBEsEQ27yQ1NVREmtiBkgQWHWivt6XMPg2ASet/z5ad6QYHIyIi5ZT1lMjm8sTVGioiUtHGjRs5/vjjOf744zn44IOZNm3arsvpdHVv9C+55BKWL19e40ilKUw5hmzbeF7KE9y7fH2joxERkTJaI1gik9OwGBGRoUycOJElS5YA8MlPfpIxY8bwkY98ZNAx7o67Exuiu+KGG26oeZzSJGIx4oedxmnLfsdnlq7hzSdMa3REIiJSQllPiVw+T1JrBEVE9sqKFSuYP38+l19+OQsWLGDNmjVcdtllLFy4kKOPPpqrrrpq17Evf/nLWbJkCdlslvHjx3PllVdy3HHHccopp7Bu3boGvgqpBTvyDRzMBrY/9RvS2XyjwxERkRKqCJbI5l2toSISCZ/66RMsXb11RB9z3iGdfOKNR+/TfZcuXcoNN9zA1772NQA++9nPctBBB5HNZjnjjDN429vexrx58wbdp6enh1e+8pV89rOf5cMf/jDXX389V1555X6/Dmki895E5vZ/4E19v+DhZy/m1CMmNToiEREpUEWwRCaX17AYEZF9cPjhh3PSSSftunzzzTezYMECFixYwLJly1i6dOlu9xk1ahTnnHMOACeeeCLPPvtsvcKVekmOwo55O+fEHuI3jz7V6GhERKRETSuCZnY28CUgDnzd3T9bdvu7gf8EXihcdbW7f71w2+eB1xOS1buBD7i71zLeXN5Jao2giETAvlbuaqWjo2PXz08//TRf+tKXeOihhxg/fjwXXXRRxS0gUqnUrp/j8TjZbLYusUp9JRa+i8Qj15N//AfseMPJdLSpGUlEpBnULOsxszhwDXAOMA+40MzmVTj0++5+fOGrmAS+DDgVOBaYD5wEvLJWsRZlcmoNFRHZX1u3bmXs2LF0dnayZs0a7rrrrkaHJI10yPHsmHgMF+bv4EcPP9voaEREpKCW5a+TgRXuvtLd08D3gHOrvK8D7UAKaAOSwIs1ibJENp9XRVBEZD8tWLCAefPmMX/+fC699FJOPfXURockDdZx1j9xWGwtq377bfL5mjb3iIhIlWrZnzENWFVyuRt4SYXjzjOz04CngA+5+yp3f8DM7gHWAEZoGV1Wfkczuwy4DGDmzJn7HXA25yRUERQRGdYnP/nJXT8fccQRu7aVgLAJ/Le//e2K97vvvvt2/bxly5ZdP19wwQVccMEFIx+oNIe/eD09447kgs3f567H/5pzjpvR6IhERA54tSx/Vcqoyj8G/Ckwy92PBX4BfAvAzI4AjgKmExLKMwvJ4uAHc7/O3Re6+8Kurq79DjibdxLaPkJERGRkxWKMec2/cHhsDVtuu5Ke7TsbHZFING1ZBTe8Hn78Pqjt6Izd7dxU3+cbCZk++P11cPM74bkHhj8+vQPyudrHVa7ef5YFtUwEu4HSj/ymA6tLD3D3je7eX7j4v8CJhZ/fAjzo7tvdfTtwJ/DSGsYKQDaXJ6GpoSIiIiMuftQb2HDURVyYv50tX3017NjQ6JD2LL0Dtq1tdBT7pnsxPHvf8MftjfI3qrkMZHohX7I/ZN/W2r6hffY+WPun2j1+PeWy8Lur4bdfqHzOHvk2PHBNuK17MVx/Nnz7rXDdK2HVg/DY92DJTfsfx4pfhqTyh++BlfcOfdzvr4PPHwZP3rH/z1kvPS/ANSfBnR+FZ34D33wd/OJTkC75ICrTB6seCud56xr48gL4wbsHP86aR+GTEZbyAAAgAElEQVTWv4WrT4KfXA7bhlmt9tgt8M03DCTO
65aFvytDWfZT+OIxsOaxfXqZ+6OWraEPA3PMbDZhKugFwDtLDzCzqe6+pnDxTUCx/fN54FIz+w9CZfGVwBdrGCugiqCIiEjNxGJMesc13PqdIzn76U+x+drXMuHyn8OY/e/oGXH5PHznPFj7OLxrEUw7sfJxxTfwNsx7h0wvbH8RJswa0TAHPf6TPwOLQffD8OC1gMOpH4Qz/w3iZW/3dm6CX34Kjn0HHPqy4R9/24vhfIybBm+7Hh78KtzzH+A5mDwPLl4Eq34Pt1wMs06FV38aDjk+PM/d/waHnQHHvG3g8Xo3hw8Cxs+ERFs4jz3dIebxQyz1+f11cOc/QqId3vEdmHPW4NtzGfjTj0Jy1D4OJs2BiXNCPJUe0334P7dK91l5D8QSMOsVe77/s/fBH74JLy6FURPgnM/BwfPDbauXwO0fhNV/DJe3rQ1xrrwXTrwE+npg0d+F89H9cEjWUh0w9uBwvl//hXD/n18Js18x9DmrZO3j8PT/wZFvhGwffO+dkBwdblt+J7z37hCnOzx6M6xfDmOnwl0fC/H8+rPwF+cMPF7pOcjnIb0d2sbCjvUhgZ35Uhh90OAYsumQzG5+DnJpmPdm6Jg4cHumD5LtA5d3bgqJ6sYVMGo8vOkrcMgJe36d6R1w8wWwczP81U9g+klw55Vw3xfgse/DyZeFv4/3fAY2PAXHvRM2Pwvb18KyRbDiF3DEWeG5v/M2yPbD9BPD79iTd8DrPh/+/hRff18PpMbA1hfg9g+F8/Cj98Jhp8PdH4dDT4WLfgSLb4Anbw/PPec14d+W2/423P8nl8Nl94S/E3VitdyRwcxeR0jg4sD17v4ZM7sKWOzuiwqJ3puALLAJeL+7P1mYOPpV4DRCO+nP3f3De3quhQsX+uLFi/cr3pf++y85be4kPv+24/brcUREamHZsmUcddRRjQ6j7iq9bjP7g7svbFBIkTMS/0eOlHQ2zzXf+DqXr/4Xsu0T6DjlvcTmnwcTD6/8xtodXvwTTJpbvzdIi68Pb+baOiEWh3ffAVPKBp+/+ATc9A7wfHjDmO2H9s6QBJW+ie3rgW+/JXzaf/lvYfJRA69r/XLY8lx4jtmvhHgSerfAo9+D7ofghItCElV6Xra9GN5EH3ZGSIoe/0F4M7v1hYFjTvrr0N72hxvg8DPh/BvDm3MI1Y3v/1V43rFT4W8eCIlKqd7N8NDXYcXdMPOUkCBseR5y/TBmCmxbA0e+AQ4+Fu77b5hwaHgTPWEWbF8HvZvCG+CNK6HnecDgLf8D0xfC4z+E3305vFHGwmvwfHhsDE56b3iOR26EY8+H0/8ZfnUV3P8lmHsObO2GdU+GBGPM5HD/HetDVadvCxx0eEiIN60MiWpbJ1x8W0gcnn8Qnvo5PHd/OPdtY+H4vwwJbjYN888bnJAU/5zWPh4SsiXfhRf+EK6fMj8cP+fVcPAxA8c+9zu4/4sh2Ro9CaYtCAlf7+bwZxFPhaR99ER4/X+FZOmBq8P9LQbxNmgbE5LZuWeH28YfCpfcAeOmD8S16Rn42stDYvjun4Xk5U8/DvF3HQmzXh5ijMULf6Zb4IeXwJ9/FS4n2qF9fHjO9/0G8lm47nRIpODlHwqv4/EfhD8TPPxZn3BRSMbf+KWQ5MYScMHNsHMD/PpzIZHt3QypsYU/Xw+xv/P74fc+m4ZffAL++F3o7xl4LYlRcPyFcNKlIdG677/DuT3nc+F83XgurH0Mjn4rPPPr8Pfkff9/e3ceHkd9Jnj8+1b1pZZakmXJkm3JJw6HzeUohgECk0A4vAEPk4dAhmfCuU6yyw7ZGWYCyyyBSTYbJpl5dsHMMCRhx+QhQ0gIgcyEcA0hZJjBNsYnYA5jfMq6rLvv+u0fv5LdEpJj2ZK6y3o/z9NPd5dK3W//qrp/9db7q6rf2PXW82yc7dvsZ0x12eStczt074I/etwuo0EfvgrP3gl719vnVU02IVv3A/v8ilU2WXRCcPML8M9/Cm8+BSt/
bRPk9vfg6Vtg57/bBPaCv7DrxjO32+UTq7IVwHP+m02aAZrOsutn5Sz7Pa072bZZfxuEy+33+8K7bNue99/hors5FmPpHyc0EZxM49HJNX/zeS5e3MC3rjx1nKJSSqnxo4ngIZoIjk0pJYIA2bzH/asf5awP/p5z3a12YlUTnP4FOP0auxEZqYCaBfDcX9qN4fIZ8PHr7QZjzYKhL5jL2A3AihmHf+Nkl02ajGcThvI6qF9sk7xkF7z0LfCysPkJmHkaXHEfPHyZ3Uj+4lOHKjq71sKPrrIbsLPOhB2v2ISjZ7etLKx4wCYb7e/Y5GHfRlt1mXGyfZ0NP4I134O2gvPglc+A8lr7P17Ovl66B2Y324Soeq6d/5W/tdMjCYjEbaVx5ulw0T02gXIjUHuCfc31j8AvvmoTgxMvsxWVN5+y8/3+7fAvfwanXAGNy+yGtJe38+x53VZq6pfYjdpQFK79id3A/tlKuxwu+RY4Drz7vK28VDXCTS/YjfS137e3UBRW/B38+n/bNhp08uVw4nKbXGb67LRp86Htbft/xoMZi6F1KyRmQe9eaL4RLvsOZPtthaX1behvtZXASIVNMk++3G7Ui9h1ou0tm/SmuqFytn09J2SrQ/WLbfL63oscPIVFrBqab7BDXKsa7fr44l/BRn8IZtUcuODPQVxY8w92uQIsvhJO+AyseQj2bYCyGjj3VjjrSxAus+320v+yVdO+VltJOv82mzQYA2/80CaGs5baZG3PerjpOVtVffd5m2gmGj66Pm/5mZ1/1lKb2FQ22uXW3+p/nipbiZp7rh1K2vo2XPg/bUL9/F22unndL6BpmZ1/9zp49CqbyCPw6TvhrC/bJGbWmbad7zvTrueRhF1P4zV+QhOHk/6TrcT27LPfrdpFNrlJ98GZ19rq6Ie/hVM/D0v+0Cax6V547UFbpctnbBzzL7DfH2NsMi+u3Zlx8mftd+//XWo/U/MN8Ppq+znEtdXCWLVNECtm2GSysBJdqHc/tL5p15towu7s6N4Ny/4zbPsV/NPVh+b91J024Rvk5e338JXv2qoqwNzzbHLX9jZcei+c/WX4zXfs+nnB7TbR/NUd9nv3yT+z6/iGH9mdIhd8zcb51C3w/ktwyxpbAT5KmggepdPveY4/OGMW96xYMk5RKaXU+NFE8BBNBMem1BJBAGMMj6/bxcP//DLNufVcGd9Ic/b1oTNVzrZ70E+7xlYa3n0OMDZJaPy4TQiNB2t/YOdbeKHd+Nz7hk2cmpbZyl2qGz52Cbx6v91QC5cfqkiIA+f/ha0U7d9iEzCMTWpqT7AVgNWXQy5pE4NUj03uqprssNGa+Yfi/fW3bdIzGDfY97ryQZvw/PwrduM81Q0zz7AJVf0SuyG96TE7JK5hCZx8hU0a1z9ih5K1bj30Hgs/Dcu+BFuftBvRzTfaaaOd4+DdF+wwwp49dmN92UpbrYjXwL9+026sgt1wd0I29rm/Z9u8YYlNYrIDh6pRuYytGhXat8kmKoWJeOHQy3SfrU7Gqu2G94zD/I51vG83tGsX2eX1m+/CZ+6xG/1H48AO+MfLbbX27K/Yto1VHvp7X6tNQAY64dn/YRPWwWU06JO3wdI/tuvU8Ors+tXwyt/YhGD6Ivi9/2LbLhI/uni9vI3lSIdMP/M1m0id+nm7AyIUscfGffhv9rPs+K2tjoXjcPUPbfUa7PLJJj8ap+fZxNt4Iw853fxTOzR4xQN2uf74Wls1Xv7dkWPu3gMv3A1v/tw+X/GA3bExXF+rTQbrTrbDfls22/cKxezw5QUFlxNf+wObYHo5uzPmsm/D0uvGPtT3cD54xSbuuZRN1NzwR+cZ6LTDZyPlcOYXAb96PPP0kWPJpQ8/qiHdaz/T8Ar9GGkieJSWfP1ZPt/cxF2Xj3Tde6WUKq5iJ4IdHR1ceOGFALS0tOC6LoNnbF6zZg2RSORw/37Qww8/zPLly2loGGEP9wg0ETx2pZgIDursz/DkG3v4
5eZ9TE/vZH7POnakylkY7eay6EZ2VS1j3ewvUhELMcN0cErnc8zpWsu07q24qQMA9DecRVddM/Xbf4qb7iFTt4Rw13acZIdN7EIxWyWJVNiN4QWf8o9Ta4OX/xq2/NQOybv6hzZh/EiQH9i99XtetxXDZV+yFYKy6qHzeR78/Mu2qrZspa1slNfZY/SMgZ9cT65nH8/VXU/fzPP4xILpzJseR37XBmz7u7YKWFZjh18ezQav59mN+8LjBfM5m1w3LBnbcWaT6WiO5RvO80ZPlIfLJm0Vr2WzHSJ84vKhQwtH0rXTVpOazj7y9xkv+ZxdLxs/Mfp7d++xCUh57fi//5Eun/4OyPSO33Gy2aStMCbqhw6ZVZoIHq0T//IZrj9nHncsn3p73JVSpa/YiWChu+++m4qKCm677bYx/+95553HqlWrOOOMM45ofk0Ej10pJ4LDZfMez25t4eVtbWzd20N7X5r+dI7+zEdP6R4nRTlJ2rB70AUPB0MeFwePJmmjv2wWsZDDwsybdLp15CrnEAkd2mCOhxwuD79Gf6yBzXIinjE4IjgCWc/Ql8rhGXud4YF0BieXojxRTXnUJZPz7C3vkc55OAKzqsqojkcIu0JdIkpDVYyBdJ4DAxna+9L8bP0eOvozB9+/LhFl2bwaPjFvGrOnxRHsdrUICEI27zGQyRMJOUyLR6gpj5CIhfCMIe8Zcp5/n7f3jgON0+Jkch5v7uuhL5XDEWioilFVFqalJ0XYdVhQW05/Ok9LT4qQK8RCLrGwQ186R0t3ipMaKpkzPU4qm6c7mWVaPELO8+joyxALu0Rch10HBsjmPRbUVVBVdqhi0tGXZltLL/2ZPDOrYsTCDiA0TisjFnaPaD3wPIMTsGs7p7J5wq6DG7C41fFlLP3jRJ41NHD0rKFKKXV0Vq9ezQMPPEAmk+Gcc85h1apVeJ7HDTfcwIYNGzDGsHLlSurr69mwYQNXX301ZWVlY6okqqkh7Dp89rRZfPa0WUOme55hIJunP51jT1eSd1p6SWbzhFyHuooo5VGXfd0p0lmbNPX7yVdnf4ZU1iMRm82snEdbb4ps/tBO8N5Ulu+2LibvGarjXbiOYAx4xuCKkIiFcBwhlzfEIy5uLMGuzgGS2TzRkEPEv0VDDnnP8NoHnXQns2TyNkkc+tmET8yr4fbLTqIs7LJmRydrP+hk7Y4D/MvmfZSaGYko7X1pvCOoGdRWRGmcVsaeriRtvekR53EdYVZ1jIhr26ovnae2IkJTTZxIyCGd9djfk6KlJ0V7X5poyGF6eZQ5NXHKoyF2dvbTk8xhMEyLR6ivjFFfGSURC5PLe0RCDhXRMKlcnt5Ult5Ujt5Ujr5UjpArxCMuuzqT7OtO4jg2+a0qC1NVFiYRCxENO0Rch7DrEA45uCK829rLtpZecp6hMhZm6dxpnFBXQSIWor0vzYedA+zqHGD3gSSd/RkSsRDNc6fROC1OLOyw118np5dHmV5hE/myiIsgHBjIkPbX4Z5klgMDWarKwkTDDjs7BnAdYcnsSmZXxzEYfrWlhS17ugm7DrGwTdwX1lVwUkOCgWyenR0DbNzdTcgRTmpIcGJDgqaaOPt7UvSlc9TEI7iOkM0bwq74r2FfJxZ2yeY8Urk80+IRKqIhWnvT5DxDTTxCOGS/A+19aZKZPDMqY3QNZNi4q4u8MSRiYeZNj1OXiNI1kMV1hNqKKDnP0J/O0ZfOYYxtw8qyMNVx2+4hx2F7ex/dA1kqYiFqK6LUV8ZGTKaTmTyt/vfXGIMBWrpTvLO/l3nTyzn3hFoMhgMDWboG7M6WsrDLlr09vNPSy+lN1Zwyq5K+VI6eVJa+VI7KshAzEjFmVEbpT+dZt6PTfu68x8kzKzljTjXRkDskhrdbephRGaOuIsr+nhTpnHdwPSrcyQR2CHx/Jk9Z2C3JHQSaCPqMv2fN1esIKqWC4Jnb7dCl8dRwqj3WYoy2bNnCk08+yauvvkooFGLlypU89thj
LFy4kPb2djZvtnF2dXVRXV3N/fffP6aKoFIAjiNURENUREPUV8ZYOufYjqOZaMYYupNZ9vekKY+6TItHiEfcIcNAF9UnuPasuQDs6UrS2ZfBYOw5Mvx5XBFbfcx7dPbbxLY3lcN1hJAj/r1z8Hk277H7gE10Fs+qpKY8Qi5v2NuVpCeVpaEyRjrvsb2tn0Q0RENVjLwxpLN5ktk88UiIukSUDTu72Lynmzk1cWoTUTr7MoRDQm15lFQuTybnMbu6jJDrsL2tj/fb+tjZOcD5i+o4eWaCkxoqScRC7OtOkcl75D37njs7B8h5NsmOR1zaetN82NFP3jOEXYf6yhinzKykLhElk/do702zo6Ofzv4Mc2rKqSkPIwidAxn296R4u6XHT/Qc0rk8qaxHyLEJfCJmE7yKaIhMzuPAQJammjLOXmAvZ5D0q53dyezBOLN+Aj94P6+2nEuXzCQacmjrS7NuRye/2Ggvix1ybJWzqSbOktlVzKyMsbc7yesfHuCNXV0kM3kaqmKUhV027e6msz9DbpSsOha2Fd/uZJZ0zqNpWhmZnMfTGw9dgrumPMLZC2owBtI5j750jme2tPDY2l0AVMfDnDq7CmPgpW1t/OT13eO9Wn+EIyAi5I9kb8EYXjPkOCAcrJADpLLeYf9voriOEA+7VJeHaekeuiNpuLKwS2VZiGjIpSeVpSeZxTN2XamtiOI6ggg4In7V3/62uWK/y9MrIjx684RfOv0gTQR9noHzP1bH/NqjPLhXKaWmqBdeeIG1a9fS3GxHoiSTSZqamrjkkkvYtm0bt956K8uXL+fiiy8ucqRKTR4RoToeoTp+ZBXv2dVlzK4um7B4TplVOeT5p048/PxjS7TrR/3L6U1jeJlxkMt7/sb2xFVfsnnPryaFx1Tl8TxDbypHKpfHM7aqOVhJLoy5cFhsZ3+G1t4UqazH4lmVhN2PVpza+tIkomHKIkOH3bb3pdlzIMnMqhgVsRCd/RmMsVX3bN4jlbWJcyqXPzisNRpyODBgdzbMSMQIu3IwgXVEqEtEiIVdWnvSlEdDnDq76uCQ4g/abcI+OIy4rTdD2LU7cMqjIRwRelI28e4eyB6snM+dHqemPEJ/2lb8BpMtg90jYvzPWR2P0FAZIxxyDiaINeURFs1IsK2ll7U7OimLuFT7FUeA3lSORfUJTmpIsH7nAT7sGPCrkjam7mSWtp40+3tSuH7Ffm5NHAQ27Oxi694ecp53cITBzKoyzmiqoqM/Q3tvhplVMaJhW9HtLrilcx6VsUPV5p5UltaeNHlz6DN5xu70yRuD5w/xrohNbmqmiaDPdYRHblxW7DCUUurIHEXlbqIYY7jxxhv5xje+8ZG/bdq0iWeeeYb77ruPJ554goceeqgIESqlpoqQO/Eju8Kuw7TysQ9pdxyhKh6miqFnoBx+WFLhsZE15XY46WhEhBmJ2Ih/q62IUltx6CyV8cj4bfYvHjpym0QszGmN1SPPPAnqElHOW3T4k+Gcs7CWcxYe+WtevLiBixcf2UnNgkrHQSqllDomF110EY8//jjt7e2APbvozp07aWtrwxjDVVddxT333MP69fYCvolEgt7e3mKGrJRSSk15WhFUSil1TE499VS+/vWvc9FFF+F5HuFwmAcffBDXdbnpppswxiAi3HvvvQDccMMN3HzzzXqymFGIyKXA/wVc4PvGmG8P+/v1wHcA/0J1rDLGfH9Sg1RKKRV4evkIpZQKiFK6fMRkmkqXjxARF3gH+AywG1gLfMEY82bBPNcDzcaYW470dbWPVEqpqWEs/aMODVVKKaVKxzLgPWPMdmNMBngMWFHkmJRSSh2HNBFUSimlSsdsYFfB893+tOE+JyKbROSnIjLJ52VUSil1PNBEUCmllCodI52LfvgxHL8A5hljTgNeAFaP+EIiK0VknYisa2trG+cwlVJKBZ0mgkopFSDHy3HdR2qqfV5sBbCwwtcI7C2cwRjTYYxJ+0+/B3x8pBcy
xjxkjGk2xjTX1dVNSLBKKaWCSxNBpZQKiFgsRkdHx5RJjowxdHR0EIuNfI2s49RaYJGIzBeRCHAN8HThDCIys+DpFcBbkxifUkqp44RePkIppQKisbGR3bt3M5WG+cViMRobG4sdxqQxxuRE5BbgWezlIx42xmwVkb8C1hljngb+RESuAHJAJ3B90QJWSikVWJoIKqVUQITDYebPn1/sMNQEM8b8EvjlsGl3FTy+A7hjsuNSSil1fNGhoUoppZRSSik1xWgiqJRSSimllFJTjCaCSimllFJKKTXFyPFy9jkRaQM+HIeXqgXax+F1JkOQYoVgxRukWCFY8WqsEydI8R5rrHONMXpNhCM0Tn1kkNYvCFa8QYoVghVvkGKFYMWrsU6cY4n3iPvH4yYRHC8iss4Y01zsOI5EkGKFYMUbpFghWPFqrBMnSPEGKVZlBW2ZBSneIMUKwYo3SLFCsOLVWCfOZMWrQ0OVUkoppZRSaorRRFAppZRSSimlphhNBD/qoWIHMAZBihWCFW+QYoVgxauxTpwgxRukWJUVtGUWpHiDFCsEK94gxQrBildjnTiTEq8eI6iUUkoppZRSU4xWBJVSSimllFJqitFE0Ccil4rINhF5T0RuL3Y8w4lIk4i8JCJvichWEbnVn363iOwRkQ3+bXmxYwUQkR0istmPaZ0/rUZEnheRd/37acWOE0BETixovw0i0iMiXy2VthWRh0WkVUS2FEwbsS3Fus9fjzeJyNISifc7IvK2H9OTIlLtT58nIsmCNn6wBGIddbmLyB1+224TkUtKINYfF8S5Q0Q2+NOL2q5+DKP9ZpXsuqtGV8p9ZND6RwhOH1nq/aMfY2D6yCD1j4eJV/vIY4+1dPpHY8yUvwEu8D6wAIgAG4FTih3XsBhnAkv9xwngHeAU4G7gtmLHN0K8O4DaYdP+Grjdf3w7cG+x4xxlXWgB5pZK2wLnA0uBLb+rLYHlwDOAAGcDr5VIvBcDIf/xvQXxziucr0RiHXG5+9+3jUAUmO//ZrjFjHXY3/8GuKsU2tWPYbTfrJJdd/U26rIs6T4yaP2jH2fg+shS7B/9uALTRwapfzxMvNpHHnusJdM/akXQWga8Z4zZbozJAI8BK4oc0xDGmH3GmPX+417gLWB2caMasxXAav/xauAPihjLaC4E3jfGHOuFl8eNMeY3QOewyaO15QrgEWP9B1AtIjMnJ1JrpHiNMc8ZY3L+0/8AGiczptGM0rajWQE8ZoxJG2M+AN7D/nZMisPFKiICfB74p8mK53c5zG9Wya67alQl3UceJ/0jlH4fWXL9IwSrjwxS/wjaR06UUuofNRG0ZgO7Cp7vpoQ7ERGZB5wJvOZPusUvFT9cCkNJfAZ4TkReF5GV/rR6Y8w+sF8CYEbRohvdNQz9oSjFtoXR2zII6/KN2D1bg+aLyBsi8rKIfLJYQQ0z0nIv5bb9JLDfGPNuwbSSaddhv1lBXnenqsAsm4D0jxDMPjIo/SME93cmCP0jaB85bordP2oiaMkI00rydKoiUgE8AXzVGNMD/D2wEDgD2IctfZeCc40xS4HLgP8qIucXO6DfRUQiwBXAT/xJpdq2h1PS67KI3AnkgEf9SfuAOcaYM4E/BX4kIpXFis832nIv5bb9AkM30EqmXUf4zRp11hGmlUr7TnWBWDYB6h8hYH3kcdI/QgmvywHpH0H7yHFTCv2jJoLWbqCp4HkjsLdIsYxKRMLYFeZRY8zPAIwx+40xeWOMB3yPSSzDH44xZq9/3wo8iY1r/2Ap279vLV6EI7oMWG+M2Q+l27a+0dqyZNdlEbkO+CxwrfEHvftDSDr8x69jjyn4WPGiPOxyL8m2FZEQ8IfAjwenlUq7jvSbRQDXXVX6yyZI/SMEso8MUv8IAfudCUr/6MeifeT4xFUS/aMmgtZaYJGIzPf3el0DPF3kmIbwxzf/AHjLGPO3BdMLxwhfCWwZ/r+TTUTKRSQx+Bh7IPQWbJte5892HfBU
cSIc1ZA9RqXYtgVGa8ungS/6Z5g6G+geHGZQTCJyKfA14ApjzEDB9DoRcf3HC4BFwPbiRHkwptGW+9PANSISFZH52FjXTHZ8I7gIeNsYs3twQim062i/WQRs3VVAifeRQeofIbB9ZJD6RwjQ70yQ+kc/Fu0jj1FJ9Y+miGcjKqUb9ow872D3CtxZ7HhGiO88bBl4E7DBvy0Hfghs9qc/DcwsgVgXYM8ctRHYOtiewHTgReBd/76m2LEWxBwHOoCqgmkl0bbYzncfkMXuFbpptLbEDh94wF+PNwPNJRLve9jx7YPr7oP+vJ/z15GNwHrg8hKIddTlDtzpt+024LJix+pP/0fgy8PmLWq7+jGM9ptVsuuu3g67PEu2jwxS/+jHG6g+spT7Rz+WwPSRQeofDxOv9pHHHmvJ9I/iv4FSSimllFJKqSlCh4YqpZRSSiml1BSjiaBSSimllFJKTTGaCCqllFJKKaXUFKOJoFJKKaWUUkpNMZoIKqWUUkoppdQUo4mgUiVARPIisqHgdvs4vvY8ESm1azwppZRSR0T7SKUmRqjYASilAEgaY84odhBKKaVUCdI+UqkJoBVBpUqYiOwQkXtFZI1/O8GfPldEXhSRTf79HH96vYg8KSIb/ds5/ku5IvI9EdkqIs+JSFnRPpRSSik1DrSPVOrYaCKoVGkoGzbs5eqCv/UYY5YBq4D/409bBTxijDkNeBS4z59+H/CyMeZ0YCmw1Z++CHjAGLMY6AI+N8GfRymllBov2kcqNQHEGFPsGJSa8kSkzxhTMcL0HcCnjTHbRSQMtBhjpotIOzDTGJP1p+8zxtSKSBvQaIxJF7zGPOB5Y8wi//nXgLAx5psT/8mUUkqpY6N9pFITQyuCSpU+M8rj0eYZSZKMTp4AAAD7SURBVLrgcR49PlgppdTxQftIpY6SJoJKlb6rC+7/3X/8KnCN//ha4Lf+4xeBrwCIiCsilZMVpFJKKVUE2kcqdZR0j4dSpaFMRDYUPP+VMWbw9NhREXkNu+PmC/60PwEeFpE/B9qAG/zptwIPichN2L2aXwH2TXj0Siml1MTRPlKpCaDHCCpVwvzjH5qNMe3FjkUppZQqJdpHKnVsdGioUkoppZRSSk0xWhFUSimllFJKqSlGK4JKKaWUUkopNcVoIqiUUkoppZRSU4wmgkoppZRSSik1xWgiqJRSSimllFJTjCaCSimllFJKKTXFaCKolFJKKaWUUlPM/wcTHhV93INaeAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 1080x432 with 2 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Visualize training history: accuracy (left panel) and loss (right panel) per epoch.\n",
    "# NOTE(review): `history` is assumed to be the return value of a Keras model.fit()\n",
    "# call in an earlier cell — the 'acc'/'val_acc' keys match the Keras version whose\n",
    "# logs appear below; newer Keras uses 'accuracy'/'val_accuracy'. Confirm on re-run.\n",
    "fig, (ax_acc, ax_loss) = plt.subplots(1, 2, figsize=(15, 6))\n",
    "\n",
    "# Left panel: training vs. validation accuracy.\n",
    "ax_acc.plot(history.history['acc'], label='Train')\n",
    "ax_acc.plot(history.history['val_acc'], label='Test')\n",
    "ax_acc.set(title='Model accuracy', xlabel='Epoch', ylabel='Accuracy')\n",
    "ax_acc.legend(loc='lower right')\n",
    "\n",
    "# Right panel: training vs. validation loss.\n",
    "ax_loss.plot(history.history['loss'], label='Train')\n",
    "ax_loss.plot(history.history['val_loss'], label='Test')\n",
    "ax_loss.set(title='Model loss', xlabel='Epoch', ylabel='Loss')\n",
    "ax_loss.legend(loc='upper right')\n",
    "\n",
    "plt.show()  # render the figure and suppress the stray Legend repr in the cell output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Fold # {1}\n",
      "Train on 21816 samples, validate on 9351 samples\n",
      "Epoch 1/125\n",
      "21816/21816 [==============================] - 3s 150us/step - loss: 4.0573 - acc: 0.4862 - val_loss: 3.5271 - val_acc: 0.4832\n",
      "Epoch 2/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 3.1249 - acc: 0.5304 - val_loss: 2.7226 - val_acc: 0.5520\n",
      "Epoch 3/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 2.4244 - acc: 0.6000 - val_loss: 2.1264 - val_acc: 0.6136\n",
      "Epoch 4/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.9059 - acc: 0.6220 - val_loss: 1.6883 - val_acc: 0.6184\n",
      "Epoch 5/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.5230 - acc: 0.6596 - val_loss: 1.3647 - val_acc: 0.6669\n",
      "Epoch 6/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.2410 - acc: 0.6783 - val_loss: 1.1328 - val_acc: 0.6644\n",
      "Epoch 7/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.0405 - acc: 0.6848 - val_loss: 0.9679 - val_acc: 0.6732\n",
      "Epoch 8/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.9011 - acc: 0.6842 - val_loss: 0.8570 - val_acc: 0.6667\n",
      "Epoch 9/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.8042 - acc: 0.6878 - val_loss: 0.7815 - val_acc: 0.6665\n",
      "Epoch 10/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.7385 - acc: 0.6835 - val_loss: 0.7259 - val_acc: 0.6682\n",
      "Epoch 11/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6943 - acc: 0.6859 - val_loss: 0.6923 - val_acc: 0.6763\n",
      "Epoch 12/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6660 - acc: 0.6822 - val_loss: 0.6659 - val_acc: 0.6785\n",
      "Epoch 13/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6454 - acc: 0.6851 - val_loss: 0.6475 - val_acc: 0.6791\n",
      "Epoch 14/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6324 - acc: 0.6872 - val_loss: 0.6355 - val_acc: 0.6815\n",
      "Epoch 15/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6224 - acc: 0.6903 - val_loss: 0.6274 - val_acc: 0.6825\n",
      "Epoch 16/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6181 - acc: 0.6886 - val_loss: 0.6260 - val_acc: 0.6817\n",
      "Epoch 17/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6153 - acc: 0.6878 - val_loss: 0.6222 - val_acc: 0.6833\n",
      "Epoch 18/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6113 - acc: 0.6909 - val_loss: 0.6172 - val_acc: 0.6836\n",
      "Epoch 19/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6094 - acc: 0.6910 - val_loss: 0.6202 - val_acc: 0.6836\n",
      "Epoch 20/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6079 - acc: 0.6912 - val_loss: 0.6147 - val_acc: 0.6850\n",
      "Epoch 21/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6086 - acc: 0.6883 - val_loss: 0.6293 - val_acc: 0.6810\n",
      "Epoch 22/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6053 - acc: 0.6908 - val_loss: 0.6304 - val_acc: 0.6816\n",
      "Epoch 23/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6037 - acc: 0.6939 - val_loss: 0.6207 - val_acc: 0.6840\n",
      "Epoch 24/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6039 - acc: 0.6933 - val_loss: 0.6164 - val_acc: 0.6847\n",
      "Epoch 25/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6055 - acc: 0.6919 - val_loss: 0.6211 - val_acc: 0.6842\n",
      "Epoch 26/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6052 - acc: 0.6912 - val_loss: 0.6161 - val_acc: 0.6848\n",
      "Epoch 27/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6029 - acc: 0.6947 - val_loss: 0.6221 - val_acc: 0.6844\n",
      "Epoch 28/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6020 - acc: 0.6966 - val_loss: 0.6227 - val_acc: 0.6841\n",
      "Epoch 29/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6017 - acc: 0.6954 - val_loss: 0.6225 - val_acc: 0.6846\n",
      "Epoch 30/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6004 - acc: 0.6941 - val_loss: 0.6222 - val_acc: 0.6848\n",
      "Epoch 31/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5997 - acc: 0.6974 - val_loss: 0.6204 - val_acc: 0.6848\n",
      "Epoch 32/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6019 - acc: 0.6901 - val_loss: 0.6205 - val_acc: 0.6850\n",
      "Epoch 33/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6000 - acc: 0.6967 - val_loss: 0.6311 - val_acc: 0.6841\n",
      "Epoch 34/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6020 - acc: 0.6993 - val_loss: 0.6238 - val_acc: 0.6775\n",
      "Epoch 35/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6028 - acc: 0.6969 - val_loss: 0.6278 - val_acc: 0.6804\n",
      "Epoch 36/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6009 - acc: 0.6975 - val_loss: 0.6224 - val_acc: 0.6825\n",
      "Epoch 37/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5994 - acc: 0.6996 - val_loss: 0.6289 - val_acc: 0.6844\n",
      "Epoch 38/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5974 - acc: 0.6990 - val_loss: 0.6299 - val_acc: 0.6845\n",
      "Epoch 39/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6000 - acc: 0.6975 - val_loss: 0.6399 - val_acc: 0.6828\n",
      "Epoch 40/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5995 - acc: 0.6972 - val_loss: 0.6220 - val_acc: 0.6838\n",
      "Epoch 41/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6008 - acc: 0.6960 - val_loss: 0.6294 - val_acc: 0.6844\n",
      "Epoch 42/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5988 - acc: 0.6968 - val_loss: 0.6382 - val_acc: 0.6843\n",
      "Epoch 43/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6012 - acc: 0.6931 - val_loss: 0.6228 - val_acc: 0.6855\n",
      "Epoch 44/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6004 - acc: 0.6967 - val_loss: 0.6313 - val_acc: 0.6843\n",
      "Epoch 45/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5965 - acc: 0.6992 - val_loss: 0.6280 - val_acc: 0.6853\n",
      "Epoch 46/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5990 - acc: 0.6941 - val_loss: 0.6333 - val_acc: 0.6835\n",
      "Epoch 47/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5972 - acc: 0.6985 - val_loss: 0.6380 - val_acc: 0.6842\n",
      "Epoch 48/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5981 - acc: 0.6982 - val_loss: 0.6293 - val_acc: 0.6852\n",
      "Epoch 49/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5956 - acc: 0.7002 - val_loss: 0.6341 - val_acc: 0.6835\n",
      "Epoch 50/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5958 - acc: 0.7007 - val_loss: 0.6273 - val_acc: 0.6855\n",
      "Epoch 51/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5971 - acc: 0.6990 - val_loss: 0.6452 - val_acc: 0.6838\n",
      "Epoch 52/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5958 - acc: 0.7015 - val_loss: 0.6443 - val_acc: 0.6845\n",
      "Epoch 53/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5979 - acc: 0.6969 - val_loss: 0.6278 - val_acc: 0.6858\n",
      "Epoch 54/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6001 - acc: 0.6903 - val_loss: 0.6230 - val_acc: 0.6860\n",
      "Epoch 55/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5984 - acc: 0.6980 - val_loss: 0.6409 - val_acc: 0.6827\n",
      "Epoch 56/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5992 - acc: 0.7003 - val_loss: 0.6603 - val_acc: 0.6155\n",
      "Epoch 57/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5983 - acc: 0.6972 - val_loss: 0.6361 - val_acc: 0.6839\n",
      "Epoch 58/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5948 - acc: 0.7031 - val_loss: 0.6379 - val_acc: 0.6847\n",
      "Epoch 59/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5970 - acc: 0.7003 - val_loss: 0.6310 - val_acc: 0.6848\n",
      "Epoch 60/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5976 - acc: 0.6969 - val_loss: 0.6231 - val_acc: 0.6858\n",
      "Epoch 61/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5971 - acc: 0.6971 - val_loss: 0.6420 - val_acc: 0.6843\n",
      "Epoch 62/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5931 - acc: 0.7010 - val_loss: 0.6230 - val_acc: 0.6854\n",
      "Epoch 63/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5981 - acc: 0.6959 - val_loss: 0.6366 - val_acc: 0.6821\n",
      "Epoch 64/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5953 - acc: 0.7033 - val_loss: 0.6387 - val_acc: 0.6842\n",
      "Epoch 65/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5954 - acc: 0.6988 - val_loss: 0.6389 - val_acc: 0.6850\n",
      "Epoch 66/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5931 - acc: 0.7027 - val_loss: 0.6283 - val_acc: 0.6855\n",
      "Epoch 67/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5938 - acc: 0.7033 - val_loss: 0.6272 - val_acc: 0.6858\n",
      "Epoch 68/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5938 - acc: 0.7015 - val_loss: 0.6322 - val_acc: 0.6856\n",
      "Epoch 69/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5930 - acc: 0.7026 - val_loss: 0.6359 - val_acc: 0.6851\n",
      "Epoch 70/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5958 - acc: 0.6981 - val_loss: 0.6411 - val_acc: 0.6823\n",
      "Epoch 71/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6019 - acc: 0.6979 - val_loss: 0.6408 - val_acc: 0.6734\n",
      "Epoch 72/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6003 - acc: 0.7017 - val_loss: 0.6263 - val_acc: 0.6796\n",
      "Epoch 73/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5946 - acc: 0.7019 - val_loss: 0.6237 - val_acc: 0.6810\n",
      "Epoch 74/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5950 - acc: 0.7003 - val_loss: 0.6483 - val_acc: 0.6844\n",
      "Epoch 75/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5958 - acc: 0.6952 - val_loss: 0.6393 - val_acc: 0.6851\n",
      "Epoch 76/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5923 - acc: 0.7041 - val_loss: 0.6441 - val_acc: 0.6852\n",
      "Epoch 77/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5947 - acc: 0.6999 - val_loss: 0.6575 - val_acc: 0.6843\n",
      "Epoch 78/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.6004 - acc: 0.6913 - val_loss: 0.6427 - val_acc: 0.6851\n",
      "Epoch 79/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5942 - acc: 0.6993 - val_loss: 0.6741 - val_acc: 0.6473\n",
      "Epoch 80/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6066 - acc: 0.6947 - val_loss: 0.6426 - val_acc: 0.6745\n",
      "Epoch 81/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5980 - acc: 0.7027 - val_loss: 0.6391 - val_acc: 0.6780\n",
      "Epoch 82/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5951 - acc: 0.7029 - val_loss: 0.6413 - val_acc: 0.6789\n",
      "Epoch 83/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5912 - acc: 0.7055 - val_loss: 0.6408 - val_acc: 0.6798\n",
      "Epoch 84/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5918 - acc: 0.7040 - val_loss: 0.6480 - val_acc: 0.6822\n",
      "Epoch 85/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5950 - acc: 0.6977 - val_loss: 0.6506 - val_acc: 0.6831\n",
      "Epoch 86/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5918 - acc: 0.7033 - val_loss: 0.6359 - val_acc: 0.6844\n",
      "Epoch 87/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5925 - acc: 0.7032 - val_loss: 0.6478 - val_acc: 0.6838\n",
      "Epoch 88/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5909 - acc: 0.7027 - val_loss: 0.6350 - val_acc: 0.6845\n",
      "Epoch 89/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5887 - acc: 0.7068 - val_loss: 0.6383 - val_acc: 0.6841\n",
      "Epoch 90/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5908 - acc: 0.7055 - val_loss: 0.6557 - val_acc: 0.6776\n",
      "Epoch 91/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5931 - acc: 0.7008 - val_loss: 0.6279 - val_acc: 0.6851\n",
      "Epoch 92/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5935 - acc: 0.7001 - val_loss: 0.6479 - val_acc: 0.6848\n",
      "Epoch 93/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5929 - acc: 0.7037 - val_loss: 0.6433 - val_acc: 0.6859\n",
      "Epoch 94/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5911 - acc: 0.7043 - val_loss: 0.6318 - val_acc: 0.6841\n",
      "Epoch 95/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5893 - acc: 0.7060 - val_loss: 0.6354 - val_acc: 0.6854\n",
      "Epoch 96/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5971 - acc: 0.6946 - val_loss: 0.6507 - val_acc: 0.6850\n",
      "Epoch 97/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5923 - acc: 0.6992 - val_loss: 0.6364 - val_acc: 0.6858\n",
      "Epoch 98/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5910 - acc: 0.7050 - val_loss: 0.6535 - val_acc: 0.6839\n",
      "Epoch 99/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5925 - acc: 0.7001 - val_loss: 0.6590 - val_acc: 0.6833\n",
      "Epoch 100/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5923 - acc: 0.7039 - val_loss: 0.6364 - val_acc: 0.6855\n",
      "Epoch 101/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5887 - acc: 0.7056 - val_loss: 0.6343 - val_acc: 0.6848\n",
      "Epoch 102/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5898 - acc: 0.7054 - val_loss: 0.6489 - val_acc: 0.6851\n",
      "Epoch 103/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5930 - acc: 0.6995 - val_loss: 0.6475 - val_acc: 0.6800\n",
      "Epoch 104/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5929 - acc: 0.7061 - val_loss: 0.6496 - val_acc: 0.6790\n",
      "Epoch 105/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5915 - acc: 0.7029 - val_loss: 0.6322 - val_acc: 0.6813\n",
      "Epoch 106/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5898 - acc: 0.7029 - val_loss: 0.6400 - val_acc: 0.6805\n",
      "Epoch 107/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5903 - acc: 0.7054 - val_loss: 0.6481 - val_acc: 0.6836\n",
      "Epoch 108/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5904 - acc: 0.7034 - val_loss: 0.6395 - val_acc: 0.6842\n",
      "Epoch 109/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5880 - acc: 0.7037 - val_loss: 0.6337 - val_acc: 0.6855\n",
      "Epoch 110/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5886 - acc: 0.7061 - val_loss: 0.6302 - val_acc: 0.6856\n",
      "Epoch 111/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5946 - acc: 0.6979 - val_loss: 0.6575 - val_acc: 0.6850\n",
      "Epoch 112/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5906 - acc: 0.7010 - val_loss: 0.6518 - val_acc: 0.6847\n",
      "Epoch 113/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5913 - acc: 0.7034 - val_loss: 0.6477 - val_acc: 0.6855\n",
      "Epoch 114/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5909 - acc: 0.7019 - val_loss: 0.6278 - val_acc: 0.6856\n",
      "Epoch 115/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5899 - acc: 0.7028 - val_loss: 0.6302 - val_acc: 0.6858\n",
      "Epoch 116/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5889 - acc: 0.7035 - val_loss: 0.6252 - val_acc: 0.6854\n",
      "Epoch 117/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5881 - acc: 0.7035 - val_loss: 0.6330 - val_acc: 0.6856\n",
      "Epoch 118/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5889 - acc: 0.7057 - val_loss: 0.6371 - val_acc: 0.6856\n",
      "Epoch 119/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5874 - acc: 0.7071 - val_loss: 0.6301 - val_acc: 0.6861\n",
      "Epoch 120/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5891 - acc: 0.7043 - val_loss: 0.6590 - val_acc: 0.6844\n",
      "Epoch 121/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5886 - acc: 0.7054 - val_loss: 0.6330 - val_acc: 0.6862\n",
      "Epoch 122/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5859 - acc: 0.7056 - val_loss: 0.6415 - val_acc: 0.6854\n",
      "Epoch 123/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5893 - acc: 0.7044 - val_loss: 0.6393 - val_acc: 0.6856\n",
      "Epoch 124/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5875 - acc: 0.7035 - val_loss: 0.6519 - val_acc: 0.6850\n",
      "Epoch 125/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.5871 - acc: 0.7066 - val_loss: 0.6479 - val_acc: 0.6848\n",
      "Fold # {2}\n",
      "Train on 21816 samples, validate on 9351 samples\n",
      "Epoch 1/125\n",
      "21816/21816 [==============================] - 3s 142us/step - loss: 3.9616 - acc: 0.5140 - val_loss: 3.4470 - val_acc: 0.5155\n",
      "Epoch 2/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 3.0571 - acc: 0.5413 - val_loss: 2.6639 - val_acc: 0.6144\n",
      "Epoch 3/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 2.3734 - acc: 0.6134 - val_loss: 2.0817 - val_acc: 0.6163\n",
      "Epoch 4/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.8673 - acc: 0.6476 - val_loss: 1.6538 - val_acc: 0.6676\n",
      "Epoch 5/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.4939 - acc: 0.6746 - val_loss: 1.3358 - val_acc: 0.6729\n",
      "Epoch 6/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.2179 - acc: 0.6795 - val_loss: 1.1064 - val_acc: 0.6768\n",
      "Epoch 7/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.0223 - acc: 0.6817 - val_loss: 0.9494 - val_acc: 0.6723\n",
      "Epoch 8/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 0.8879 - acc: 0.6837 - val_loss: 0.8405 - val_acc: 0.6810\n",
      "Epoch 9/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.7944 - acc: 0.6854 - val_loss: 0.7649 - val_acc: 0.6795\n",
      "Epoch 10/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.7323 - acc: 0.6838 - val_loss: 0.7139 - val_acc: 0.6816\n",
      "Epoch 11/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6895 - acc: 0.6869 - val_loss: 0.6815 - val_acc: 0.6813\n",
      "Epoch 12/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6607 - acc: 0.6873 - val_loss: 0.6572 - val_acc: 0.6814\n",
      "Epoch 13/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6408 - acc: 0.6886 - val_loss: 0.6443 - val_acc: 0.6819\n",
      "Epoch 14/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6284 - acc: 0.6898 - val_loss: 0.6344 - val_acc: 0.6824\n",
      "Epoch 15/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6222 - acc: 0.6872 - val_loss: 0.6247 - val_acc: 0.6837\n",
      "Epoch 16/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6174 - acc: 0.6884 - val_loss: 0.6211 - val_acc: 0.6865\n",
      "Epoch 17/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6152 - acc: 0.6896 - val_loss: 0.6210 - val_acc: 0.6846\n",
      "Epoch 18/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6106 - acc: 0.6929 - val_loss: 0.6187 - val_acc: 0.6846\n",
      "Epoch 19/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6085 - acc: 0.6926 - val_loss: 0.6316 - val_acc: 0.6820\n",
      "Epoch 20/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6076 - acc: 0.6920 - val_loss: 0.6208 - val_acc: 0.6840\n",
      "Epoch 21/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6056 - acc: 0.6915 - val_loss: 0.6301 - val_acc: 0.6785\n",
      "Epoch 22/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6068 - acc: 0.6904 - val_loss: 0.6180 - val_acc: 0.6844\n",
      "Epoch 23/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6060 - acc: 0.6933 - val_loss: 0.6183 - val_acc: 0.6843\n",
      "Epoch 24/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6052 - acc: 0.6918 - val_loss: 0.6181 - val_acc: 0.6850\n",
      "Epoch 25/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6069 - acc: 0.6913 - val_loss: 0.6121 - val_acc: 0.6859\n",
      "Epoch 26/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6052 - acc: 0.6895 - val_loss: 0.6146 - val_acc: 0.6853\n",
      "Epoch 27/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6027 - acc: 0.6966 - val_loss: 0.6131 - val_acc: 0.6862\n",
      "Epoch 28/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6020 - acc: 0.6949 - val_loss: 0.6271 - val_acc: 0.6828\n",
      "Epoch 29/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6057 - acc: 0.6915 - val_loss: 0.6152 - val_acc: 0.6838\n",
      "Epoch 30/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6075 - acc: 0.6885 - val_loss: 0.6271 - val_acc: 0.6838\n",
      "Epoch 31/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6052 - acc: 0.6932 - val_loss: 0.6406 - val_acc: 0.6827\n",
      "Epoch 32/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6039 - acc: 0.6908 - val_loss: 0.6247 - val_acc: 0.6843\n",
      "Epoch 33/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6019 - acc: 0.6964 - val_loss: 0.6262 - val_acc: 0.6840\n",
      "Epoch 34/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6005 - acc: 0.6967 - val_loss: 0.6217 - val_acc: 0.6850\n",
      "Epoch 35/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6007 - acc: 0.6934 - val_loss: 0.6224 - val_acc: 0.6848\n",
      "Epoch 36/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6007 - acc: 0.6965 - val_loss: 0.6268 - val_acc: 0.6839\n",
      "Epoch 37/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6010 - acc: 0.6961 - val_loss: 0.6376 - val_acc: 0.6838\n",
      "Epoch 38/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6015 - acc: 0.6965 - val_loss: 0.6421 - val_acc: 0.6840\n",
      "Epoch 39/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6038 - acc: 0.6918 - val_loss: 0.6338 - val_acc: 0.6840\n",
      "Epoch 40/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5990 - acc: 0.6969 - val_loss: 0.6246 - val_acc: 0.6846\n",
      "Epoch 41/125\n",
      "21816/21816 [==============================] - 0s 19us/step - loss: 0.6021 - acc: 0.6921 - val_loss: 0.6225 - val_acc: 0.6852\n",
      "Epoch 42/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5995 - acc: 0.6978 - val_loss: 0.6235 - val_acc: 0.6850\n",
      "Epoch 43/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5987 - acc: 0.6976 - val_loss: 0.6216 - val_acc: 0.6851\n",
      "Epoch 44/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6008 - acc: 0.6929 - val_loss: 0.6240 - val_acc: 0.6852\n",
      "Epoch 45/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5984 - acc: 0.6990 - val_loss: 0.6230 - val_acc: 0.6850\n",
      "Epoch 46/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5976 - acc: 0.6965 - val_loss: 0.6372 - val_acc: 0.6838\n",
      "Epoch 47/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5985 - acc: 0.6951 - val_loss: 0.6245 - val_acc: 0.6853\n",
      "Epoch 48/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6002 - acc: 0.6940 - val_loss: 0.6431 - val_acc: 0.6837\n",
      "Epoch 49/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6008 - acc: 0.6927 - val_loss: 0.6364 - val_acc: 0.6848\n",
      "Epoch 50/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5973 - acc: 0.6982 - val_loss: 0.6303 - val_acc: 0.6846\n",
      "Epoch 51/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5985 - acc: 0.7002 - val_loss: 0.6303 - val_acc: 0.6845\n",
      "Epoch 52/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5971 - acc: 0.6990 - val_loss: 0.6440 - val_acc: 0.6842\n",
      "Epoch 53/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5998 - acc: 0.6942 - val_loss: 0.6335 - val_acc: 0.6844\n",
      "Epoch 54/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5966 - acc: 0.6984 - val_loss: 0.6357 - val_acc: 0.6850\n",
      "Epoch 55/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5979 - acc: 0.6977 - val_loss: 0.6264 - val_acc: 0.6851\n",
      "Epoch 56/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5959 - acc: 0.7006 - val_loss: 0.6392 - val_acc: 0.6829\n",
      "Epoch 57/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5970 - acc: 0.7011 - val_loss: 0.6319 - val_acc: 0.6857\n",
      "Epoch 58/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5971 - acc: 0.7030 - val_loss: 0.6273 - val_acc: 0.6851\n",
      "Epoch 59/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5989 - acc: 0.6964 - val_loss: 0.6239 - val_acc: 0.6854\n",
      "Epoch 60/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6000 - acc: 0.6889 - val_loss: 0.6160 - val_acc: 0.6859\n",
      "Epoch 61/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5977 - acc: 0.6968 - val_loss: 0.6313 - val_acc: 0.6856\n",
      "Epoch 62/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5968 - acc: 0.6980 - val_loss: 0.6265 - val_acc: 0.6851\n",
      "Epoch 63/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5958 - acc: 0.7013 - val_loss: 0.6297 - val_acc: 0.6847\n",
      "Epoch 64/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5983 - acc: 0.6986 - val_loss: 0.6443 - val_acc: 0.6846\n",
      "Epoch 65/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5950 - acc: 0.7000 - val_loss: 0.6380 - val_acc: 0.6853\n",
      "Epoch 66/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6003 - acc: 0.6969 - val_loss: 0.6189 - val_acc: 0.6854\n",
      "Epoch 67/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5988 - acc: 0.6964 - val_loss: 0.6404 - val_acc: 0.6841\n",
      "Epoch 68/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5937 - acc: 0.7028 - val_loss: 0.6357 - val_acc: 0.6842\n",
      "Epoch 69/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5942 - acc: 0.7019 - val_loss: 0.6318 - val_acc: 0.6847\n",
      "Epoch 70/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5936 - acc: 0.7030 - val_loss: 0.6505 - val_acc: 0.6832\n",
      "Epoch 71/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5942 - acc: 0.7012 - val_loss: 0.6509 - val_acc: 0.6843\n",
      "Epoch 72/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5971 - acc: 0.6981 - val_loss: 0.6563 - val_acc: 0.6831\n",
      "Epoch 73/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5980 - acc: 0.6960 - val_loss: 0.6347 - val_acc: 0.6856\n",
      "Epoch 74/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5944 - acc: 0.7006 - val_loss: 0.6449 - val_acc: 0.6847\n",
      "Epoch 75/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5933 - acc: 0.7021 - val_loss: 0.6401 - val_acc: 0.6848\n",
      "Epoch 76/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5990 - acc: 0.6922 - val_loss: 0.6522 - val_acc: 0.6842\n",
      "Epoch 77/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5967 - acc: 0.6934 - val_loss: 0.6402 - val_acc: 0.6854\n",
      "Epoch 78/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5939 - acc: 0.6998 - val_loss: 0.6344 - val_acc: 0.6851\n",
      "Epoch 79/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5913 - acc: 0.7040 - val_loss: 0.6356 - val_acc: 0.6851\n",
      "Epoch 80/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5944 - acc: 0.6998 - val_loss: 0.6518 - val_acc: 0.6841\n",
      "Epoch 81/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5947 - acc: 0.7008 - val_loss: 0.6300 - val_acc: 0.6856\n",
      "Epoch 82/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5924 - acc: 0.7050 - val_loss: 0.6465 - val_acc: 0.6799\n",
      "Epoch 83/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5958 - acc: 0.6993 - val_loss: 0.6322 - val_acc: 0.6851\n",
      "Epoch 84/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5952 - acc: 0.6987 - val_loss: 0.6400 - val_acc: 0.6852\n",
      "Epoch 85/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5914 - acc: 0.7034 - val_loss: 0.6414 - val_acc: 0.6853\n",
      "Epoch 86/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5915 - acc: 0.7036 - val_loss: 0.6527 - val_acc: 0.6842\n",
      "Epoch 87/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5920 - acc: 0.7015 - val_loss: 0.6417 - val_acc: 0.6851\n",
      "Epoch 88/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5908 - acc: 0.7045 - val_loss: 0.6517 - val_acc: 0.6847\n",
      "Epoch 89/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5934 - acc: 0.7009 - val_loss: 0.6624 - val_acc: 0.6466\n",
      "Epoch 90/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5949 - acc: 0.6986 - val_loss: 0.6578 - val_acc: 0.6823\n",
      "Epoch 91/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5907 - acc: 0.7047 - val_loss: 0.6298 - val_acc: 0.6847\n",
      "Epoch 92/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5909 - acc: 0.7039 - val_loss: 0.6334 - val_acc: 0.6853\n",
      "Epoch 93/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5907 - acc: 0.7018 - val_loss: 0.6482 - val_acc: 0.6847\n",
      "Epoch 94/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5922 - acc: 0.6995 - val_loss: 0.6565 - val_acc: 0.6843\n",
      "Epoch 95/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5921 - acc: 0.7012 - val_loss: 0.6521 - val_acc: 0.6831\n",
      "Epoch 96/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5920 - acc: 0.7003 - val_loss: 0.6463 - val_acc: 0.6811\n",
      "Epoch 97/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5917 - acc: 0.7030 - val_loss: 0.6488 - val_acc: 0.6851\n",
      "Epoch 98/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5902 - acc: 0.7041 - val_loss: 0.6362 - val_acc: 0.6855\n",
      "Epoch 99/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5927 - acc: 0.7019 - val_loss: 0.6583 - val_acc: 0.6842\n",
      "Epoch 100/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5927 - acc: 0.7021 - val_loss: 0.6459 - val_acc: 0.6856\n",
      "Epoch 101/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5915 - acc: 0.7033 - val_loss: 0.6362 - val_acc: 0.6852\n",
      "Epoch 102/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5931 - acc: 0.6997 - val_loss: 0.6431 - val_acc: 0.6830\n",
      "Epoch 103/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6002 - acc: 0.7003 - val_loss: 0.6471 - val_acc: 0.6574\n",
      "Epoch 104/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5950 - acc: 0.7027 - val_loss: 0.6440 - val_acc: 0.6842\n",
      "Epoch 105/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5896 - acc: 0.7076 - val_loss: 0.6280 - val_acc: 0.6858\n",
      "Epoch 106/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5916 - acc: 0.7015 - val_loss: 0.6525 - val_acc: 0.6846\n",
      "Epoch 107/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5903 - acc: 0.7031 - val_loss: 0.6313 - val_acc: 0.6860\n",
      "Epoch 108/125\n",
      "21816/21816 [==============================] - 0s 17us/step - loss: 0.5950 - acc: 0.6995 - val_loss: 0.6179 - val_acc: 0.6854\n",
      "Epoch 109/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5920 - acc: 0.7004 - val_loss: 0.6425 - val_acc: 0.6845\n",
      "Epoch 110/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5912 - acc: 0.7022 - val_loss: 0.6336 - val_acc: 0.6856\n",
      "Epoch 111/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5898 - acc: 0.7086 - val_loss: 0.6384 - val_acc: 0.6802\n",
      "Epoch 112/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5921 - acc: 0.7032 - val_loss: 0.6485 - val_acc: 0.6835\n",
      "Epoch 113/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5936 - acc: 0.7001 - val_loss: 0.6515 - val_acc: 0.6848\n",
      "Epoch 114/125\n",
      "21816/21816 [==============================] - 0s 16us/step - loss: 0.5880 - acc: 0.7070 - val_loss: 0.6391 - val_acc: 0.6860\n",
      "Epoch 115/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5879 - acc: 0.7062 - val_loss: 0.6341 - val_acc: 0.6857\n",
      "Epoch 116/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5888 - acc: 0.7048 - val_loss: 0.6423 - val_acc: 0.6858\n",
      "Epoch 117/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5874 - acc: 0.7068 - val_loss: 0.6402 - val_acc: 0.6860\n",
      "Epoch 118/125\n",
      "21816/21816 [==============================] - 0s 16us/step - loss: 0.5886 - acc: 0.7065 - val_loss: 0.6646 - val_acc: 0.6735\n",
      "Epoch 119/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5939 - acc: 0.7061 - val_loss: 0.6383 - val_acc: 0.6755\n",
      "Epoch 120/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5936 - acc: 0.7012 - val_loss: 0.6419 - val_acc: 0.6797\n",
      "Epoch 121/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5889 - acc: 0.7039 - val_loss: 0.6503 - val_acc: 0.6837\n",
      "Epoch 122/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5892 - acc: 0.7063 - val_loss: 0.6410 - val_acc: 0.6844\n",
      "Epoch 123/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5913 - acc: 0.7018 - val_loss: 0.6701 - val_acc: 0.6806\n",
      "Epoch 124/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5903 - acc: 0.7055 - val_loss: 0.6388 - val_acc: 0.6846\n",
      "Epoch 125/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5924 - acc: 0.7001 - val_loss: 0.6269 - val_acc: 0.6855\n",
      "Fold # {3}\n",
      "Train on 21816 samples, validate on 9351 samples\n",
      "Epoch 1/125\n",
      "21816/21816 [==============================] - 3s 148us/step - loss: 3.9666 - acc: 0.4865 - val_loss: 3.4564 - val_acc: 0.4829\n",
      "Epoch 2/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 3.0665 - acc: 0.5083 - val_loss: 2.6735 - val_acc: 0.6014\n",
      "Epoch 3/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 2.3840 - acc: 0.6482 - val_loss: 2.0937 - val_acc: 0.6708\n",
      "Epoch 4/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.8802 - acc: 0.6627 - val_loss: 1.6669 - val_acc: 0.6719\n",
      "Epoch 5/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.5108 - acc: 0.6642 - val_loss: 1.3555 - val_acc: 0.6736\n",
      "Epoch 6/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.2424 - acc: 0.6722 - val_loss: 1.1333 - val_acc: 0.6736\n",
      "Epoch 7/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 1.0506 - acc: 0.6758 - val_loss: 0.9735 - val_acc: 0.6732\n",
      "Epoch 8/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.9142 - acc: 0.6781 - val_loss: 0.8608 - val_acc: 0.6730\n",
      "Epoch 9/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.8167 - acc: 0.6797 - val_loss: 0.7816 - val_acc: 0.6713\n",
      "Epoch 10/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.7499 - acc: 0.6784 - val_loss: 0.7284 - val_acc: 0.6771\n",
      "Epoch 11/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.7014 - acc: 0.6823 - val_loss: 0.6866 - val_acc: 0.6740\n",
      "Epoch 12/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6680 - acc: 0.6872 - val_loss: 0.6641 - val_acc: 0.6802\n",
      "Epoch 13/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6499 - acc: 0.6850 - val_loss: 0.6449 - val_acc: 0.6826\n",
      "Epoch 14/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6329 - acc: 0.6865 - val_loss: 0.6348 - val_acc: 0.6845\n",
      "Epoch 15/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6245 - acc: 0.6855 - val_loss: 0.6297 - val_acc: 0.6837\n",
      "Epoch 16/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6178 - acc: 0.6893 - val_loss: 0.6237 - val_acc: 0.6842\n",
      "Epoch 17/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6130 - acc: 0.6883 - val_loss: 0.6194 - val_acc: 0.6853\n",
      "Epoch 18/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6147 - acc: 0.6863 - val_loss: 0.6209 - val_acc: 0.6846\n",
      "Epoch 19/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6115 - acc: 0.6874 - val_loss: 0.6316 - val_acc: 0.6827\n",
      "Epoch 20/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6089 - acc: 0.6896 - val_loss: 0.6213 - val_acc: 0.6845\n",
      "Epoch 21/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6054 - acc: 0.6952 - val_loss: 0.6268 - val_acc: 0.6842\n",
      "Epoch 22/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6052 - acc: 0.6940 - val_loss: 0.6195 - val_acc: 0.6841\n",
      "Epoch 23/125\n",
      "21816/21816 [==============================] - 0s 18us/step - loss: 0.6048 - acc: 0.6938 - val_loss: 0.6220 - val_acc: 0.6841\n",
      "Epoch 24/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.6033 - acc: 0.6959 - val_loss: 0.6187 - val_acc: 0.6850\n",
      "Epoch 25/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.6018 - acc: 0.6985 - val_loss: 0.6216 - val_acc: 0.6851\n",
      "Epoch 26/125\n",
      "21816/21816 [==============================] - 0s 16us/step - loss: 0.6030 - acc: 0.6928 - val_loss: 0.6353 - val_acc: 0.6830\n",
      "Epoch 27/125\n",
      "21816/21816 [==============================] - 0s 16us/step - loss: 0.6053 - acc: 0.6907 - val_loss: 0.6242 - val_acc: 0.6851\n",
      "Epoch 28/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6024 - acc: 0.6947 - val_loss: 0.6209 - val_acc: 0.6856\n",
      "Epoch 29/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6066 - acc: 0.6903 - val_loss: 0.6364 - val_acc: 0.6838\n",
      "Epoch 30/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6032 - acc: 0.6949 - val_loss: 0.6257 - val_acc: 0.6844\n",
      "Epoch 31/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6025 - acc: 0.6964 - val_loss: 0.6273 - val_acc: 0.6845\n",
      "Epoch 32/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6043 - acc: 0.6913 - val_loss: 0.6216 - val_acc: 0.6845\n",
      "Epoch 33/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5993 - acc: 0.6980 - val_loss: 0.6225 - val_acc: 0.6847\n",
      "Epoch 34/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6035 - acc: 0.6938 - val_loss: 0.6192 - val_acc: 0.6847\n",
      "Epoch 35/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6028 - acc: 0.6947 - val_loss: 0.6278 - val_acc: 0.6842\n",
      "Epoch 36/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5989 - acc: 0.6987 - val_loss: 0.6351 - val_acc: 0.6831\n",
      "Epoch 37/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5973 - acc: 0.7018 - val_loss: 0.6408 - val_acc: 0.6824\n",
      "Epoch 38/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6022 - acc: 0.6902 - val_loss: 0.6298 - val_acc: 0.6840\n",
      "Epoch 39/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5991 - acc: 0.6973 - val_loss: 0.6282 - val_acc: 0.6844\n",
      "Epoch 40/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5982 - acc: 0.6972 - val_loss: 0.6268 - val_acc: 0.6847\n",
      "Epoch 41/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5977 - acc: 0.6985 - val_loss: 0.6363 - val_acc: 0.6843\n",
      "Epoch 42/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5959 - acc: 0.7020 - val_loss: 0.6412 - val_acc: 0.6830\n",
      "Epoch 43/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6042 - acc: 0.6913 - val_loss: 0.6389 - val_acc: 0.6833\n",
      "Epoch 44/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5996 - acc: 0.6976 - val_loss: 0.6219 - val_acc: 0.6854\n",
      "Epoch 45/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5981 - acc: 0.6991 - val_loss: 0.6274 - val_acc: 0.6847\n",
      "Epoch 46/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5974 - acc: 0.6973 - val_loss: 0.6392 - val_acc: 0.6827\n",
      "Epoch 47/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5999 - acc: 0.6972 - val_loss: 0.6462 - val_acc: 0.6793\n",
      "Epoch 48/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5992 - acc: 0.6994 - val_loss: 0.6440 - val_acc: 0.6825\n",
      "Epoch 49/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5995 - acc: 0.6998 - val_loss: 0.6470 - val_acc: 0.6816\n",
      "Epoch 50/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5973 - acc: 0.6995 - val_loss: 0.6411 - val_acc: 0.6845\n",
      "Epoch 51/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5949 - acc: 0.7025 - val_loss: 0.6373 - val_acc: 0.6848\n",
      "Epoch 52/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5958 - acc: 0.7020 - val_loss: 0.6307 - val_acc: 0.6847\n",
      "Epoch 53/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5959 - acc: 0.7015 - val_loss: 0.6242 - val_acc: 0.6845\n",
      "Epoch 54/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5958 - acc: 0.7007 - val_loss: 0.6359 - val_acc: 0.6850\n",
      "Epoch 55/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5964 - acc: 0.6993 - val_loss: 0.6403 - val_acc: 0.6845\n",
      "Epoch 56/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5965 - acc: 0.6987 - val_loss: 0.6299 - val_acc: 0.6850\n",
      "Epoch 57/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5968 - acc: 0.6998 - val_loss: 0.6343 - val_acc: 0.6847\n",
      "Epoch 58/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5947 - acc: 0.7028 - val_loss: 0.6338 - val_acc: 0.6837\n",
      "Epoch 59/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6044 - acc: 0.6897 - val_loss: 0.6213 - val_acc: 0.6862\n",
      "Epoch 60/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5996 - acc: 0.6946 - val_loss: 0.6489 - val_acc: 0.6841\n",
      "Epoch 61/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5956 - acc: 0.7021 - val_loss: 0.6366 - val_acc: 0.6851\n",
      "Epoch 62/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5954 - acc: 0.7012 - val_loss: 0.6465 - val_acc: 0.6676\n",
      "Epoch 63/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5941 - acc: 0.7013 - val_loss: 0.6283 - val_acc: 0.6848\n",
      "Epoch 64/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5953 - acc: 0.6998 - val_loss: 0.6507 - val_acc: 0.6521\n",
      "Epoch 65/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5960 - acc: 0.6989 - val_loss: 0.6300 - val_acc: 0.6842\n",
      "Epoch 66/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5952 - acc: 0.7017 - val_loss: 0.6276 - val_acc: 0.6855\n",
      "Epoch 67/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5969 - acc: 0.7001 - val_loss: 0.6371 - val_acc: 0.6841\n",
      "Epoch 68/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5973 - acc: 0.6993 - val_loss: 0.6394 - val_acc: 0.6833\n",
      "Epoch 69/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5939 - acc: 0.7017 - val_loss: 0.6360 - val_acc: 0.6850\n",
      "Epoch 70/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5937 - acc: 0.7011 - val_loss: 0.6337 - val_acc: 0.6851\n",
      "Epoch 71/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5926 - acc: 0.7043 - val_loss: 0.6322 - val_acc: 0.6854\n",
      "Epoch 72/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5928 - acc: 0.7033 - val_loss: 0.6352 - val_acc: 0.6829\n",
      "Epoch 73/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6043 - acc: 0.6992 - val_loss: 0.6575 - val_acc: 0.6730\n",
      "Epoch 74/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6047 - acc: 0.6945 - val_loss: 0.6501 - val_acc: 0.6778\n",
      "Epoch 75/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5967 - acc: 0.7039 - val_loss: 0.6470 - val_acc: 0.6799\n",
      "Epoch 76/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5946 - acc: 0.7005 - val_loss: 0.6321 - val_acc: 0.6822\n",
      "Epoch 77/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5929 - acc: 0.7034 - val_loss: 0.6437 - val_acc: 0.6831\n",
      "Epoch 78/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5951 - acc: 0.7002 - val_loss: 0.6453 - val_acc: 0.6841\n",
      "Epoch 79/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5923 - acc: 0.7047 - val_loss: 0.6436 - val_acc: 0.6837\n",
      "Epoch 80/125\n",
      "21816/21816 [==============================] - 0s 18us/step - loss: 0.5917 - acc: 0.7048 - val_loss: 0.6253 - val_acc: 0.6856\n",
      "Epoch 81/125\n",
      "21816/21816 [==============================] - 0s 22us/step - loss: 0.5936 - acc: 0.6999 - val_loss: 0.6190 - val_acc: 0.6859\n",
      "Epoch 82/125\n",
      "21816/21816 [==============================] - 1s 23us/step - loss: 0.5944 - acc: 0.7011 - val_loss: 0.6403 - val_acc: 0.6855\n",
      "Epoch 83/125\n",
      "21816/21816 [==============================] - 0s 21us/step - loss: 0.5908 - acc: 0.7040 - val_loss: 0.6449 - val_acc: 0.6855\n",
      "Epoch 84/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.5933 - acc: 0.7031 - val_loss: 0.6364 - val_acc: 0.6852\n",
      "Epoch 85/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5914 - acc: 0.7017 - val_loss: 0.6459 - val_acc: 0.6851\n",
      "Epoch 86/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6010 - acc: 0.6974 - val_loss: 0.6519 - val_acc: 0.6759\n",
      "Epoch 87/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5985 - acc: 0.6960 - val_loss: 0.6459 - val_acc: 0.6841\n",
      "Epoch 88/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5944 - acc: 0.7038 - val_loss: 0.6412 - val_acc: 0.6846\n",
      "Epoch 89/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5910 - acc: 0.7042 - val_loss: 0.6479 - val_acc: 0.6848\n",
      "Epoch 90/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5897 - acc: 0.7051 - val_loss: 0.6383 - val_acc: 0.6853\n",
      "Epoch 91/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5885 - acc: 0.7077 - val_loss: 0.6384 - val_acc: 0.6846\n",
      "Epoch 92/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5892 - acc: 0.7068 - val_loss: 0.6403 - val_acc: 0.6848\n",
      "Epoch 93/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5920 - acc: 0.7037 - val_loss: 0.6474 - val_acc: 0.6844\n",
      "Epoch 94/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5906 - acc: 0.7050 - val_loss: 0.6315 - val_acc: 0.6855\n",
      "Epoch 95/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5887 - acc: 0.7062 - val_loss: 0.6672 - val_acc: 0.6499\n",
      "Epoch 96/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5896 - acc: 0.7033 - val_loss: 0.6298 - val_acc: 0.6857\n",
      "Epoch 97/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5911 - acc: 0.7040 - val_loss: 0.6406 - val_acc: 0.6851\n",
      "Epoch 98/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5909 - acc: 0.7011 - val_loss: 0.6292 - val_acc: 0.6858\n",
      "Epoch 99/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5935 - acc: 0.6992 - val_loss: 0.6394 - val_acc: 0.6855\n",
      "Epoch 100/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5894 - acc: 0.7042 - val_loss: 0.6401 - val_acc: 0.6854\n",
      "Epoch 101/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5879 - acc: 0.7083 - val_loss: 0.6504 - val_acc: 0.6857\n",
      "Epoch 102/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5899 - acc: 0.7043 - val_loss: 0.6571 - val_acc: 0.6856\n",
      "Epoch 103/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5910 - acc: 0.7002 - val_loss: 0.6565 - val_acc: 0.6845\n",
      "Epoch 104/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5900 - acc: 0.7044 - val_loss: 0.6441 - val_acc: 0.6853\n",
      "Epoch 105/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5896 - acc: 0.7066 - val_loss: 0.6360 - val_acc: 0.6812\n",
      "Epoch 106/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5939 - acc: 0.7011 - val_loss: 0.6291 - val_acc: 0.6839\n",
      "Epoch 107/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5927 - acc: 0.7020 - val_loss: 0.6483 - val_acc: 0.6842\n",
      "Epoch 108/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5890 - acc: 0.7059 - val_loss: 0.6443 - val_acc: 0.6861\n",
      "Epoch 109/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5882 - acc: 0.7061 - val_loss: 0.6391 - val_acc: 0.6855\n",
      "Epoch 110/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5887 - acc: 0.7087 - val_loss: 0.6360 - val_acc: 0.6828\n",
      "Epoch 111/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5945 - acc: 0.7032 - val_loss: 0.6440 - val_acc: 0.6782\n",
      "Epoch 112/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5950 - acc: 0.6995 - val_loss: 0.6620 - val_acc: 0.6807\n",
      "Epoch 113/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5897 - acc: 0.7049 - val_loss: 0.6415 - val_acc: 0.6827\n",
      "Epoch 114/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5871 - acc: 0.7087 - val_loss: 0.6528 - val_acc: 0.6837\n",
      "Epoch 115/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5893 - acc: 0.7049 - val_loss: 0.6401 - val_acc: 0.6843\n",
      "Epoch 116/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5918 - acc: 0.7018 - val_loss: 0.6386 - val_acc: 0.6853\n",
      "Epoch 117/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5900 - acc: 0.7045 - val_loss: 0.6581 - val_acc: 0.6538\n",
      "Epoch 118/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5902 - acc: 0.7045 - val_loss: 0.6587 - val_acc: 0.6852\n",
      "Epoch 119/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5879 - acc: 0.7081 - val_loss: 0.6403 - val_acc: 0.6856\n",
      "Epoch 120/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5893 - acc: 0.7039 - val_loss: 0.6468 - val_acc: 0.6848\n",
      "Epoch 121/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5900 - acc: 0.7026 - val_loss: 0.6454 - val_acc: 0.6856\n",
      "Epoch 122/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5868 - acc: 0.7048 - val_loss: 0.6491 - val_acc: 0.6860\n",
      "Epoch 123/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5868 - acc: 0.7065 - val_loss: 0.6444 - val_acc: 0.6856\n",
      "Epoch 124/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5878 - acc: 0.7048 - val_loss: 0.6426 - val_acc: 0.6859\n",
      "Epoch 125/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5892 - acc: 0.7054 - val_loss: 0.6678 - val_acc: 0.6524\n",
      "Fold # {4}\n",
      "Train on 21816 samples, validate on 9351 samples\n",
      "Epoch 1/125\n",
      "21816/21816 [==============================] - 4s 170us/step - loss: 3.9504 - acc: 0.5136 - val_loss: 3.4449 - val_acc: 0.5179\n",
      "Epoch 2/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 3.0608 - acc: 0.5131 - val_loss: 2.6729 - val_acc: 0.5170\n",
      "Epoch 3/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 2.3871 - acc: 0.5928 - val_loss: 2.0992 - val_acc: 0.6383\n",
      "Epoch 4/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.8866 - acc: 0.6557 - val_loss: 1.6732 - val_acc: 0.6628\n",
      "Epoch 5/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.5149 - acc: 0.6712 - val_loss: 1.3594 - val_acc: 0.6614\n",
      "Epoch 6/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.2432 - acc: 0.6752 - val_loss: 1.1333 - val_acc: 0.6625\n",
      "Epoch 7/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.0481 - acc: 0.6765 - val_loss: 0.9705 - val_acc: 0.6645\n",
      "Epoch 8/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.9071 - acc: 0.6821 - val_loss: 0.8546 - val_acc: 0.6743\n",
      "Epoch 9/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.8078 - acc: 0.6837 - val_loss: 0.7751 - val_acc: 0.6762\n",
      "Epoch 10/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.7396 - acc: 0.6848 - val_loss: 0.7198 - val_acc: 0.6785\n",
      "Epoch 11/125\n",
      "21816/21816 [==============================] - 0s 15us/step - loss: 0.6924 - acc: 0.6878 - val_loss: 0.6876 - val_acc: 0.6795\n",
      "Epoch 12/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6621 - acc: 0.6864 - val_loss: 0.6617 - val_acc: 0.6801\n",
      "Epoch 13/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6409 - acc: 0.6910 - val_loss: 0.6485 - val_acc: 0.6809\n",
      "Epoch 14/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6299 - acc: 0.6901 - val_loss: 0.6342 - val_acc: 0.6694\n",
      "Epoch 15/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6226 - acc: 0.6872 - val_loss: 0.6293 - val_acc: 0.6828\n",
      "Epoch 16/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6178 - acc: 0.6866 - val_loss: 0.6241 - val_acc: 0.6850\n",
      "Epoch 17/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6135 - acc: 0.6922 - val_loss: 0.6239 - val_acc: 0.6848\n",
      "Epoch 18/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6078 - acc: 0.6929 - val_loss: 0.6284 - val_acc: 0.6838\n",
      "Epoch 19/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6094 - acc: 0.6903 - val_loss: 0.6155 - val_acc: 0.6855\n",
      "Epoch 20/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6072 - acc: 0.6922 - val_loss: 0.6287 - val_acc: 0.6837\n",
      "Epoch 21/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6061 - acc: 0.6964 - val_loss: 0.6235 - val_acc: 0.6846\n",
      "Epoch 22/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6064 - acc: 0.6919 - val_loss: 0.6198 - val_acc: 0.6831\n",
      "Epoch 23/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6094 - acc: 0.6900 - val_loss: 0.6191 - val_acc: 0.6802\n",
      "Epoch 24/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6043 - acc: 0.6947 - val_loss: 0.6275 - val_acc: 0.6824\n",
      "Epoch 25/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6049 - acc: 0.6926 - val_loss: 0.6314 - val_acc: 0.6833\n",
      "Epoch 26/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6029 - acc: 0.6965 - val_loss: 0.6177 - val_acc: 0.6848\n",
      "Epoch 27/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6025 - acc: 0.6940 - val_loss: 0.6269 - val_acc: 0.6844\n",
      "Epoch 28/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6009 - acc: 0.6937 - val_loss: 0.6328 - val_acc: 0.6848\n",
      "Epoch 29/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5994 - acc: 0.6977 - val_loss: 0.6310 - val_acc: 0.6845\n",
      "Epoch 30/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5988 - acc: 0.6982 - val_loss: 0.6328 - val_acc: 0.6843\n",
      "Epoch 31/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6016 - acc: 0.6949 - val_loss: 0.6260 - val_acc: 0.6845\n",
      "Epoch 32/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6009 - acc: 0.6956 - val_loss: 0.6387 - val_acc: 0.6843\n",
      "Epoch 33/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5987 - acc: 0.6989 - val_loss: 0.6341 - val_acc: 0.6845\n",
      "Epoch 34/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6033 - acc: 0.6931 - val_loss: 0.6282 - val_acc: 0.6844\n",
      "Epoch 35/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6004 - acc: 0.6947 - val_loss: 0.6291 - val_acc: 0.6846\n",
      "Epoch 36/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5988 - acc: 0.6974 - val_loss: 0.6409 - val_acc: 0.6838\n",
      "Epoch 37/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6015 - acc: 0.6967 - val_loss: 0.6460 - val_acc: 0.6829\n",
      "Epoch 38/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6015 - acc: 0.6961 - val_loss: 0.6388 - val_acc: 0.6841\n",
      "Epoch 39/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5989 - acc: 0.6993 - val_loss: 0.6281 - val_acc: 0.6850\n",
      "Epoch 40/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6001 - acc: 0.6971 - val_loss: 0.6293 - val_acc: 0.6858\n",
      "Epoch 41/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5975 - acc: 0.6997 - val_loss: 0.6356 - val_acc: 0.6847\n",
      "Epoch 42/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5972 - acc: 0.6997 - val_loss: 0.6411 - val_acc: 0.6833\n",
      "Epoch 43/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5978 - acc: 0.6986 - val_loss: 0.6348 - val_acc: 0.6845\n",
      "Epoch 44/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5966 - acc: 0.7022 - val_loss: 0.6435 - val_acc: 0.6836\n",
      "Epoch 45/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5977 - acc: 0.6971 - val_loss: 0.6338 - val_acc: 0.6844\n",
      "Epoch 46/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5970 - acc: 0.6960 - val_loss: 0.6288 - val_acc: 0.6854\n",
      "Epoch 47/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5970 - acc: 0.6998 - val_loss: 0.6457 - val_acc: 0.6845\n",
      "Epoch 48/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5968 - acc: 0.6986 - val_loss: 0.6544 - val_acc: 0.6841\n",
      "Epoch 49/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5985 - acc: 0.6995 - val_loss: 0.6502 - val_acc: 0.6845\n",
      "Epoch 50/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5966 - acc: 0.6981 - val_loss: 0.6543 - val_acc: 0.6839\n",
      "Epoch 51/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6005 - acc: 0.6931 - val_loss: 0.6197 - val_acc: 0.6857\n",
      "Epoch 52/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5981 - acc: 0.6968 - val_loss: 0.6301 - val_acc: 0.6851\n",
      "Epoch 53/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5958 - acc: 0.7003 - val_loss: 0.6384 - val_acc: 0.6844\n",
      "Epoch 54/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5961 - acc: 0.6985 - val_loss: 0.6585 - val_acc: 0.6378\n",
      "Epoch 55/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5973 - acc: 0.6981 - val_loss: 0.6368 - val_acc: 0.6847\n",
      "Epoch 56/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5957 - acc: 0.6999 - val_loss: 0.6577 - val_acc: 0.6839\n",
      "Epoch 57/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5991 - acc: 0.6943 - val_loss: 0.6380 - val_acc: 0.6852\n",
      "Epoch 58/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5945 - acc: 0.7054 - val_loss: 0.6296 - val_acc: 0.6848\n",
      "Epoch 59/125\n",
      "21816/21816 [==============================] - 0s 17us/step - loss: 0.5930 - acc: 0.7046 - val_loss: 0.6386 - val_acc: 0.6848\n",
      "Epoch 60/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5950 - acc: 0.7036 - val_loss: 0.6338 - val_acc: 0.6850\n",
      "Epoch 61/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5943 - acc: 0.7014 - val_loss: 0.6441 - val_acc: 0.6855\n",
      "Epoch 62/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5950 - acc: 0.7004 - val_loss: 0.6449 - val_acc: 0.6846\n",
      "Epoch 63/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5932 - acc: 0.7049 - val_loss: 0.6274 - val_acc: 0.6842\n",
      "Epoch 64/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5957 - acc: 0.7004 - val_loss: 0.6468 - val_acc: 0.6844\n",
      "Epoch 65/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5982 - acc: 0.6959 - val_loss: 0.6330 - val_acc: 0.6845\n",
      "Epoch 66/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5941 - acc: 0.7025 - val_loss: 0.6308 - val_acc: 0.6851\n",
      "Epoch 67/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5939 - acc: 0.7020 - val_loss: 0.6270 - val_acc: 0.6860\n",
      "Epoch 68/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5952 - acc: 0.7017 - val_loss: 0.6338 - val_acc: 0.6852\n",
      "Epoch 69/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5965 - acc: 0.6999 - val_loss: 0.6455 - val_acc: 0.6848\n",
      "Epoch 70/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5950 - acc: 0.7025 - val_loss: 0.6324 - val_acc: 0.6813\n",
      "Epoch 71/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5932 - acc: 0.7015 - val_loss: 0.6412 - val_acc: 0.6841\n",
      "Epoch 72/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5921 - acc: 0.7020 - val_loss: 0.6379 - val_acc: 0.6853\n",
      "Epoch 73/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5942 - acc: 0.7005 - val_loss: 0.6324 - val_acc: 0.6856\n",
      "Epoch 74/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5923 - acc: 0.7023 - val_loss: 0.6464 - val_acc: 0.6851\n",
      "Epoch 75/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5962 - acc: 0.6998 - val_loss: 0.6384 - val_acc: 0.6854\n",
      "Epoch 76/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5924 - acc: 0.7028 - val_loss: 0.6469 - val_acc: 0.6844\n",
      "Epoch 77/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5922 - acc: 0.7032 - val_loss: 0.6338 - val_acc: 0.6854\n",
      "Epoch 78/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5929 - acc: 0.7006 - val_loss: 0.6367 - val_acc: 0.6852\n",
      "Epoch 79/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5900 - acc: 0.7029 - val_loss: 0.6269 - val_acc: 0.6854\n",
      "Epoch 80/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5902 - acc: 0.7034 - val_loss: 0.6405 - val_acc: 0.6848\n",
      "Epoch 81/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5894 - acc: 0.7032 - val_loss: 0.6346 - val_acc: 0.6859\n",
      "Epoch 82/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5932 - acc: 0.7006 - val_loss: 0.6337 - val_acc: 0.6851\n",
      "Epoch 83/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5965 - acc: 0.6995 - val_loss: 0.6257 - val_acc: 0.6862\n",
      "Epoch 84/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5898 - acc: 0.7038 - val_loss: 0.6329 - val_acc: 0.6859\n",
      "Epoch 85/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5893 - acc: 0.7048 - val_loss: 0.6436 - val_acc: 0.6855\n",
      "Epoch 86/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5894 - acc: 0.7043 - val_loss: 0.6318 - val_acc: 0.6863\n",
      "Epoch 87/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5879 - acc: 0.7073 - val_loss: 0.6299 - val_acc: 0.6859\n",
      "Epoch 88/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5932 - acc: 0.7037 - val_loss: 0.6543 - val_acc: 0.6853\n",
      "Epoch 89/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5957 - acc: 0.6977 - val_loss: 0.6467 - val_acc: 0.6854\n",
      "Epoch 90/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5895 - acc: 0.7059 - val_loss: 0.6392 - val_acc: 0.6859\n",
      "Epoch 91/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5942 - acc: 0.6996 - val_loss: 0.6181 - val_acc: 0.6858\n",
      "Epoch 92/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5935 - acc: 0.6978 - val_loss: 0.6645 - val_acc: 0.6855\n",
      "Epoch 93/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5902 - acc: 0.7032 - val_loss: 0.6479 - val_acc: 0.6852\n",
      "Epoch 94/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5880 - acc: 0.7066 - val_loss: 0.6175 - val_acc: 0.6863\n",
      "Epoch 95/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5887 - acc: 0.7041 - val_loss: 0.6474 - val_acc: 0.6848\n",
      "Epoch 96/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5903 - acc: 0.7047 - val_loss: 0.6400 - val_acc: 0.6837\n",
      "Epoch 97/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5968 - acc: 0.7031 - val_loss: 0.6213 - val_acc: 0.6853\n",
      "Epoch 98/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5927 - acc: 0.7026 - val_loss: 0.6428 - val_acc: 0.6848\n",
      "Epoch 99/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5881 - acc: 0.7066 - val_loss: 0.6378 - val_acc: 0.6861\n",
      "Epoch 100/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5874 - acc: 0.7090 - val_loss: 0.6318 - val_acc: 0.6861\n",
      "Epoch 101/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5897 - acc: 0.7032 - val_loss: 0.6535 - val_acc: 0.6855\n",
      "Epoch 102/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5912 - acc: 0.7016 - val_loss: 0.6322 - val_acc: 0.6862\n",
      "Epoch 103/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5890 - acc: 0.7052 - val_loss: 0.6427 - val_acc: 0.6860\n",
      "Epoch 104/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5885 - acc: 0.7060 - val_loss: 0.6316 - val_acc: 0.6861\n",
      "Epoch 105/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5888 - acc: 0.7044 - val_loss: 0.6927 - val_acc: 0.6028\n",
      "Epoch 106/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5930 - acc: 0.6990 - val_loss: 0.6674 - val_acc: 0.6360\n",
      "Epoch 107/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5877 - acc: 0.7059 - val_loss: 0.6399 - val_acc: 0.6855\n",
      "Epoch 108/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5889 - acc: 0.7050 - val_loss: 0.6382 - val_acc: 0.6856\n",
      "Epoch 109/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5933 - acc: 0.6998 - val_loss: 0.6439 - val_acc: 0.6861\n",
      "Epoch 110/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5899 - acc: 0.7031 - val_loss: 0.6386 - val_acc: 0.6861\n",
      "Epoch 111/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5880 - acc: 0.7066 - val_loss: 0.6277 - val_acc: 0.6863\n",
      "Epoch 112/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5867 - acc: 0.7062 - val_loss: 0.6399 - val_acc: 0.6863\n",
      "Epoch 113/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5863 - acc: 0.7076 - val_loss: 0.6405 - val_acc: 0.6861\n",
      "Epoch 114/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5865 - acc: 0.7068 - val_loss: 0.6366 - val_acc: 0.6858\n",
      "Epoch 115/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5874 - acc: 0.7042 - val_loss: 0.6390 - val_acc: 0.6860\n",
      "Epoch 116/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5894 - acc: 0.7054 - val_loss: 0.6462 - val_acc: 0.6847\n",
      "Epoch 117/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5888 - acc: 0.7042 - val_loss: 0.6258 - val_acc: 0.6859\n",
      "Epoch 118/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5901 - acc: 0.7013 - val_loss: 0.6663 - val_acc: 0.6825\n",
      "Epoch 119/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5919 - acc: 0.7004 - val_loss: 0.6583 - val_acc: 0.6845\n",
      "Epoch 120/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5871 - acc: 0.7051 - val_loss: 0.6490 - val_acc: 0.6859\n",
      "Epoch 121/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5879 - acc: 0.7055 - val_loss: 0.6364 - val_acc: 0.6856\n",
      "Epoch 122/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5856 - acc: 0.7073 - val_loss: 0.6389 - val_acc: 0.6863\n",
      "Epoch 123/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5850 - acc: 0.7091 - val_loss: 0.6294 - val_acc: 0.6860\n",
      "Epoch 124/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5871 - acc: 0.7064 - val_loss: 0.6265 - val_acc: 0.6863\n",
      "Epoch 125/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5865 - acc: 0.7060 - val_loss: 0.6466 - val_acc: 0.6859\n",
      "Fold # {5}\n",
      "Train on 21816 samples, validate on 9351 samples\n",
      "Epoch 1/125\n",
      "21816/21816 [==============================] - 3s 150us/step - loss: 4.1181 - acc: 0.5136 - val_loss: 3.5912 - val_acc: 0.5178\n",
      "Epoch 2/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 3.1913 - acc: 0.5136 - val_loss: 2.7846 - val_acc: 0.5178\n",
      "Epoch 3/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 2.4869 - acc: 0.5077 - val_loss: 2.1877 - val_acc: 0.5626\n",
      "Epoch 4/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 1.9669 - acc: 0.6082 - val_loss: 1.7462 - val_acc: 0.6138\n",
      "Epoch 5/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 1.5824 - acc: 0.6118 - val_loss: 1.4200 - val_acc: 0.6151\n",
      "Epoch 6/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 1.2980 - acc: 0.6184 - val_loss: 1.1809 - val_acc: 0.6055\n",
      "Epoch 7/125\n",
      "21816/21816 [==============================] - 0s 9us/step - loss: 1.0924 - acc: 0.6177 - val_loss: 1.0114 - val_acc: 0.6139\n",
      "Epoch 8/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.9449 - acc: 0.6195 - val_loss: 0.8899 - val_acc: 0.6083\n",
      "Epoch 9/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.8409 - acc: 0.6198 - val_loss: 0.8072 - val_acc: 0.6158\n",
      "Epoch 10/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.7680 - acc: 0.6482 - val_loss: 0.7430 - val_acc: 0.6771\n",
      "Epoch 11/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.7115 - acc: 0.6843 - val_loss: 0.6960 - val_acc: 0.6748\n",
      "Epoch 12/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6745 - acc: 0.6844 - val_loss: 0.6687 - val_acc: 0.6718\n",
      "Epoch 13/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6519 - acc: 0.6838 - val_loss: 0.6488 - val_acc: 0.6747\n",
      "Epoch 14/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6349 - acc: 0.6859 - val_loss: 0.6377 - val_acc: 0.6798\n",
      "Epoch 15/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6260 - acc: 0.6864 - val_loss: 0.6288 - val_acc: 0.6719\n",
      "Epoch 16/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6191 - acc: 0.6859 - val_loss: 0.6258 - val_acc: 0.6815\n",
      "Epoch 17/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6152 - acc: 0.6853 - val_loss: 0.6223 - val_acc: 0.6816\n",
      "Epoch 18/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6118 - acc: 0.6877 - val_loss: 0.6206 - val_acc: 0.6820\n",
      "Epoch 19/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6105 - acc: 0.6882 - val_loss: 0.6166 - val_acc: 0.6903\n",
      "Epoch 20/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6111 - acc: 0.6836 - val_loss: 0.6287 - val_acc: 0.6813\n",
      "Epoch 21/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6097 - acc: 0.6842 - val_loss: 0.6144 - val_acc: 0.6779\n",
      "Epoch 22/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6087 - acc: 0.6851 - val_loss: 0.6169 - val_acc: 0.6824\n",
      "Epoch 23/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6069 - acc: 0.6872 - val_loss: 0.6174 - val_acc: 0.6823\n",
      "Epoch 24/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6053 - acc: 0.6901 - val_loss: 0.6242 - val_acc: 0.6797\n",
      "Epoch 25/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6085 - acc: 0.6890 - val_loss: 0.6225 - val_acc: 0.6814\n",
      "Epoch 26/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6068 - acc: 0.6901 - val_loss: 0.6157 - val_acc: 0.6838\n",
      "Epoch 27/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6053 - acc: 0.6897 - val_loss: 0.6175 - val_acc: 0.6832\n",
      "Epoch 28/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6054 - acc: 0.6900 - val_loss: 0.6188 - val_acc: 0.6837\n",
      "Epoch 29/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6043 - acc: 0.6903 - val_loss: 0.6201 - val_acc: 0.6831\n",
      "Epoch 30/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6026 - acc: 0.6922 - val_loss: 0.6168 - val_acc: 0.6843\n",
      "Epoch 31/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6029 - acc: 0.6919 - val_loss: 0.6115 - val_acc: 0.6848\n",
      "Epoch 32/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6037 - acc: 0.6905 - val_loss: 0.6180 - val_acc: 0.6831\n",
      "Epoch 33/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6028 - acc: 0.6920 - val_loss: 0.6198 - val_acc: 0.6840\n",
      "Epoch 34/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6022 - acc: 0.6919 - val_loss: 0.6339 - val_acc: 0.6829\n",
      "Epoch 35/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6016 - acc: 0.6934 - val_loss: 0.6203 - val_acc: 0.6838\n",
      "Epoch 36/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6021 - acc: 0.6930 - val_loss: 0.6105 - val_acc: 0.6851\n",
      "Epoch 37/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6055 - acc: 0.6918 - val_loss: 0.6160 - val_acc: 0.6848\n",
      "Epoch 38/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6048 - acc: 0.6895 - val_loss: 0.6135 - val_acc: 0.6861\n",
      "Epoch 39/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.6066 - acc: 0.6894 - val_loss: 0.6277 - val_acc: 0.6805\n",
      "Epoch 40/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6045 - acc: 0.6924 - val_loss: 0.6227 - val_acc: 0.6812\n",
      "Epoch 41/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6009 - acc: 0.6939 - val_loss: 0.6198 - val_acc: 0.6827\n",
      "Epoch 42/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5993 - acc: 0.6959 - val_loss: 0.6125 - val_acc: 0.6850\n",
      "Epoch 43/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6009 - acc: 0.6930 - val_loss: 0.6103 - val_acc: 0.6861\n",
      "Epoch 44/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6013 - acc: 0.6940 - val_loss: 0.6140 - val_acc: 0.6852\n",
      "Epoch 45/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5998 - acc: 0.6940 - val_loss: 0.6172 - val_acc: 0.6847\n",
      "Epoch 46/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6024 - acc: 0.6949 - val_loss: 0.6475 - val_acc: 0.6799\n",
      "Epoch 47/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6064 - acc: 0.6949 - val_loss: 0.6320 - val_acc: 0.6813\n",
      "Epoch 48/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6044 - acc: 0.6893 - val_loss: 0.6310 - val_acc: 0.6825\n",
      "Epoch 49/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6007 - acc: 0.6973 - val_loss: 0.6206 - val_acc: 0.6835\n",
      "Epoch 50/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6013 - acc: 0.6913 - val_loss: 0.6167 - val_acc: 0.6827\n",
      "Epoch 51/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6024 - acc: 0.6941 - val_loss: 0.6243 - val_acc: 0.6836\n",
      "Epoch 52/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6010 - acc: 0.6910 - val_loss: 0.6209 - val_acc: 0.6846\n",
      "Epoch 53/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6007 - acc: 0.6947 - val_loss: 0.6413 - val_acc: 0.6837\n",
      "Epoch 54/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.6025 - acc: 0.6909 - val_loss: 0.6586 - val_acc: 0.6829\n",
      "Epoch 55/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6014 - acc: 0.6928 - val_loss: 0.6212 - val_acc: 0.6851\n",
      "Epoch 56/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5987 - acc: 0.6964 - val_loss: 0.6325 - val_acc: 0.6844\n",
      "Epoch 57/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5994 - acc: 0.6952 - val_loss: 0.6236 - val_acc: 0.6840\n",
      "Epoch 58/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6002 - acc: 0.6918 - val_loss: 0.6375 - val_acc: 0.6841\n",
      "Epoch 59/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.6004 - acc: 0.6906 - val_loss: 0.6367 - val_acc: 0.6835\n",
      "Epoch 60/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5997 - acc: 0.6933 - val_loss: 0.6222 - val_acc: 0.6843\n",
      "Epoch 61/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5973 - acc: 0.6959 - val_loss: 0.6387 - val_acc: 0.6839\n",
      "Epoch 62/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5992 - acc: 0.6942 - val_loss: 0.6329 - val_acc: 0.6845\n",
      "Epoch 63/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6052 - acc: 0.6894 - val_loss: 0.6187 - val_acc: 0.6845\n",
      "Epoch 64/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6005 - acc: 0.6960 - val_loss: 0.6306 - val_acc: 0.6829\n",
      "Epoch 65/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5993 - acc: 0.6951 - val_loss: 0.6276 - val_acc: 0.6843\n",
      "Epoch 66/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5982 - acc: 0.6949 - val_loss: 0.6276 - val_acc: 0.6843\n",
      "Epoch 67/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5972 - acc: 0.6979 - val_loss: 0.6445 - val_acc: 0.6836\n",
      "Epoch 68/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5975 - acc: 0.6983 - val_loss: 0.6174 - val_acc: 0.6847\n",
      "Epoch 69/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.6011 - acc: 0.6933 - val_loss: 0.6187 - val_acc: 0.6856\n",
      "Epoch 70/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5991 - acc: 0.6944 - val_loss: 0.6342 - val_acc: 0.6843\n",
      "Epoch 71/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5961 - acc: 0.7000 - val_loss: 0.6372 - val_acc: 0.6848\n",
      "Epoch 72/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5963 - acc: 0.6977 - val_loss: 0.6394 - val_acc: 0.6837\n",
      "Epoch 73/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5965 - acc: 0.6975 - val_loss: 0.6239 - val_acc: 0.6848\n",
      "Epoch 74/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5979 - acc: 0.6963 - val_loss: 0.6473 - val_acc: 0.6839\n",
      "Epoch 75/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5972 - acc: 0.6985 - val_loss: 0.6419 - val_acc: 0.6836\n",
      "Epoch 76/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6038 - acc: 0.6857 - val_loss: 0.6486 - val_acc: 0.6838\n",
      "Epoch 77/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.6008 - acc: 0.6933 - val_loss: 0.6195 - val_acc: 0.6848\n",
      "Epoch 78/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5969 - acc: 0.6987 - val_loss: 0.6457 - val_acc: 0.6838\n",
      "Epoch 79/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5961 - acc: 0.6967 - val_loss: 0.6375 - val_acc: 0.6836\n",
      "Epoch 80/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5956 - acc: 0.7008 - val_loss: 0.6294 - val_acc: 0.6843\n",
      "Epoch 81/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5957 - acc: 0.6997 - val_loss: 0.6305 - val_acc: 0.6768\n",
      "Epoch 82/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5979 - acc: 0.6988 - val_loss: 0.6494 - val_acc: 0.6784\n",
      "Epoch 83/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5994 - acc: 0.6951 - val_loss: 0.6286 - val_acc: 0.6815\n",
      "Epoch 84/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5957 - acc: 0.6999 - val_loss: 0.6235 - val_acc: 0.6836\n",
      "Epoch 85/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5945 - acc: 0.7015 - val_loss: 0.6365 - val_acc: 0.6838\n",
      "Epoch 86/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5967 - acc: 0.6973 - val_loss: 0.6478 - val_acc: 0.6832\n",
      "Epoch 87/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5962 - acc: 0.7005 - val_loss: 0.6272 - val_acc: 0.6844\n",
      "Epoch 88/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5952 - acc: 0.6978 - val_loss: 0.6271 - val_acc: 0.6846\n",
      "Epoch 89/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5970 - acc: 0.6955 - val_loss: 0.6320 - val_acc: 0.6846\n",
      "Epoch 90/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5986 - acc: 0.6951 - val_loss: 0.6277 - val_acc: 0.6846\n",
      "Epoch 91/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5967 - acc: 0.6972 - val_loss: 0.6487 - val_acc: 0.6831\n",
      "Epoch 92/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5986 - acc: 0.6932 - val_loss: 0.6290 - val_acc: 0.6846\n",
      "Epoch 93/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5982 - acc: 0.6943 - val_loss: 0.6211 - val_acc: 0.6846\n",
      "Epoch 94/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5949 - acc: 0.6979 - val_loss: 0.6383 - val_acc: 0.6841\n",
      "Epoch 95/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5993 - acc: 0.6928 - val_loss: 0.6447 - val_acc: 0.6838\n",
      "Epoch 96/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5951 - acc: 0.6996 - val_loss: 0.6365 - val_acc: 0.6846\n",
      "Epoch 97/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5939 - acc: 0.7004 - val_loss: 0.6365 - val_acc: 0.6840\n",
      "Epoch 98/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5934 - acc: 0.7010 - val_loss: 0.6541 - val_acc: 0.6838\n",
      "Epoch 99/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5947 - acc: 0.6985 - val_loss: 0.6496 - val_acc: 0.6844\n",
      "Epoch 100/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5942 - acc: 0.7013 - val_loss: 0.6291 - val_acc: 0.6850\n",
      "Epoch 101/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5940 - acc: 0.6993 - val_loss: 0.6347 - val_acc: 0.6842\n",
      "Epoch 102/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5960 - acc: 0.6985 - val_loss: 0.6380 - val_acc: 0.6852\n",
      "Epoch 103/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5959 - acc: 0.7004 - val_loss: 0.6505 - val_acc: 0.6838\n",
      "Epoch 104/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5951 - acc: 0.7006 - val_loss: 0.6444 - val_acc: 0.6838\n",
      "Epoch 105/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5962 - acc: 0.6982 - val_loss: 0.6296 - val_acc: 0.6848\n",
      "Epoch 106/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5996 - acc: 0.6940 - val_loss: 0.6469 - val_acc: 0.6839\n",
      "Epoch 107/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5986 - acc: 0.6954 - val_loss: 0.6334 - val_acc: 0.6844\n",
      "Epoch 108/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5972 - acc: 0.6953 - val_loss: 0.6596 - val_acc: 0.6821\n",
      "Epoch 109/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5945 - acc: 0.6991 - val_loss: 0.6221 - val_acc: 0.6857\n",
      "Epoch 110/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5935 - acc: 0.6993 - val_loss: 0.6287 - val_acc: 0.6847\n",
      "Epoch 111/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5934 - acc: 0.7016 - val_loss: 0.6328 - val_acc: 0.6853\n",
      "Epoch 112/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5963 - acc: 0.6958 - val_loss: 0.6314 - val_acc: 0.6854\n",
      "Epoch 113/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5923 - acc: 0.7008 - val_loss: 0.6309 - val_acc: 0.6804\n",
      "Epoch 114/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5987 - acc: 0.7015 - val_loss: 0.6275 - val_acc: 0.6749\n",
      "Epoch 115/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5953 - acc: 0.6997 - val_loss: 0.6293 - val_acc: 0.6800\n",
      "Epoch 116/125\n",
      "21816/21816 [==============================] - 0s 10us/step - loss: 0.5961 - acc: 0.6992 - val_loss: 0.6361 - val_acc: 0.6833\n",
      "Epoch 117/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5951 - acc: 0.6987 - val_loss: 0.6555 - val_acc: 0.6836\n",
      "Epoch 118/125\n",
      "21816/21816 [==============================] - 0s 13us/step - loss: 0.5930 - acc: 0.7027 - val_loss: 0.6457 - val_acc: 0.6839\n",
      "Epoch 119/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5918 - acc: 0.7016 - val_loss: 0.6311 - val_acc: 0.6851\n",
      "Epoch 120/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5924 - acc: 0.7043 - val_loss: 0.6481 - val_acc: 0.6845\n",
      "Epoch 121/125\n",
      "21816/21816 [==============================] - 0s 14us/step - loss: 0.5919 - acc: 0.7032 - val_loss: 0.6431 - val_acc: 0.6846\n",
      "Epoch 122/125\n",
      "21816/21816 [==============================] - 0s 12us/step - loss: 0.5916 - acc: 0.7018 - val_loss: 0.6309 - val_acc: 0.6857\n",
      "Epoch 123/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5921 - acc: 0.7003 - val_loss: 0.6275 - val_acc: 0.6852\n",
      "Epoch 124/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5944 - acc: 0.6990 - val_loss: 0.6226 - val_acc: 0.6847\n",
      "Epoch 125/125\n",
      "21816/21816 [==============================] - 0s 11us/step - loss: 0.5944 - acc: 0.7010 - val_loss: 0.6374 - val_acc: 0.6852\n"
     ]
    }
   ],
   "source": [
    "import keras\n",
    "from keras import layers\n",
    "from keras import regularizers\n",
    "from sklearn.model_selection import KFold\n",
    "\n",
    "x = X.values\n",
    "kf = KFold(5, shuffle=True,random_state=2)\n",
    "nn_y = []\n",
    "nn_pred = []\n",
    "fold = 0\n",
    "\n",
    "for train,test in kf.split(x):\n",
    "    fold+=1\n",
    "    print(\"Fold #\",{fold})\n",
    "    \n",
    "    x_train = X_scaledtr\n",
    "    y_train = Y_train\n",
    "    x_test = X_scaledte\n",
    "    y_test = Y_test\n",
    "    model3=keras.Sequential()\n",
    "    model3.add(keras.layers.Dense(100, input_dim=23, activation='relu',kernel_regularizer=regularizers.l2(0.1)))\n",
    "    model3.add(keras.layers.Dense(50, activation='relu'))\n",
    "    model3.add(keras.layers.Dense(25, activation='relu'))\n",
    "    model3.add(keras.layers.Dense(12, activation='relu'))\n",
    "    model3.add(keras.layers.Dense(6, activation='relu'))\n",
    "    model3.add(keras.layers.Dense(3, activation='sigmoid'))\n",
    "    model3.add(keras.layers.Dense(1, activation='sigmoid'))\n",
    "    model3.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
    "    history=model3.fit(X_scaledtr, Y_train, epochs=125, batch_size=1000, validation_data=(X_scaledte, Y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<Figure size 1080x432 with 0 Axes>"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.axes._subplots.AxesSubplot at 0x1828299b860>"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x182894433c8>]"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x18290745978>]"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,1,'Model accuracy')"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0,0.5,'Accuracy')"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,0,'Epoch')"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x18290745eb8>"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.axes._subplots.AxesSubplot at 0x1828f762438>"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x1829074d4e0>]"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x182907804a8>]"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,1,'Model loss')"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0,0.5,'Loss')"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "Text(0.5,0,'Epoch')"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x1829076ae48>"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4gAAAGDCAYAAABp6D4kAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzs3Xd4VMX6wPHvZLObRggpQIAQAiH0EiCANBGpoliRZkFFsYvlXq/ea/deewP1Z8cuiCiKBVEsIAhCgNBChwAJLSQhfZPs7vz+mE1IhQRI9f08Tx6Tc+acnd0Ez3nPO/OO0lojhBBCCCGEEEJ41HYHhBBCCCGEEELUDRIgCiGEEEIIIYQAJEAUQgghhBBCCOEmAaIQQgghhBBCCEACRCGEEEIIIYQQbhIgCiGEEEIIIYQAJEAUolYopSKUUlop5VmJttcppZbXRL+EEEKI+uxsXV+rch4hGhoJEIU4BaVUglIqXykVUmp7nPviEVE7PRNCCCHqL7m+ClE3SYAoROXsBSYX/qCU6g741F536gZ5siqEEOIMyfVViDpGAkQhKudj4NpiP08FPireQCkVoJT6SCmVrJTap5R6SCnl4d5nUUq9oJQ6ppTaA1xYzrHvKaUOKaWSlFL/VUpZKtMxpdQXSqnDSql0pdQypVTXYvt8lFIvuvuTrpRarpTyce8brJT6Uyl1XCl1QCl1nXv770qpG4udo8QQHPdT3duVUjuBne5tM93nyFBKrVVKDSnW3qKU+rdSardSKtO9v7VS6nWl1Iul3su3Sqm7K/O+hRBCNAh19vpa6jwtlVILlVKpSqldSqmbiu3rp5SKdV8DjyilXnJv91ZKfaKUSnFfa9copZpX9bWFqGkSIApROauAxkqpzu4Ly0Tgk1JtXgUCgHbAUMwF73r3vpuAi4BeQAwwvtSxHwIOoL27zSjgRipnERAFNAPWAZ8W2/cC0AcYCAQB9wMupVS4+7hXgaZANBBXydcDuBToD3Rx/7zGfY4g4DPgC6WUt3vfvZinw2OBxsANQI77PU8udpEPAYYDc6rQDyGEEPVbXb6+FjcHSARaul/jKaXUcPe+mcBMrXVjIBKY594+1d3v1kAwcAuQexqvLUSNkgBRiMorfMo5EtgGJBXuKHZRe1Brnam1TgBeBK5xN5kAvKK1PqC1TgWeLnZsc+AC4G6tdbbW+ijwMjCpMp3SWs92v2Ye8BjQ0/3E1AMTjM3QWidprZ1a6z/d7a4Clmit52itC7TWKVrrqgSIT2utU7XWue4+fOI+h0Nr/SLgBXR0t70ReEhrvV0bG9xtVwPpmKAQ9/v9XWt9pAr9EEIIUf/VyetrsfO0BgYD/9Ja293Xy3eL9aEAaK+UCtFaZ2mtVxXbHgy0d1+D12qtM6ry2kLUBpk/JETlfQwsA9pSavgLEALYgH3Ftu0DWrm/bwkcKLWvUBvAChxSShVu8yjVvlzuC+f/gCsxmUBXsf54Ad7A7nIObV3B9soq0Tel1H2YQLAloDGZwsKiAyd7rQ+Bq4Gf3f+deQZ9EkIIUT/VuetrKS2BVK11ZqnXiXF/Pw14AtimlNoLPK61/s79vloDc5VSTTCZ0f9orQuq+PpC1CjJIApRSVrrfZjJ9GOBr0rtPoZ5Utim2LZwTjwFPYS5SBTfV+gAkAeEaK2buL8aa627cmpTgEuAEZhhLBHu7crdJztmuEtpByrYDpAN+Bb7ObScNrrwG/d8w39hnuIGaq2bYDKDhVfjk73WJ8AlSqmeQGfg6wraCSGEaKDq6PW1uINAkFLKv7w+aK13aq0nY6Z6PAvMV0r5uUfoPK617oKZ6nERJedbClEnSYAoRNVMA87XWmcX36i1dmLmHPxPKeWvlGqDmXtXOI9iHnCXUipMKRUIPFDs2EPAT8CLSqnGSikPpVSkUmpoJfrjj7n4pWCCuqeKndcFzAZeck+utyilBiilvDDz
FEcopSYopTyVUsFKqWj3oXHA5UopX6VUe/d7PlUfHEAy4KmUegSTQSz0LvCkUipKGT2UUsHuPiZi5i9+DHxZOGRVCCHE305du74W78MB4E/gaXfhmR7u/n4KoJS6WinV1H3dPe4+zKmUGqaU6u4e7ZOBCXSdVXltIWqDBIhCVIHWerfWOraC3Xdism97gOWYYi2z3fveARYDGzCFZEo/Ib0WM4QmHkgD5gMtKtGljzDDXJLcx64qtf8fwCZMEJaKebLpobXej3lSe597exzQ033My0A+cAQzBPRTTm4xpuDNDndf7JQcvvMS5gL+E+YC+R4lS5h/CHTHBIlCCCH+hurg9bW0yZhROgeBBcCjWuuf3fvGAFuUUlmYqRKTtNZ2zAic+Zhr31ZgKWUL8AhR5yit9albCSFENVFKnYu5YEa4n74KIYQQQohaIhlEIUStUUpZgRnAuxIcCiGEEELUPgkQhRC1QinVGTNXowXwSi13RwghhBBCIENMhRBCCCGEEEK4SQZRCCGEEEIIIQQgAaIQQgghhBBCCDfP2u5ATQgJCdERERG13Q0hhBDVbO3atce01k1rux/1hVwfhRDi76Oy18i/RYAYERFBbGxFS+sIIYRoKJRS+2q7D/WJXB+FEOLvo7LXSBliKoQQQgghhBACkABRCCGEEEIIIYSbBIhCCCGEEEIIIYC/yRxEIYQQQgghxN9PQUEBiYmJ2O322u5KjfH29iYsLAyr1Xpax0uAKIQQQgghhGiQEhMT8ff3JyIiAqVUbXen2mmtSUlJITExkbZt257WOWSIqRBCCCGEEKJBstvtBAcH/y2CQwClFMHBwWeUMZUAUQghhBBCCNFg/V2Cw0Jn+n4lQBRCCCGEEEKIapCSkkJ0dDTR0dGEhobSqlWrop/z8/MrdY7rr7+e7du3V3NPT5A5iEIIIYQQQghRDYKDg4mLiwPgscceo1GjRvzjH/8o0UZrjdYaD4/yc3fvv/9+tfezOMkgCiGEEEIIIUQN2rVrF926deOWW26hd+/eHDp0iOnTpxMTE0PXrl154oknitoOHjyYuLg4HA4HTZo04YEHHqBnz54MGDCAo0ePnvW+SQZRCCGEEEII0eA9/u0W4g9mnNVzdmnZmEfHdT2tY+Pj43n//fd58803AXjmmWcICgrC4XAwbNgwxo8fT5cuXUock56eztChQ3nmmWe49957mT17Ng888MAZv4/iJIMohBDirDmWlUfS8dza7oaoIfYCJ79tP0piWk5td0UIIeqdyMhI+vbtW/TznDlz6N27N71792br1q3Ex8eXOcbHx4cLLrgAgD59+pCQkHDW+yUZRCGEEGdFbr6TK99cSWJaDrcMjeT2Ye3xtlpqu1uiGmXaHVz//hqevKQr1wyIqO3uCCHESZ1upq+6+Pn5FX2/c+dOZs6cyerVq2nSpAlXX311uUtV2Gy2ou8tFgsOh+Os90syiEIIIc6K5xZvY++xbIZENeXVX3cx+pVlrNydUtvdEtXI22puI3ILnLXcEyGEqN8yMjLw9/encePGHDp0iMWLF9daXyRAFEIIUWX7U3KY9sEafo4/gtaaVXtSeH9FAlMHtGH2dX357Mb+eCjFVe+u4pUlO3C6dG13WVSDwgyxvcBVyz0RQoj6rXfv3nTp0oVu3bpx0003MWjQoFrri9K64V+0Y2JidGxsbG13QwghatXxnHysFg/8vM58dsE/vtjA/LWJAAzr2JTdydkoBYtmDMHXZs6fnefg4W8289W6JM5pF8Ssyb1o5u99xq99MkqptVrrmGp9kQbkbFwf2//7B6af2477x3Q6S70SQoizZ+vWrXTu3Lm2u1Hjynvflb1GVmsGUSk1Rim1XSm1SylVpryOUuplpVSc+2uHUup4sX1TlVI73V9Ti23vo5Ta5D7nLKWUqs73IISoH95bvpchz/3KgdS6USyjwOliTUIq9lJD79YkpPLYwi1ltle35Mw8xrzyB3d/HldmX3aeg2/i
kpj2wRpGv7yMXUezTnquQ+m5fBOXxFX9w3nows6sSUjjQFoOz4/vWRQcAvh5efLShGheuLIncQeOc+lrK9h6qPzqcftTcjiSUXauhaj7vK0WySAKIUQDUm1FapRSFuB1YCSQCKxRSi3UWheV49Fa31Os/Z1AL/f3QcCjQAyggbXuY9OAN4DpwCrgB2AMsKi63ocQomZtTkrntk/XMaV/ONMGt8VqOflzLJdL89QPW3l3+V4AZq/YW2ISeuEcuAGRwdXX6WK2H85kXuwBvolL4lhWPpdEt2TmpF4ApOcWcMdn6ziSkce+lGzeuiYGm2fJ9+dyab7fdIh9Kdmk5RTgbfXg9mHtSwReVeVwurhzzjoOZ9hJ3Z5Ppr0Af28rAHuSs7jk9RVk2h20CPAm3+Fi0tsr+eTG/nQKbVzu+d77Yy8uDbcMjaR1kC8XR7ckKS2XXuGB5bYf3yeMTqH+TPtwDVe88SezJvViRJfmRfsPp9u56r1VBPna+Pr2Qchzv/rF2+qB3SFzEIUQoqGozgxiP2CX1nqP1jofmAtccpL2k4E57u9HAz9rrVPdQeHPwBilVAugsdZ6pTZjYz8CLq2+tyCEKG5/Sg6PfLOZDHtBtb3Ga7/uIul4Ls8s2sa4V5cTm5BaYVuXS3PPvDjeXb6Xawe04eKeLfkiNpFMd/+OZtq58cM1XP/BavYknzwrdiYKnC6+23iQCW+tZPQry/hoZQJ92gQyISaMb+IO8tU6MxTzmUVbSc7M44ZBbfltezL3zosrMTdPa82T38dz55z1vPDTDuas3s///b6bB77cxJlMB3j+p+2s2pPKlP7h5Dtd/L49uWjfnNX7sRc4mXPTOaz41/nMu2UAnh4eTHp7Fav3ppZ53fScAuas3s9FPVrQOsgXgGb+3hUGh4W6tQrgm9sHE9m0ETd9HMvTP2wlz+EkNTufa977i7TsAp64pJsEh/WQl6elxjPiQgghqk91LnPRCjhQ7OdEoH95DZVSbYC2wK8nObaV+yuxnO3lnXM6JtNIeHh41XsvhCjjnT/28PGqfexLyWH2dX2xeJzdm/l9Kdksjj/MrUMjiW7dhMcWbmH8mysZ3D6EW8+LZGBkcIkAYunOZL6JO8iM4VHcPSKKzUkZLNxwkM/XHODGIe146acd5Dtd+Fgt3PfFBr64eQCep8hIno4nv4vno5X7aB3kw7/HdmJ8n9YE+dlwujQJx3J4+OvN5DlczFl9gJvPbceDYzvTvLEXTy/aRoHTxf1jOhHZtBEzf9nJ+ysSuH5QBA9c0AkvTwuv/bqTF37aQZ82gUwdGFGp/tgLnCxYn8SRDDuH0+3MXXOAq/qH88Ql3Vi8+TCLtxxmXM+WOJwuvo47yLCOzYoyrJFNGzHv5gFMeXcVE95aSUgjG/3bBjOwfTDnRjVl4YaDZOc7ufncyCp/TqEB3sy7eQBPfh/PW8v2sHRHMp4Wxf7UHD68oR89Wzep8jn/btyjc2KBJK31RaX2eWEenPYBUoCJWuuE6u6Tt9VDAkQhhGhAqjNALO/OsaJH4JOA+VrrwitMRcdW+pxa67eBt8FMwj95V4UQYIqYeHla8LGVXbvO5dL8uOUwoY29WbojmWcWbeU/F3ap8Fy5+U7mr0vkyj5hFa6F98qSHYQ08uLqc9oAMHv5Xjw9FFMHRtC8sTeD2ofwyap9vLt8L1e9+xcX92zJrMm9io6ft+YAQX42bhsWiVKK7mEB9I0I5IM/E+jfNpjPYw8wbVBbuocFMGNuHG8t28Ptw9qf4adU9nP5YdNhRndtzv9d1adE0GzxULw8KZoLXlnGg19tIiLYl7tHdADg5qGRuDTM/GUHP8UfoU94ILH70riyTxgPX9gFD/d5bjuvPev3H+e/38fTrVUAfdqcPFMH8Pzi7bznHnLr7+3JiM7NeWRcFyweipFdmvPdxkPkOZys3J1CcmYel/cOK3F8eLAv394xmJ/iD7NqTyqr9qTw/aZDAHgoGNqhKV1alj/8
9FR8bBaeuqw7Izo34/75mziek8/b1/bhnHY1MwS4AZgBbAXK+wVMA9K01u2VUpOAZ4GJ1d0hmYMohBANS3UGiIlA62I/hwEHK2g7Cbi91LHnlTr2d/f2sFLbKzqnEKISdh3N4o3fd7N+fxp7jmUzMDKYz246p0y7tfvTSM7MY9bkXqxNSOWdP/YS0siL8zo2o1WgD42KVcbUWvOP+Rv4fuMhmvl7MbpraJnzHcmwM/OXnWgNSsGF3VswLzaRi3u2onljU+nSz8uTm4dGMnVgRFHQM3VgBH3aBJKSlceSrUe4dkAEXp4nAtBpg9tyyyfruOHDNTTxsXLn8Cgae3vy05YjvLJkB+d1bErXlgGn9Vll5Tl4a+lubjq3HY3dc/i2Hc7kWFYeI7uElptRbdXEh+fG9+CBrzbx7BU9SgTft54XyYSYMD74M4EP/0zgwh4tePry7kXBIYCHh+KlCdGMe205d81Zz2//OK/MvMXiUrLy+PSvfVwa3ZLnr+xZZg7n6K6hzF1zgD93p7BgXRJNfK0M69S0zHkC/WxM7BvOxL7haK3ZcyybP3Yks27/cW49r+rZw9LO79ScX+4NIjnLTvtm/md8vr8DpVQYcCHwP+DecppcAjzm/n4+8JpSSulqLlduAkTJIAohRENRnXMQ1wBRSqm2SikbJghcWLqRUqojEAisLLZ5MTBKKRWolAoERgGLtdaHgEyl1Dnu6qXXAt9U43sQokKLtxzmx82HarsbZyTP4eTmj2NZvOUw7Zo2YninZvy5O4WEY9ll2v6w6RA2Tw/O79SMhy/qwpCoEJ5etI3Rryyj26OLmfjWyqLjXv11F99vNJ/N3nLOVXg+rSG6dRMe+nozd82NI7fAybTBbcu09bZauG9UB4L8bMz8ZScAC9YnUeDUTOzbukTbkV1CCQv0ITkzj3tHdiDAx4pSiicv7UYTXxt3fra+aI5iaTuPZPL79qMVfl5vL93Nq7/uYt6aEyPg/9hp5vMNiQqp8Lgx3Vqw7qGR9C8nSxbcyIv7RnVk/SOjeG1yr3KHwAb4Wnn8kq4kHc/lh00n/5t7d/le8hwu7jg/qtwCPwPbB+Nns/DVuiR+ij/MuB4tSwTY5VFKEdm0EdcNasusyb3o3OL0soelBfhaJTismleA+4GK0nVF0zO01g4gHSjzR6eUmq6UilVKxSYnJ5feXWU+EiAKIUSFUlJSiI6OJjo6mtDQUFq1alX0c35+fqXPM3v2bA4fPlyNPT2h2gJE98XpDkywtxWYp7XeopR6Qil1cbGmk4G5xZ9waq1TgScxQeYa4An3NoBbgXeBXcBupIKpqCVP/bCVh7/ZgussLQC+62jWWTtXZb35+x52J2fz2pRevDs1hv9d1h0PBV+uSyzRzuXS/Lj5MEM7NKWRlyeeFg/ev64vX902kFcn9+KeER2IP5TBmJnL+PeCTbz08w4u69WKYD9bucEmwLcbDtIp1J85N51D7/BAlu1IZnD7kAqHLvraPJl+bjuW7Uhm7b40Pl9zgF7hTejQvGSAYfFQ/HN0R0Z3bc7kfifmHwf52Xh9Sm/2pebwzy82lim+sutoJuPfXMmNH8aSdDy3zOsfz8ln9ooEwASnhf7YeYyOzf2Lsp4V8TjFfE2LhzppgZahUU2JbOrH7BV7KyxYczwnn4/+TODC7i1o36xRuW28PC2c16kZ3244iL3AxeW9y53GLeoYpdRFwFGt9dqTNStnW5k/Fq3121rrGK11TNOmZbPHVWXmIMoQUyGEKE9wcDBxcXHExcVxyy23cM899xT9bLPZKn2eBhEgAmitf9Bad9BaR2qt/+fe9ojWemGxNo9prcuskai1nq21bu/+er/Y9litdTf3Oe+o7qEzouHItBcwL/bAWXnSfTTTzr6UHJIz89iQePzUB5zC4i2HGfHSUq58ayW7jmZW+rj03ALeWrqb9JyKq4o+s2gbz/24jeM5JZ9S7Tqaxeu/7WJcz5ac17EZYIqIDI5qylfr
kkoEq3GJxzmUbmds9xNDRT0tHvQOD2Rcz5bMGBHFz/cMZUC7YD77az/RrZvw9OXdiQjxKzeDmJiWw7r9xxnXsyU+Nguzp/blyj5hPHDByRfavuacNgT52bhvXhw7j2YxqVT2sNAl0a1465qYMtm4fm2DePCCTvy45TBvL9tTtP1wup2ps9dgtZj76/f+2FvmnO8t30tWnoNJfVuz5WAGO45kYi9wsjoh9aTZw7PFw0Nxw+C2bExMJ3ZfWrlt3l+RQHa+kzvOP/k8y8Ihv+1C/IiWwjD1xSDgYqVUAqYq+PlKqU9KtSma2qGU8gQCgIrLAJ8lXlaLLHMhhBCn4cMPP6Rfv35ER0dz22234XK5cDgcXHPNNXTv3p1u3boxa9YsPv/8c+Li4pg4cWKVM4+nozrnIApRZ9gLnEz7MJbVe1PZeyybf405eSACppy/S2t8vSxlhuCtTThxg/5z/JFTlvg/GadL88Li7bQI8GZ3chZjZy7n7pFR3Do08qQZJa0198/fwOItR1iwPomPp/Wnqb9XiTbJmXm8uXQ3AB+v3MeNQ9oRExFIgI+VJ7+Lx9vqwSMXlSw0M75PGHfNWc+qPSkMbG8Cn0WbDmG1KIZ3bk5FQgO8mX1dX1bsSqF7qwC8rRYigv1YvqvsELbC4acX9WgBmKGGz1/Z85SflZ+XJzcNacezP27D12bhwh4tT3lMadMGt2Xd/jSe/XEbf+w8Rofm/qzYdYz03ALmTj+H2cv3MnfNfu4a3p4mvubJ3vGcfN5fYTJz943qyBdrE1mwPokB7YLJd7gY0uHMszCVcXmvMJ77cTuzl++lb0QQKVl5PLNoG4cz7NgsHvy1N5VRXZpXuH5hoWEdm9LY25PJ/cJlWYl6Qmv9IPAggFLqPOAfWuurSzVbCEzFTNkYD/xaEw9RvT0t2PMlQBRC1AOLHoDDm87uOUO7wwXPVPmwzZs3s2DBAv788088PT2ZPn06c+fOJTIykmPHjrFpk+nn8ePHadKkCa+++iqvvfYa0dHRZ7f/5ZAAUTR4+Q4Xt36yljUJqfQMC+DtZXu4qEeLCguVHEjN4ZUlO1mwPpHCJFqgr5Xv7xpCyyY+AMTuS8PL04PurQL4Of4I958i4Pxh0yFiE9J4+KLOZW7Iv91wkJ1Hs3htSi/6tw3mkW8289yP2+kc2phhnZpVeM7PVu9n8ZYjXN67FYs2HWbCW2Zx81buPsKJ+XEvXtmTH7cc5uUlO0qc45nLu5cJKkd1aY6/tyfz1yYysH0IWpsqnUOimhYVZqmIUorBxbJpbUN8+XJdHjn5jhILvX+38RA9wgJoE+x30vOV59oBbZi9Yi+jujQvURinspRSPDe+J8F+XsQdOM5nq/ehULxzbQzdWgUwfWg7vlqfxMcr93Hn8CjALO+Rne9gxogomvp7MSQqhG/WJ5HvcGGzeNAvIqjK/TgdPjYLU/qH89bS3Xy5NpHnFm8jLaeALi0aU+B0ERHiy72jOpzyPP7eVlY+OByfCqrLivpDKfUEEOsemfMe8LFSahcmczipJvrgbfXA7pAhpkIIURVLlixhzZo1xMTEAJCbm0vr1q0ZPXo027dvZ8aMGYwdO5ZRo0bVeN8kQBQNWr7Dxb3z4vhtezJPXdadsd1DGfHSMh74chMLbhtIboGTN5fuZmNiOjaLBxoTVClllloID/IlLaeAWb/s5NsNB7l5qKneGJuQSs/WTRjTNZQnvosn4Vg2ESF+JKbl8K8vN3L/6E5Fa7rZC5w8unALyZl5jOravEQ5/wKni5eX7KBzi8aM7dYCDw/FzEm92PryUp76YStDokLKLVqy80gmT34Xz5CoEF4Y35Or+odz3ftrmPjWSn65b2hRxvOPnccI9rNxWa9WXNEnjP0pORxMzy0ablpedVFvq4WLerRkwfpEroxpzau/7iTpeC73VSLwKC0ixASACcdyiuYW7j2WzaakdP4ztnOVzwcmi7jk3qFnFNw08vLkyUu7ASaDW+B0
FS3F0Sm0McM6NuWDPxO4+pw2fLRyH+/8sZcLu7comu94Wa9WzJgbx5zV++nbNrDcZUGqy7UD2vDOsj3c98UG2oX4Mfu6vqdVldXvNIJrUTdorX/HVPZGa/1Ise124Mqa7o9UMRVC1BunkemrLlprbrjhBp588sky+zZu3MiiRYuYNWsWX375JW+//XaN9q1a5yAKcSbsBU6e+mErt3+2jnf/2MO6/Wk4q1DE5UiGnUlvr+S7jYf499hOTOkfThNfG49f3JVNSenM+DyOYS/8zuu/7eZ4TgFHMu0kpeUysW9rlv1zGI+O68r1g9py78gO9AgLKKoemZPvYMvBDGLaBDKyixly+XP8EbTWPPjVJlbsSuHfCzYV9XXu6v0kZ+bha7Pw6q87S/Rx/tpE9qXkcN/IDkVFTGyeHjxwQSd2Hs3ii7Uli8WAqTx619w4/GyevDihJx4eij5tgnhlYjSJabksiTdVOF0uzR87kxkcFVJ07vBgX85pF8yYbi0Y061FhcMLx/cJw17gYvI7q9h2OJNHx3Xh0uiqFzOJcGcIE1JOzEP8boNZmeZC9/DS0xHgYz3pUg9VYfFQZdZpvGVoJCnZ+Qx69ldeXrKDkZ2b89jFXYv2j+zSHF+bhZx8J0OiamZ4aaEWAT7ceX4UV/UPZ+Gdg097yQ4hzpbCKqZSEkAIISpvxIgRzJs3j2PHjgGm2un+/ftJTk5Ga82VV17J448/zrp16wDw9/cnM7PydSrOhDxCFjVGa13p+U5Jx3O59ZO1bExMp0WAd9GctZ5hATw7vscp51it3ZfKLZ+sIzvPwWtTenFRsblqY7uHMqJzc77feIiYNoG8N7VvUbavImO7t+CZRds4kJrDgbQcHC5N34ggWgf50inUn5/jjxDoZ+OPncc4v1Mzft12lM/XHODy3q14Y+lu+rUNYkTnZjz1wzbW7kujT5tAjmbYmblkJ9GtmzC8c8mhpKO7hhLTJpAXf9rBxT1blsj2zPplJ1sPZfDe1Bia+Z+onHlex2a0DPDm89gDXNijBfGHMjiWlc+5pxHA9A5vwqS+rQlp5MX0oe1OObS0IkUZxGIB4rKdyfQMCygarlsX9WsbxLCOTUnNKeA/YzvTr23JIaS+Nk/GdAvlq3VS3tepAAAgAElEQVRJNVKgprQZI6Jq/DWFqIi31QOXhgKnxuYpc1qFEKIyunfvzqOPPsqIESNwuVxYrVbefPNNLBYL06ZNK7pvfvbZZwG4/vrrufHGG/Hx8WH16tVVqoBaVRIgimqXaS/gpo9i2ZSYTteWAXRrFcCEvmElgrzYhFTe/zMBq4fC18uTHzcfJt/h4u1r+jCqayhHM+z8tv0oz/24nYtmLefW8yKZMTyq3OGXRzPtXPf+GoL8bHwyrT8dQ0sug6CU4uWJPdlyMIP+bYMqFbRe6A4Qf9h0iHz3XJve7sI0o7o057XfdrHtcAZ9IwJ559oYJr+zihd+2k5aTj5HMvJ4aUI00a2b8Mbvu3nt1508N74nU979iwx7AW9d06dMH5RS/PvCzlz+f3/yf7/v4p+jzRzHuAPHeeP33UyICStTMMbioRjfJ4xXf9vFweO5/LHTPJEa0qHqAYxSimeu6FHl40pr5OVJU3+voqUu7AVONhxI5/pBEWd87uqklOL96/udtM1d50fRNtiPLmdpTUAh6qvCDLzd4TxrmX0hhGiIHnvssRI/T5kyhSlTppRpt379+jLbJkyYwIQJE6qrayXI/8lFtcrNdzLtg1hiE9IY270FDpeLz1bv4+JXV/De8r24XJqPViYw6e1VrNydwrr9x1m8+TBhgT58c8cgRrnnyDVr7M3EvuEsuXcoF/dsyau/7uK/328t9zX/+91W8gpczL6ub5ngsJC/t5Vz2gVXOqPZOsi3aJjpmn1pdGjeiABfk1Ub2SUUlwa7w8UzV/TA4qF4dFwXjufk8/zi7fRpE8jAyGD8vDyZNrgtv21P5vI3VpCUlsv711WcvewdHsgl
0S15/bfd3PN5HEcy7Nw3L47Qxt48VKryaKHxfVqjNXy5NpFlO5Lp3KJxiSxjbWgb7EfCsRwA1u8/Tr7TVSYjVx9FhPhx5/AoqQIq/va8CgNEmYcohBANgmQQ/060BmcBeFZfSrq47DwHt3yylth9qcyc1ItxPc0wz9TsfO6fv5Env4vno5UJ7EvJYXinZrw0MZoAn5MPZQz0s/HSxGiC/Gy8u3wvXVo2ZkLMibXwlu1IZuGGg8wYHkVk0/IXCj9dF3ZvwdOLtuHl6cEVfcKKtndr1ZghUSGM6tK86DW7tgxgcr9wPv1rP3ee374oiLh2YARvLdtDcmYes6/rS/9iBWvK8/z4nkQE+/H6b7v4fuMh8p0uPp7Wr8Ihn+HBvgxoF8zcNQc4mmnnhkFtz9K7P31tgn35fYepprp6bypKQUwNVf0UQlQ/b3fW0J4vlUyFEKIhkADx72Tzl/DdvXDPJvCufGGLHzcf5v0Ve5kxIoqBkWa44uakdJ5fvB2Hy0Xv8EC6twogPbeAvcey2Z2cxY4jWexLycal4YUrexYFhwBBfjbeubYPn/y1nxcWb2fG8ChmDI8qKqRSGQ9c0IlthzN5aMFm2jdrRO/wQOwFTh7+ZjNtQ/y49bzIyn8ulTTWHSDmOVzEtDmx7qFSio+n9S/T/uGLunBJdCv6Rpxo29jbykc39MPm6VGp4iI2Tw/uGdmBkV2a8/A3m+nXNuiURVEm9m3N3Z/HAXBuDa3PdzIRIX4kr00kK8/BX3tT6Bza+JQPAoQQ9UfxIaZCCCHqPwkQ/052/gR56ZC0DiKHVeqQY1l5PPDVRtJzC5jyzl9c2KMFAT5W5qzeT5CvjdAAb/7v991FFTs9PRThwaZwy8U9WzIgMrjEsg6FlFJcc04bru5/egt1e1o8eHVyLy5+fTlXvfMXTf29cDhdHEy389mN/ctUpTwbWgf50jMsgA2J6cS0OXUGzNtqKXcoZa/wwHJan1y3VgEsuG1QpdqO6RaK/zeeOJyamIiqv9bZ1tZdqGbX0SzW7U9jUt/wWu6REOJs8pEhpkKIOq4qhRIbgjOtKi0B4t/JgdXmvwcrHyA+/m08OXlOvr1jMEu2HuGN33fjcGmuGxjB3SM6EOBjJSffwbbDmQT52ggL9Cm3cExFzuQfa6CfjQ+u78d7y/eSm+8k3+FiekQgA9tXX1XJG4e047uNB2kdVHcrcHpbLdw3sgPHcwuK1kOsTYVLXSyMO4i9wEX/BjD/UAhxQlEGsUCGmAoh6h5vb29SUlIIDq587Yn6TGtNSkoK3t6nX4NCAsS/gS0H04ndspOpaXsBcBxYW6lf/C9bj/DthoPcO7ID3VqZ6qNT+oWT73QRFuhb1M7X5llU0bOmRTZtxFOXda+x1xvXs2WJ4bJ11XV1YO5hoYgQ87eyYL1Z07GvBIhCNCjeVvccRMkgCiHqoLCwMBITE0lOTq7trtQYb29vwsLCTt2wAhIgNnBfxB7gn/M3MtxjLVNtkKhDCNx/6gAx017AQ19vpkPzRtwy9MR8vmaNa7cipqh/fG2eNG/sxZGMPNo3a0RII6/a7pIQ4izyliGmQog6zGq10rZt3XlwXh/IMhf1XHpuAQeP55a7b8vBdB76ejMDI4OZNbgA7eHJEu8x+NkPQ9bRCs/pcLq447P1HM3M45kresi6VuKMFQ4zbQjLWwghSirMIOZKgCiEEA2CZBDroZSsPN5cupuVe1LYcjADrSEi2JfBUSEM7dCMQe2DKXBobv1kHYG+NmZN7oXf/P9CaHeatxoOaz5h38Y/aDPwijLn1lrz6MItLN2RzNOXd6+1oaOVojXEvgcJy+Gyt8CzkpkpZwGsfA0OrIG+N0DkcPgbjEmvTW1D/Phrb6rMP6wpWkP2MWhU+1Vsi2QdBZ9AsEgF24amcK5znsxBFEKIBkECxHomN9/J9R+sYeuhDPq0CWTG8Cgae1v5c/cxFqxL4pNV+/Hy9KCpvxdHMuzMnT6AEB+LqVza
62oGDhqGc7ViV1z5AeI7f+zh07/2c/PQdkzuV8eqTWYcNMFd45bm5veb22H3L2Zfj0nQccypz5EYCwvvgqNbwLsJbP8ewgfCwDuhzQBzA1uTMg6afth8T922puSkwuq3Yd3HENYHLnwJ/M6s8E9Uc388lGQQARO8QcmHElrDvhUQ2BYCWp35a6x+GxbdD50ugvMfgqadIGU37PoZ0hIgLxPys2HQDGjV+8xfz5EPWYchoHXZhy3pifDzo7B5PngFmAJZUSOhzUDzfpUCezokrgENRI048/6IGuVjk2UuhBCiIZEAsQ7SWmMvcJGV5yA330logDc2Tw9cLs3dn69nU1I6b13dh1FdQ4uOuWFwW/IdLtYkpPLL1qOs2HWMJy/pRp82gXBoIxRkQ1hfAgICOewdgeeRODLtBfh7W9Fa8+fuFGb+spPVe1O5oFso/xrd6ey+KWcB7F9pltpIWAGdL4LB91Yuc+csgKXPwh8vgnYBymQhlAUueA5+/R9sXXjqAHHL1zD/emgUCpM+g/YjYN1HsOx5mDvZnLdZZxj1X2g//Mzf86GNsOd36D7eBLWlJSyHjy8Diw06joVul0PEYPDyP/W5Hfnuz8D9+WkNRzZD0toTAUjLaGjZq+Rx+TlQ4B6S7Gkr+Vr52eZzXv0OFORAxBDYvgj2/Wk+54Ic8/s7fgDO/Sd0GmuOS9kNPz8CHhY4934I7Vamu1f1D6dfRBAt9DH46r+QmwbDHizbv0IuF3icpaHNWUfNg4H0RMhIAkde2Ta+wTDgNrCZobBoDRvnwcH15melILS7+Ztp1Kzksc4C2LLAvE7ncRDYpuz5Hfmwd5n5/Hb+ZNp2Hw8x15tg7df/woG/zPqkl74BnS4seXz2Mfjtf3BsJ5xzG3S8wPRJa0jeDoERYHXPD87PhqXPQVA72LMUtn0PAWGQfsDs92psvnKOgcsBkz497Y8WMP+ev50BKTtNgBg1EkI6mPeVeRjiPjP/bgfcYQLBnT9D/NfmWL9m5uHD0a2AhrC+EiDWQzIHUQghGhYJEOug/3y9mc/+2l/0s7+XJ8M7N8NDKRZvOcLDF3UpERwWsnl6MKh9CINKL/OQ6F7eonVfAKzhMXTdsZh3lu3B18uT7zceYlNSOs0be/HYuC5M6d+mcovWp+4FD09o0vrENq0hL8Pc6Bba/Rt8dw+k7QUPKwS3h1+egIxDJvBQCvb8Zm7ie11dMpg6Eg9f3wqH4qDnFJN1SE80N5p9p0FIlMmObvve3KhXNHztYBwsuMXcgF41H7wbm+39boJe15jPaP9fJmBc/G+IXHX6w06zj8GvT8LaDwFtbuz73giD7zmRiUveAXOvMjf24QMg/hvYNA+UBzTvBs27moDMngG+QdDlUogaZW7yV8yEjZ+bz7h1fwhqCzt+gmPbS/ZDWeCil6HPVPN7WfMuLP4PON0BkvKA9iMh5gYT3H1/LxzfDz0mmr426wxHtsBX001gDeDfAqy+JqDuconp6x8vgsU9vDd+oQl0u15ujg+MAHs63hlJdN/5nek72gRibw+DXldBnxvM35B3AGz7DmLfh/2r4NYV0LRj5T5ze4b5u8jLgBbRJlhyOU2w++uTkJ9l2lls4FnOEiV5GSZomfSpeY/f3wvrPwFbI/M5ugrM7wOgRU/zGs27mmDzr7cgw1Ro5af/QFg/83caEGYCzz2/mc/Ffty8druhEH6OCUDXfej+XFvC6KfN73XuFBMERo00+w5vhmUvmIc8jULNZ98qBpp1gp1LTOYucjhc9YX5Pa5+2wR/k+eYf2t/zjKB5aAZ5pyBEe6+PgSr3oTsFPAru1ZphbSGzENwNN783a77CJqEw4jHTRZww+emr2D+VjqMhpFPnAictTbH7l9lguKcVPO31Lo/hMVUvh+izvD2LKxiKkNMhRCiIVBnupBifRATE6NjY2NruxuVdunrK8i0F3DdwAi8PC3E7kvl5/gjpOUUcM05bXjikq5VW8flq5th96/w
jx2gFHr1O6gf/sEg+0ySaEqPsACu7BPGlTGtK7/A/JF4eHeEuRFs1gXanmsySwf+MjenTTuZm9HsY7BhjslmDH/UZGBsfibj9Ocs6DDGDLM8vNGc1+oLQ+6FNoPNPMFt35sAadxMk50pz7YfzE3z1V+Vn/nLPALvDAMUTP+tbAaouLUfmGzIjb+c/GY1LQGWv2wChF7XmIAg6wj89Sasec8EE/1uhujJsOoN8xlYbND1Mug23gQgBTlw4xJzw+7Ih33LzU3z/pUmK2fzM5metATzmdoameyQp5cJ4pz5pn1agnn9bpebz9fiZfZ9d48ZgjtohmkT/40JCKNGmfeQkWT6lXXE/BwcZT7niEEl36sjD7b/AEGRJovmcphAb+lzJtjsfDGMfd70a8Us8xkUBlPKw531det6OYx83ASDy543AYqroOTrBYRD+n6TyR14Z8W/AzDB05xJJ7JjcCIIy042GdX2I0xmM6gt+IaUn5nctQTm32CCwSbh5oHEuffDeQ+a9lrD4U0m+7fnd/O9/bg5ts0gGHS3eVix5SuTTTy67cT7svqZjGC3K0y/rO4ANfc4bPrCfEbRV5mg1pFnArfVb5fsX+RwGPO0+XcU95n57PMyof350Ki5+czP/ScMvAtm9jAB5NXzT/7ZHdkCbwyEC56H/tNP3nbtB+bfYnqS+azzMsx2ZTGZ1/MePJF9deSbgNzLv1bmGyql1mqtJdKspLN1fWz/7x+Yfm477h9zlkefCCGEOGsqe42UALEOGvHSUqKaNeKNq/sUbXM4Xew8mkXH5v6Vy+4VN6u3Cdgmf2Z+TlwL757P0p4vEHneVSXWNCTzsBmC6B1gghNLOUlme7rJ/uRnmUzH7l9g30qTBWp9jrmJ3bfcDD1DmwDl3H+euDEutPxlWPKYCUwG3WWO/fVJM1wUzNy8fjdB/1tOPgeuwA7PR5ob8ItnldyXmwafXmluhm/40WR/TsaeAS92hO5Xlj0XmKGBS5812UHlAZ7ekJdu5lJlJJngqfM4GPYQNO1w4rjkHeYmfuM8yM80Qcx135s5fqfidMDepeZzadQc+t5UsviII98MFy1zXAF8f5/JUikLjHgUBtxZMkByFpjgLyfFBCmVLfQDkLrHBAxth5Tcnp9tAqSj8aaNX4jJpoV0NFmv4tITTbCVnmj+9toMgHbnw2t9TPspcyt+/bQEeG+UeW/n3GJew+JlPqsdi02wNeq/ZihnZR6opOyGOZNNXy57w2S1KqK16W9+NoS0L7vf5YLso6ZNSIeqzzE9uu1EAGprZLKVpecsapfJGGoNC+8wGc/2I0ywe9Ov0KoSf1tvDjZZ/em/Vdzm4Hp4+zzz77ppJ/fvsoPJEDfrYh7g1CESIFbN2bo+dnt0MVfGhPHouK5noVdCCCGqQ2WvkTLEtA7KznPg51XyV+Np8aBzi8aVO4HWJoOTssvMg0vdDb2vObE/tBt4WBnaKBGKB4cZB2Fm9IkhiDZ/c4MeMfhEG5fLDNU8vg+mfmsyV4PvLmfO2D9NhsORV3FwN/gek01r3OrEsRM/NoFl6h6TbfNqdOr3a/U2w9i2fW8KqhQGtUe3mczi8QMwfvapg0MwQ0+7XAqbvzIZm+Jz0tZ/bLI7+dnQ+1oT9Ho3MVmjTV+Y4hsD7oDgyLLnbdoBLnrJDLWL/9oElJUJDsG8n/bDK54XWV5wCCZ7M26m+f0FRZb/ehbryQOhkwlqZ75Ks/mZ16rM+wsIM1+lRQw2GU+X0wRBpWUlm/mbjjwT+DfrfGJf54vM0GWo2jDh4Ei4eZl58HGqojxKQeMWFe/38AD/UPN1OkoH0uW9vrKc+H7siyabumsJdLigcsEhQM/JZkh18vbyh/NqDYsfMkNlp/9ecui4EMV4Wz1kiKkQQjQQEiDWQVl5Dhp5ncav5scHYcNcM/zL5Tix3eprMguFPL1MRiJpbcnj135ohiaOfcE9f+stM//sluUmS6A1/PZfk3Ea86wJDguVN2zP
y//UBVeKz18sFDGo7DDHU+lyCWz+Evb/aYb8bVkA395tgsep35rMVGX1uho2fGYClOgpJlP19e0mK9pmEFz0SsnsYK+rzVdleDWqfNuzQSnoMaHmXu9saTPYzG07sgVa9Ci5z+mAORPNHNapC0sGh4VOd/6o1ftEsZf6xOptHq4s/jec/0jlj+s2Hn562Px/Y8SjZfdvX2T+7se+IMGhOClvq4U8KVIjhBANggSIdYzW2p1BrORcwBMHmrlJTcJNpsmrsZlz1ayLyfKUngvU9lxY+bopXhESZYYarv3ABJL9bjJtwvvDuyPh27vgyo9MAY5V/2cCnP43n5X3e9a0H2GGbf70sJl7lpFkColM+rT8DNXJtBloPrP1n5jP8ZvbzOc7bpaZb3i2qmuKihU+INi3omyAuPZ983Djivegdb+a71td1SQcJn5StWP8m5v/X2z83CyHUTxb6yyAnx82w0n7XH92+yoaHG+rRZa5EEKIBkLudOsYe4ELl6bMENMSXE4zdLS49ANmzlKf62DEY6bQS9fLzLCx8gpFDLzLzAn85XHz8/YfTDXEvtNOtGnZC4Y/Alu/hXfPN8Fh/1tg3Kt1b2F5m7sQyKE4854nfmIKwFQ1OATz3npdbYKTz68yweLNy0w1UAkOa0ZAGDRpY5YCKa6wQmzboWbOqThzPSebByqlP+u1H5hh6iOfLH8ushDFyBBTIYRoOORut47JyjNDQ/1PFiCufgfeGgLHdp3YdmiD+W9l5tmBKXIy8C4T/B1YY5ZACGh9osJloQF3QLthplDFsP/AmGfqbpA0bibcuw2uWWAKxZxJBcWeU8zSA/1uhhsWm2ysqFkRQ0yQ7ip207nkUTMHdOzzde8hRX3VYYyptLrlqxPbtDbLd4T1M/N7hTgFb0+LrIMohBANRB290//7ynYHiCfNIG5wVyPdV+yJ/6GNpqpmsy6Vf7EBt5uFqhfeYRbx7nNd2YIgHh5mbtO0n2Ho/XX7ptyr0ckLh1RF4xZwbzyMfa5qlT3F2RMxyFShTd5qfk6MNcN+z7m18usjilOz+ULHC8xajU730hwH15uF73tdXbf/zYs6w9tqIVcCRCGEaBAkQKxjsk4VIB7ddiJbuH/Vie2HN5plAapSTt+rEZz3L0jeZkrd9762gnb+f8+5XnJjXLvauOchJqwwxWrmTTULxQ/9V+32qyHqdjnkppolQsDMSbTYTr/CrfjbkSGmQgjRcEiAWFfk50Dq3qIAscIqphs/N5nC1v3NguqFDm0sW8yjMnpPNVnHHhNPvoC8EDUtsI0Z9rzuQ5g9xlTmvWreqSvjiqqLdBe22rzAZBE3zTdDT32a1HbPRD0hVUyFEKLhqNYAUSk1Rim1XSm1Syn1QAVtJiil4pVSW5RSn7m3DVNKxRX7siulLnXv+0AptbfYvujqfA81ZuVr8FpfLElrgAoyiC6Xe72986HzxWb5hczDZk24zIMQehoBosVqCrCUtyi8ELUtYjAc2WwCxZt+qfwcW1E1Vm/oOBa2fQs7f4KcY+ahkRCV5G2VOYhCCNFQVFtpOqWUBXgdGAkkAmuUUgu11vHF2kQBDwKDtNZpSqlmAFrr34Bod5sgYBfwU7HT/1NrPb+6+l4rclLBVUC3FXcSwuM0Km+Zi/0rTbXS4Y+Yhc/BDDMtXEz+dDKIcGbFXISoTufcBj6BcN4Dsg5fdet2OWycC4v+Bd5NIGpkbfdI1CPeVg/sDhliKoQQDUF11i7vB+zSWu8BUErNBS4B4ou1uQl4XWudBqC1PlrOecYDi7TWOdXY19rnzANPb6z5Gbxmm4WfdVzZNhs/N9UGO11o5gd5+pgAsXBoaGj3mu2zENWtRY/Tf/AhqqbdMBMYph8w6x5KcSZRBd4WD8kgCiFEA1GdQ0xbAQeK/Zzo3lZcB6CDUmqFUmqVUmpMOeeZBMwpte1/SqmNSqmXlVIN4y7GkQd+Tfmj00Oc47GV4FXPlNxfYIf4r83yDTY/k/UL
izFZxcMbzSLZPoG103chRP3naYPOF5nve06q3b6I+iPzCDwRTL+0heQWONFa13aPhBBCnKHqDBDLKwFZ+srhCUQB5wGTgXeVUkVVEZRSLYDuwOJixzwIdAL6AkFAuSUNlVLTlVKxSqnY5OTk030PNcdhB4uNuMDRfOY4H+vq/zOl5gstfwns6dD7mhPbws8xweGB1TI3Swhx5obcB8MfNUWwhKgMqze4HPhiR2vId8owUyGEqO+qM0BMBFoX+zkMOFhOm2+01gVa673AdkzAWGgCsEBrXVC4QWt9SBt5wPuYoaxlaK3f1lrHaK1jmjZtehbeTjVzmCGm2XkOXvG4BuXXFL69G1xOOBIPf7xkikZEDD5xTPg5oF2QkQShEiAKIc5QUDsYcq8s8SIqz2qWVvImH0CWuhBCiAagOgPENUCUUqqtUsqGGSq6sFSbr4FhAEqpEMyQ0z3F9k+m1PBSd1YRpZQCLgU2V0vva5ojDzy9yM53oL0aw+in4FAcrH4bFt4J3o1h9NMljwnrZ5a8AJmnJYQQouZZrOBhxVvnAchSF0II0QBUW5EarbVDKXUHZnioBZittd6ilHoCiNVaL3TvG6WUigecmOqkKQBKqQhMBnJpqVN/qpRqihnCGgfcUl3voUY57ODpRabdYdZA7HYFxH0KPz4IaLj8HfALLnmMd2No3hUObzq9JS6EEEKIM2X1xQsTIEoGUQgh6r/qrGKK1voH4IdS2x4p9r0G7nV/lT42gbJFbdBan3/WO1oXOPLA5kt2ngM/L4sZ4jX2BXhjoBlW2v3K8o+LGg0FueAfWrP9FUIIUWOUUt7AMsALc+2er7V+tFSb64DngST3pte01u9We+dsvni5cgGwOySDKIQQ9V21BoiiCpx54BlEdo4TP5v71xIcCbevhkbNK54TNOzfMPR+mTMkhBANWx5wvtY6SyllBZYrpRZprVeVave51vqOGu2Z1Qebe4hpbr4EiEIIUd9V5xxEURXuOYhZeQ78vYvF7YFtTJW4inhYZL0yIYRo4NzF2bLcP1rdX3VjTQmrH1aXHUDWQhRCiAZAAsS6wmEHiylS4+cliV0hhBAlKaUsSqk44Cjws9b6r3KaXeFeJ3i+Uqp1OfvP/jJQVh+sRUNMZQ6iEELUdxIg1hWFVUzzJEAUQghRltbaqbWOxiwb1U8p1a1Uk2+BCK11D2AJ8GEF5zm7y0DZfPF0SAZRCCEaCgkQ6wr3OohZee4qpkIIIUQ5tNbHgd+BMaW2p7jXCAZ4B+hTIx2y+mJx5gASIAohREMgAWJd4cjDZbFhL3CdKFIjhBBCAEqppkqpJu7vfYARwLZSbVoU+/FiYGuNdM7qi4c7g5gny1wIIUS9J5FIXeGwk48NwCxzIYQQQpzQAvhQKWXBPNydp7X+rtTawncppS4GHEAqcF2N9Mzqg4fDZBBzJYMohBD1ngSIdYHTAdpJvrICyBBTIYQQJWitNwK9ytlefG3hB4EHa7JfANj88HC4i9RIgCiEEPWeDDGtC5xmykiedgeI3hIgCiGEqCesPlBQOAdRhpgKIUR9JwFiXeAwAaJdm8BQqpgKIYSoN6x+KJcDH4sTu0MyiEIIUd9JgFgXFJYH1zLEVAghRD1j9QEg0NMhQ0yFEKIBkACxLnBnEHMLM4hSxVQIIUR9YfMFoLGnQ4aYCiFEAyABYl3gDhBznCYwlAyiEEKIesPqB0ATz3zJIAohRAMgAWJd4B5imu00y1vIMhdCCCHqDfcQ08YyxFQIIRoECRDrAncGMdslRWqEEELUM+4hpgEWySAKIURDIAFiXeBe5iLL6YnVovDylF+LEEKIesJqAkR/S4HMQRRCiAZAIpG6wJ1BzHJ44OfliVKqljskhBBCVFJRgJgvy1wIIUQDIAFiXeCeg5jhsEgFUyGEEPWLO0D0U5JBFEKIhkACxLrAnUHMKLBIBVMhhBD1i3sOop9HnsxBFEKIBkACxLrAHSBmFnhIBVMhhBD1S1EG
UYrUCCFEQyABYl3gHmJ6PN8iFUyFEELUL+4A0VdJBlEIIRoCCRDrAncGMa3AQ4aYCiGEqF88vQCFLyGHBJEAACAASURBVHkyB1EIIRoACRDrAvcyF8fzlGQQhRBC1C9Kgc0Pb/KwO5xorWu7R0IIIc6ABIh1gTuDmJqvJIMohBCi/rH64EMeWkO+U7KIQghRn0mAWBc47GgPK1l5LgkQhRBC1D9WX2zaPOy050uAKIQQ9ZkEiHWBIw88vXBpZIipEEKI+sfqi5c2BdfsDilUI4QQ9ZkEiHWBIw9t8QKgkSxzIYQQor6x+WJzuQNEqWQqhBD1mgSIdYEjD5fFBkgGUQghRD1kLR4gyhBTIYSozyRArAscdpweJoMoAaIQQoh6x+qLpztAzJUMohBC1GvVGiAqpcYopbYrpXYppR6ooM0EpVS8UmqLUuqzYtudSqk499fCYtvbKqX+UkrtVEp9rpSyVed7qBHOPJwe5m1IkRohhBD1js0XqzMXgJx8Ry13RgghxJmotgBRKWUBXgcuALoAk5VSXUq1iQIeBAZprbsCdxfbnau1jnZ/XVxs+7PAy1rrKCANmFZd76HGOPJwKAkQhRBC1FNWXyxOk0HMyZMMohBC1GfVmUHsB+zSWu/RWucDc4FLSrW5CXhda50GoLU+erITKqUUcD4w373pQ+DSs9rr2uCwU6BkDqIQQoh6yuqLxZEDQLZkEIUQol6rzgCxFf/P3p3HyVXX+f5/fWrtJd3pdNKkQwJJSMIOiRAiCCqbCDqCo1wNM14FlwxeHb3jMsLoIMN9zB303p8rXhmUTURcR42Kw6CCLLIFDEsSICFI0iSQTied3mqvz++PczopOp2k0t3V1ZW8n49HPbrq1DmnP1XdkHr353u+X9hY8rgj3FbqSOBIM3vQzB42s/NLnqszsxXh9sEQOBXodvfBf32GOycAZrYsPH5FZ2fn6F9NJeUz5CwOqIMoIiI1KF5PJB92ELPqIIqI1LJKphEbZpsP8/0XAGcCs4D7zex4d+8GDnf3TWZ2BPAHM3sa6CnjnMFG9xuAGwAWL1487D4TRj5DlkkANGqZCxERqTWJRqyQJkKR/ow6iCIitaySHcQO4LCSx7OATcPs80t3z7n7i8BzBIERd98Ufl0P3Au8DtgKtJhZbC/nrD35DBmCDmJjQh1EERGpMfF6AOrI0q9rEEVEalolA+JjwIJw1tEEsBRYPmSfXwBnAZjZNIIhp+vNbIqZJUu2nw6sdncH7gEuDo//APDLCr6G8ZFPk/Y4jYkokchwjVcREZEJLN4AQEssq1lMRURqXMUCYnid4MeBu4A1wI/dfZWZXWNmg7OS3gV0mdlqguD3WXfvAo4BVpjZk+H2a919dXjM54BPmdk6gmsSb6zUaxg3hSwDxRiT6+PVrkRERGT/hQGxNVHQJDUiIjWuouMZ3f1O4M4h264que/Ap8Jb6T5/Ak7YwznXE8yQeuDIpxmIxWhWQBQRkVqUCALilHhOy1yIiNS4Sg4xlXLlM/QXouogiohIbQo7iFNieXUQRURqnALiRJBP05dXB1FERGpUGBAnx3Na5kJEpMYpIFZbsQDFPH35iDqIIiJSmwYDYjSnZS5ERGqcAmK15TMA9OQ0xFRERIZnZnVm9qiZPWlmq8zsX4bZJ2lmPzKzdWb2iJnNGbcCw2sQm6LqIIqI1DoFxGrLpwHoLURprlNAFBGRYWWAs919IbAION/MTh2yz4eA7e4+H/gq8KVxqy5cB7E5mtU1iCIiNU4BsdoKWQAyJJhcX9FJZUVEpEZ5oC98GA9vPmS3i4Bbw/s/Bc4xs/FZXDfeCEBjJKtZTEVEapwCYrWFHcQsMSY3qIMoIiLDM7Ooma0EtgB3u/sjQ3aZCWyEnWsR7yBYL3joeZaZ2QozW9HZ2Tk2xYUdxEkRdRBFRGqdAmK1hdcgZjyuIaYiIrJH7l5w90XALGCJmR0/ZJfhuoVDu4y4+w3uvtjdF7e1tY1NceEk
NQ1kSeeKFIq7fVsREakRCojVFnYQM8Q1SY2IiOyTu3cD9wLnD3mqAzgMwMxiwGRg27gUFYlArJ56C/7oOaAuoohIzVJArLZ8cA1ilrjWQRQRkWGZWZuZtYT364FzgWeH7LYc+EB4/2LgD+4+fq28eD31DAZEXYcoIlKrNCtKtamDKCIi+zYDuNXMogR/3P2xu//azK4BVrj7cuBG4DYzW0fQOVw6rhUmGqnzICBqLUQRkdqlgFhtJdcgKiCKiMhw3P0p4HXDbL+q5H4a+G/jWddrxOtJuDqIIiK1TkNMqy3sIBYiSZIx/ThERKRGxRtIFlOAOogiIrVMiaTaCsFfWxN19YzXclUiIiJjLt5AzIM/emqpCxGR2qWAWG3hENNkXX2VCxERERmFRAPxwmAHUUNMRURqlQJitYVDTBN1DVUuREREZBTi9UQLwb9pWuZCRKR2KSBWW7jMRX29AqKIiNSweCORvDqIIiK1TgGx2sIOYkODAqKIiNSweD2R3ACgDqKISC1TQKy28BrEBnUQRUSkliUasVyKeNTo1zIXIiI1S+sgVpnn0+Q9SlODJqkREZEaFq+H3AAN8SgDWuZCRKRmKSBWWS6TIkeMyfXxapciIiIycvEGwJmSKKiDKCJSwxQQqyybSZElTnO9fhQiIlLD4sGlEq2Jgq5BFBGpYboGscpymRQZEuogiohIbUsEAXFKPKdZTEVEapgCYpXls2myHqNZAVFERGpZ2EFsieXVQRQRqWEKiFVWyKbJEKe5TgFRRERq2GBAjGfVQRQRqWEKiFVWzKXIENcQUxERqW3JJgBaoll1EEVEapgCYpV5Lh1cg9iggCgiIjUsDIiTIynNYioiUsMUEKvM8xmyxJiU0CymIiJSwwYDoqW0DqKISA1TQKwyK2QoRJJEIlbtUkREREaubjIATZZiIFegWPQqFyQiIiNR0YBoZueb2XNmts7MrtjDPu8xs9VmtsrMfhBuW2RmD4XbnjKz95bsf4uZvWhmK8Pbokq+hkqzQgaPJqtdhoiIyOiEHcRGUrhDOq9hpiIitahi4xrNLAp8C3gL0AE8ZmbL3X11yT4LgCuB0919u5kdEj41ALzf3dea2aHA42Z2l7t3h89/1t1/Wqnax1O0kMGjiWqXISIiMjqxJEQTNJICoC+Tp0GXT4iI1JxKdhCXAOvcfb27Z4EfAhcN2ecjwLfcfTuAu28Jvz7v7mvD+5uALUBbBWutmkgxF/yjKiIiUuuSTTQU+wEY0FIXIiI1qZIBcSawseRxR7it1JHAkWb2oJk9bGbnDz2JmS0BEsALJZv/NRx6+lUzGzZdmdkyM1thZis6OztH90oqKOZZLF5X7TJERERGL9lEXRgQ+7XUhYhITapkQBxu1pWhV6zHgAXAmcAlwHfNrGXnCcxmALcBl7l7Mdx8JXA0cArQCnxuuG/u7je4+2J3X9zWNnGbj3HPEokpIIqIyAEg2UxysIOopS5ERGpSJQNiB3BYyeNZwKZh9vmlu+fc/UXgOYLAiJk1A78BvuDuDw8e4O6bPZABbiYYylqzEp4jmlBAFBGRA0CymUQ+7CBqqQsRkZpUyYD4GLDAzOaaWQJYCiwfss8vgLMAzGwawZDT9eH+Pwe+5+4/KT0g7CpiZga8E3imgq+hotLZHAnLE03UV7sUERGR0Us2Ec+rgygiUssqNr2Yu+fN7OPAXUAUuMndV5nZNcAKd18ePneema0GCgSzk3aZ2fuANwFTzezS8JSXuvtK4HYzayMYwroSuLxSr6HSenr7qANiSQVEERE5ACSbiOV6AXUQRURqVUXnn3b3O4E7h2y7quS+A58Kb6X7fB/4/h7OefbYV1odff19HAIkkhpiKiIiB4C6ZiK5PkAdRBGRWlXJIaayD739AwAk6hqqXImIiMgYSDZhmV7ANYupiEiN0gq248zdufe5Tp5+eQd/eeE5vgIkNcRUREQOBMkmrJijznJaB1FEpEbtMyCG1xHePriYvYyc
u/O/71zDd+5/EYDTmrsBmN7asrfDREREakOyGYD2RFYdRBGRGlVOB7EdeMzMngBuAu4Krx2UffjZ4x08tL6Ld580iyVzW/mXX63iew+9xPtPm82VFxxD/bbVcD3U1amDKCIiB4BkEwDTEhl1EEVEatQ+A6K7f8HM/hk4D7gMuM7Mfgzc6O4vVLrAWvaNP6zlpa4Bfvp4B62NCbb1Z1n2piO48oKjMTPIZ4IdY5qkRkREDgBhB3FqTB1EEZFaVdY1iO7uZvYK8AqQB6YAPzWzu939HytZYK16qaufl7oGuOKCo2mblORnT3TwhnlT+dhZ84NwCJBPB19jyeoVKiIiMlbCDmJrPMOrmsVURKQmlXMN4ieADwBbge8SrFWYM7MIsBZQQBzGfWu3AnDesdM5om0S7z551u47KSCKiMiBZDAgRtOs1zqIIiI1qZxlLqYB73L3t7r7T9w9B+DuReCvKlpdDbvv+U5mttQzd1rjnnfKZ4OvCogiIrIXZnaYmd1jZmvMbJWZfXKYfc40sx1mtjK8XTXcuSoqDIgtkbTWQRQRqVHlDDG9E9g2+MDMmoBj3f0Rd19TscpqkTs8+UMKm5/koRfO5B0LZ+4aTjqcrc8HX5tnjk99IiJSq/LAp939ifDf4cfDyzxWD9nvfnev3h9v6yYDMDmSpj+tDqKISC0qJyB+Gzip5HH/MNukfyv86pPw7K+JAlOyR/LmIxfu/ZiXHoSpC2DSIeNSooiI1CZ33wxsDu/3mtkaYCYwNCBWV9hBbLKUZjEVEalR5QwxtdJlLcKhpWVNbnPQ6NsC/+80WPtfcNIHADgx8hKnzZu2a5/vXwwPfn3X40IeNjwMc84Y52JFRKSWmdkc4HXAI8M8fZqZPWlmvzWz48a1MAgumYgmaLIU/boGUUSkJpUTENeb2SfMLB7ePgmsr3RhNWXzk9C/Bd5zG1zwZQpEOLtlM5Pr48Hz/V2w7m549LvBMFSAV56CTI8CooiIlM3MJgE/A/6nu/cMefoJYLa7LwS+CfxiD+dYZmYrzGxFZ2fn2BeZbGISA/Rn82jZZBGR2lNOQLwceAPwMtABvB5YVsmiak5uIPg6eRbbshGeL87i5MTGXc+/vCL4umMDbPpzcP+lB4Ovs08fvzpFRKRmmVmcIBze7u7/MfR5d+9x977w/p1A3MymDbPfDe6+2N0Xt7W1jX2hySYaSVF06NdENSIiNWefQ0XdfQuwdBxqqV25cLmKeD33r+0k53O4KF0yf0/HY2BRMIPVv4SZJ8FfHoTWedA8ozo1i4hIzbBgxrMbgTXu/pU97NMOvBquXbyE4I/AXeNYZiDZTEMx+MNpTyrHpKSuShERqSXlrINYB3wIOA6oG9zu7h+sYF21JZ8CwGNJbnzgRc5Ozieeug96X4Gm9iAgTj8OGqfBmuVwzlWw4U9w7EVVLlxERMabmc0DOtw9Y2ZnAicC33P37r0cdjrw34GnzWxluO2fgMMB3P164GLgo2aWB1LAUq/GGM9kM3X9/QD0pHMcSv24lyAiIiNXzhDT24B24K3AH4FZQG8li6o5YQfxDy/08VTHDk485U3B9s1PQbEAHY/DrFOCQLhtPTz1I0jvgNm6/lBE5CD0M6BgZvMJuoJzgR/s7QB3f8Ddzd1PdPdF4e1Od78+DIe4+3Xufpy7L3T3U939T5V/KcNINpEsBAFxx0CuKiWIiMjIlRMQ57v7PwP97n4r8HbghMqWVWPCaxC/cs8Gjmhr5E1vPCvY/sqTwVqH2d4gIB79V2AR+N3VwfNzdP2hiMhBqOjueeCvga+5+z8AB871Bskm4vk+AHq0FqKISM0pJyAO/vmv28yOByYDcypWUS3KBx3EVZ1ZPv2Wo4g1tMCUuUEHseOxYJ9ZpwRDTGefDn2vwpQ5MHlW9WoWEZFqyZnZJcAHgF+H2+JVrGds1TUTy4UBMaUOoohIrSknIN5gZlOALwDLCRbl/VJFq6oxhewAGRIcP3MyFxzfHmyc
sTBYyqLjMahrganzgu2D1x1qeKmIyMHqMuA04F/d/UUzmwt8v8o1jZ1kE5FsH+DsUEAUEak5e52kxswiQI+7bwfuA44Yl6pqzJZtO6jzBP/jzPlEIhZsnHEirP5FcA3irFOCGUwBjrkQfv+/4Oi3V69gERGpGndfDXwCIPwDbJO7X1vdqsZQsgkr5kiSoyetgCgiUmv22kF09yLw8XGqpWZ5doA0Cdon1+3a2L4w+LpjIxy2ZNf2pulwxUtw9NvGt0gREZkQzOxeM2s2s1bgSeBmMxt26YqalGwGoD2ZpSelaxBFRGpNOUNM7zazz5jZYWbWOnireGU1xPJp0h4nHil5O2ecuOv+rMVDDrDxKUxERCaiye7eA7wLuNndTwbOrXJNYyfZBMD0RFZDTEVEalA5q9cOrnf4sZJtjoab7mSFNGkSxKIlwW/SITCpPZiQZubJ1StOREQmmpiZzQDeA3y+2sWMubCDeEhSQ0xFRGrRPgOiu88dj0JqWSSfJk2SpuiQzuBhS2D7X6BuclXqEhGRCeka4C7gQXd/zMyOANZWuaaxE3YQ2xJZVquDKCJSc/YZEM3s/cNtd/fvjX05tSmST5H2BC2RISN2L/wGFPSPo4iI7OLuPwF+UvJ4PfDu6lU0xsKAODWW1hBTEZEaVM4Q01NK7tcB5wBPAAqIoUghTZo4sciQDmL9lOoUJCIiE5aZzQK+CZxOcMnGA8An3b2jqoWNlTAgtsYy9PZokhoRkVpTzhDTvy99bGaTgdsqVlENihQypGkgHi1nzh8RETnI3Qz8APhv4eP3hdveUrWKxlJ4WUVLNE2POogiIjVnJIlmAFgw1oXUsmghTYok0aEdRBERkd21ufvN7p4Pb7cAbdUuasyEHcRmS9ObyVMoepULEhGR/bHPgGhmvzKz5eHt18BzwC/LObmZnW9mz5nZOjO7Yg/7vMfMVpvZKjP7Qcn2D5jZ2vD2gZLtJ5vZ0+E5v2FW/TUjIoVMsMzF0ElqREREdrfVzN5nZtHw9j6gq9pFjZlYEqIJmm0AgF7NZCoiUlPKuQbx/5bczwMvlXOdhJlFgW8RDJnpAB4zs+XuvrpknwXAlcDp7r7dzA4Jt7cCXwQWE1yf8Xh47Hbg28Ay4GHgTuB84LdlvI6KiRVTZEgQ0xBTERHZtw8C1wFfJfg37k/AZVWtaKwlm2gkBUBPKk9LQ6LKBYmISLnKSTQbgEfc/Y/u/iDQZWZzyjhuCbDO3de7exb4IXDRkH0+AnwrDH64+5Zw+1uBu919W/jc3cD54bpRze7+kLs7wUQ57yyjloqKFTLBOogaYioiIvvg7hvc/UJ3b3P3Q9z9ncC7ql3XmEo20eD9AJrJVESkxpQTEH8CFEseFyiZnnsvZgIbSx53hNtKHQkcaWYPmtnDZnb+Po6dGd7f2znHV7FA1HOkPKmAKCIiI/WpahcwppLN1BWDIaY9GmIqIlJTyhliGgs7gAC4e9bMyhkrMlxaGnqleoxgwpszgVnA/WZ2/F6OLeecwTc3W0YwFJXDDz+8jHJHKJ8GIE1ck9SIiMhIHVj/gCSbSWTVQRQRqUXldBA7zezCwQdmdhGwtYzjOoDDSh7PAjYNs88v3T3n7i8STICzYC/HdoT393ZOANz9Bndf7O6L29oqODlcLrjGIhdJMgHmyxERkdp0YE31mWwinusD0FIXIiI1ppyAeDnwT2a2wcw2AJ8D/q6M4x4DFpjZ3LDjuBRYPmSfXwBnAZjZNIIhp+uBu4DzzGyKmU0BzgPucvfNQK+ZnRrOXvp+ypxRtWLCgJgtq6kqIiIHKzPrNbOeYW69wKHVrm9MJZuI5cOAqCGmIiI1ZZ9DTN39BeBUM5sEmLv3lnNid8+b2ccJwl4UuMndV5nZNcAKd1/OriC4muDaxs+6exeAmf0vgpAJcI27bwvvfxS4BagnmL20qjOYDg4xzVldVcsQEZGJzd2bql3DuKlrxjK9RExDTEVEas0+A6KZ/W/gy+7e
HT6eAnza3b+wr2Pd/U6CpShKt11Vct8JLszf7eJ8d78JuGmY7SuA4/f1vcdNyRBTERERAZJNWKaH5roYPal8tasREZH9UM4Q0wsGwyFAuOzE2ypXUo0JA2JeAVFERCRQ1wLFPO31BQ0xFRGpMeUExKiZ7Uw/ZlYPKA0NyisgioiIvEZDKwAzEykNMRURqTHlLHPxfeD3ZnZz+Pgy4NbKlVRjcuE1iBFdgygiIgJAw1QAZiQGWK2AKCJSU8qZpObLZvYUcC7BOk3/CcyudGE1I+wgFhUQRUREAvVBB3F6rJ+H+3QNoohILSlniCnAK0AReDdwDrCmYhXVmsFJaqIaYioiIgLsHGI6LdqvIaYiIjVmjx1EMzuSYO3CS4Au4EcEy1ycNU611YYwIBai6iCKiIgAOzuIU62PHgVEEZGasrchps8C9wPvcPd1AGb2D+NSVS0J10EsKiCKiIgE6qcA0GJ9ZPJF0rkCdfFolYsSEZFy7G2I6bsJhpbeY2bfMbNzCK5BlFJhB7GoIaYiIiKBaAzqJtPsPQBa6kJEpIbsMSC6+8/d/b3A0cC9wD8A083s22Z23jjVN/HlUhSJQCRR7UpEREQmjvpWJhV7AehJaaIaEZFasc9Jaty9391vd/e/AmYBK4ErKl5ZrcinyViCWKzc+X5EREQOAg2tNOS7AXUQRURqyX6lGnff5u7/7u5nV6qgmpNLkSFJLKqAKCIislPDVJK5HQCayVREpIYo1YxWPk3WEsQiujxTRERkp/pWEtmwg6iAKCJSMxQQRys3QAYFRBERqRwzO8zM7jGzNWa2ysw+Ocw+ZmbfMLN1ZvaUmZ1UjVp3amglmt4OQE9a1yCKiNSKvS1zIeXIpUmTIK4hpiIiUjl54NPu/oSZNQGPm9nd7r66ZJ8LgAXh7fXAt8Ov1VHfSiTXT4KcOogiIjVEqWa08inSJIlF1UEUEZHKcPfN7v5EeL8XWAPMHLLbRcD3PPAw0GJmM8a51F0aWgGYHutXQBQRqSEKiKMVdhCjGmIqIiLjwMzmAK8DHhny1ExgY8njDnYPkeMnDIgz69KaxVREpIYoII5WboC0x4lH9FaKiEhlmdkk4GfA/3QPV6EveXqYQ3yYcywzsxVmtqKzs7MSZQbqg4B4aHxAs5iKiNQQpZrRygcdRA0xFRGRSjKzOEE4vN3d/2OYXTqAw0oezwI2Dd3J3W9w98Xuvritra0yxcLODmJ7fICelCapERGpFQqIo5VLM+CaxVRERCrHzAy4EVjj7l/Zw27LgfeHs5meCuxw983jVuRQDVOB4BrE7lS2amWIiMj+0Symo5VPkfIEMc1iKiIilXM68N+Bp81sZbjtn4DDAdz9euBO4G3AOmAAuKwKde4SDjFtiw3QtUMBUUSkViggjlYuRcrjGmIqIiIV4+4PMPw1hqX7OPCx8amoDPE6iDcwNdJHV18WdydohIqIyESmttdo5VIMFDVJjYiIyG7qW5lCH9lCkd6MrkMUEakFSjWjUciBFxjwpJa5EBERGaqhlaZwstWuPg0zFRGpBQqIo5FLAZAmTlxDTEVERF6roZXG/A4AuvoyVS5GRETKoYA4GmFAzKBJakRERHZT30oyFwTEreogiojUBKWa0cgPdhC1zIWIiMhuGlqJZ7sB6OpXB1FEpBYoII5GLg1AypMKiCIiIkM1TMXS3UQo6hpEEZEaoYA4Gvld1yBqiKmIiMgQ9a0YzmF1GV2DKCJSI5RqRiO3a4ipJqkREREZoqEVgDmNabb2q4MoIlILKhoQzex8M3vOzNaZ2RXDPH+pmXWa2crw9uFw+1kl21aaWdrM3hk+d4uZvVjy3KJKvoa9GgyIniCqdRBFREReqz4IiIcl1UEUEakVsUqd2MyiwLeAtwAdwGNmttzdVw/Z9Ufu/vHSDe5+D7AoPE8rsA74r5JdPuvuP61U7WXLB9cgpkmqgygiIjJU2EE8NDnAI7oGUUSkJlSy7bUEWOfu6909C/wQuGgE
57kY+K27D4xpdWOhZB3EmDqIIiIirxUGxOmxAbo0xFREpCZUMtXMBDaWPO4Itw31bjN7ysx+amaHDfP8UuCOIdv+NTzmq2aWHO6bm9kyM1thZis6OztH9AL2qWSIaUwdRBERkdcKh5i2RfvYPpAlXyhWuSAREdmXSgbE4RKTD3n8K2COu58I/A649TUnMJsBnADcVbL5SuBo4BSgFfjccN/c3W9w98XuvritrW1kr2Bfdg4x1TqIIiIiu0k2QSRGq/XhDtsHctWuSERE9qGSAbEDKO0IzgI2le7g7l3uPnjV+neAk4ec4z3Az909V3LMZg9kgJsJhrJWR9hBTJHUMhciIiJDmUHDVJq9F4Cufk1UIyIy0VUy1TwGLDCzuWaWIBgqurx0h7BDOOhCYM2Qc1zCkOGlg8eYmQHvBJ4Z47rLV9JBjKuDKCIisrv6ViYVewDY2qvrEEVEJrqKzWLq7nkz+zjB8NAocJO7rzKza4AV7r4c+ISZXQjkgW3ApYPHm9kcgg7kH4ec+nYzayMYwroSuLxSr2GfcimKkThFIkQVEEVERHbX0Ep9bgegDqKISC2oWEAEcPc7gTuHbLuq5P6VBNcUDnfsXxhmUht3P3tsqxyFXIpiNJgjR0NMRUREhjHpEBKbnwZgq5a6EBGZ8JRqRiOfohCtB9A6iCIiIsNpmkGk71ViEaOrTx1EEZGJTgFxNHJpCoMdRK2DKCIisrumdizby6yGAl3qIIqITHhKNaORT1GIDA4xVQdRRERkN03BfHTzG/p1DaKISA1QQByNXIp8pA5A6yCKiIgMp6kdgLnJHl2DKCJSAxQQRyOXIh8NAmJck9SIiIjsLuwgHh7foQ6iiEgNUKoZjXyafDjEVMtciIiIDCPsIM6IdOsaRBGRGqCAOBq5XQFR1yCKiIgMI9kEiUm0sZ2BbIGBbL7aFYmIyF4oII5GboCcBQExrllMRUREhtfUzpRiF4C6iCIiE5xSzWjk0+TUQRQREdm7phk0ZzsB6OpXRGR/ZQAAIABJREFUQBQRmcgUEEcjlyJrWgdRRERkr5raqc+EAbFPE9WIiExkSjWjkU/vHGKqDqKIiMgeNLWTGNgCuIaYiohMcAqII+UOuQGylgC0DqKIiMgeNc3ACmma6WerlroQEZnQFBBHKh/8A5exJNGIYaaAKCIiMqxwqYs5iR46exUQRUQmMgXEkcoNAJC1pLqHIiIie9M0A4CjG/t5ZUe6ysWIiMjeKCCOVC4FQNrqiEf1NoqIiOxR2EGcV9/Lpu5UlYsREZG9UbIZqbCDmCahCWpERET2ZlIQEGfHe9ikDqKIyISmgDhSgwHR6jTEVEREKsrMbjKzLWb2zB6eP9PMdpjZyvB21XjXuFeJBqibzIxIN529GTL5QrUrEhGRPVBAHKlsEBBTJLUGooiIVNotwPn72Od+d18U3q4Zh5r2T9MMpvo2AF2HKCIygSnZjNRgB9GTGmIqIiIV5e73AduqXceoNLUzOd8FwMu6DlFEZMJSQBypMCAOoFlMRURkQjjNzJ40s9+a2XHVLmY3TTOoz2wBYFO3OogiIhNVrNoF1KydQ0wTxDSLqYiIVNcTwGx37zOztwG/ABYMt6OZLQOWARx++OHjV2FTO9H+LRhFzWQqIjKBKdmMVNhB7Hd1EEVEpLrcvcfd+8L7dwJxM5u2h31vcPfF7r64ra1t/IpsmoEVc8xrzCogiohMYAqIIzU4xNQTWgdRRESqyszazczC+0sI/n3vqm5VQ4RrIR7X1K+lLkREJjANMR2pnQExifKhiIhUkpndAZwJTDOzDuCLQBzA3a8HLgY+amZ5IAUsdXevUrnDa5oBwIL6Pn6hDqKIyISlgDhS2QGIxMkUo8SjxWpXIyIiBzB3v2Qfz18HXDdO5YxM2EGcnehh08sp3J2w6SkiIhOIel8jlUtBvIF8sah1EEVERPZl0nQAZkS7GcgW2JHKVbkgEREZjpLNSOX6IdFAvuhaB1FERGRf
YklomMb04lZAayGKiExUCogjlR2AeD35gmsWUxERkXK0zqUl0wFoLUQRkYlKAXGkcimIN5IrFLUOooiISDmmzKWhbyMAm3eogygiMhFVNNmY2flm9pyZrTOzK4Z5/lIz6zSzleHtwyXPFUq2Ly/ZPtfMHjGztWb2IzNLVPI17FGuP+ggFp24hpiKiIjsW+sRRHpfpjFW0BBTEZEJqmIB0cyiwLeAC4BjgUvM7Nhhdv2Ruy8Kb98t2Z4q2X5hyfYvAV919wXAduBDlXoNe5VLQaKBQtGJapIaERGRfWs9AsM5uWmHhpiKiExQlUw2S4B17r7e3bPAD4GLRnPCcBHgs4GfhptuBd45qipHKjuwc4hpXNcgioiI7FvrXABOqO9ikzqIIiITUiUD4kxgY8njjnDbUO82s6fM7KdmdljJ9jozW2FmD5vZYAicCnS7e34f58TMloXHr+js7BzlSxnG4BDTgmYxFRERKUvrEQAsiG9VQBQRmaAqGRCHS00+5PGvgDnufiLwO4KO4KDD3X0x8DfA18xsXpnnDDa63+Dui919cVtb2/5Xvy/hENO8hpiKiIiUp2EqJJqYba/wak+aXKFY7YpERGSISiabDqC0IzgL2FS6g7t3uXsmfPgd4OSS5zaFX9cD9wKvA7YCLWYW29M5x012AOIN5ItFTVIjIiJSDjNoncv0wmaKDq/26DpEEZGJppIB8TFgQTjraAJYCiwv3cHMZpQ8vBBYE26fYmbJ8P404HRgtbs7cA9wcXjMB4BfVvA17FkuDIgFJ6YOooiISHla59KSDtZC3LxDAVFEZKKpWLIJrxP8OHAXQfD7sbuvMrNrzGxwVtJPmNkqM3sS+ARwabj9GGBFuP0e4Fp3Xx0+9zngU2a2juCaxBsr9Rr2qJCDYg7iDcEkNeogioiIlKf1COr7XyZKgY7tA9WuRkREhojte5eRc/c7gTuHbLuq5P6VwJXDHPcn4IQ9nHM9wQyp1ZML/0HbucyFAqKIiEhZWo/AijlmRbbxwpb+alcjIiJDaGzkSGSDgOjxYJKaWFRvo4iISFmmBEtdLJnczbotfVUuRkREhlKyGYmwg1iI1gFoHUQREZFyhUtdLGrcztotvVUuRkREhlJAHIkwIBZjDQBEdQ2iiIhIeZpmQDTJkfEtvNQ1oKUuREQmGAXEkQiHmOZ3dhD1NoqIiJQlEoHWucz0V8kXnZe6dB2iiMhEomQzErnBgFgPQEwdRBERkfJNmUtrJljqYu2rug5RRGQiUUAciZ3XIA4GRL2NIiIiZWs9gmTvBsA1UY2IyASjZDMSuVTwJRxiGtMkNSIiIuVrnYvlUyycnGZdpwKiiMhEooA4Etngeol8JOwgKiCKiIiUr3VwqYsdGmIqIjLBKCCORDjENBtJAhDXEFMREZHytc4DYFH9FtZv7aNY9CoXJCIig5RsRmLIJDVRdRBFRETK1zIbks0c6S+SzhV5uTtV7YpERCSkgDgS2QGwCFmPAxDXLKYiIiLli0Sg/URmDDwHwNotvVUuSEREBikgjkQuBfFG8h4MiYlpHUQREZH9M2Mhjd3PEqWgmUxFRCYQJZuRyPVDvJ58eM2E1kEUERHZTzMWYvk0ixu3KiCKiEwgCogjkUtBooF8QR1EERGREZmxEIA3Nb3MWgVEEZEJQ8lmJLL9EG8gXygC6iCKiIjst2kLIN7A62IvsW5LH+6ayVREZCJQQByJ3ADEG8iFQ0w1SY2IiMh+ikSh/QTm5V+gN51nS2+m2hWJiAgKiCMTDjEtFIMOYlRDTEVERPbfjIVM63sOo8iTG7urXY2IiKCAODLhENPczmsQ1UEUERHZbzMWEs33Mz+6hcc3bK92NSIiggLiyORS4TWIg0NM9TaKiIjst3CimvOnvsqfX1IHUURkIlCyGYnwGsR8UZPUiIiIjFjb0RBNcFp9B092dJPNF6tdkYjIQU8BcSSy/UOWuVBAFBGRyjKzm8xsi5k9s4fnzcy+YWbrzOwp
MztpvGvcb9E4TD+OI4vryeSLrN7cU+2KREQOegqII5FLQby+pIOot1FERCruFuD8vTx/AbAgvC0Dvj0ONY3ejIW09qwBnCde0nWIIiLVpmSzv4oFKGQg3rhzkpq4OogiIlJh7n4fsG0vu1wEfM8DDwMtZjZjfKobhRkLiWS6WdK8QxPViIhMAAqI+ys3EHxNNFAI10GMKiCKiEj1zQQ2ljzuCLdNbLPPAOCvW9bxZ3UQRUSqTgFxf2XDgBivJ1fQEFMREZkwhvtrpe+2k9kyM1thZis6OzvHoax9mLYAmmdxGk+xaUeazTtS1a5IROSgpmSzvwY7iPFG8sXBZS7UQRQRkarrAA4reTwL2DR0J3e/wd0Xu/vitra2cStuj8xg3lnM6n6UKAWe0HIXIiJVpYC4v3K7OoiDQ0xjEb2NIiJSdcuB94ezmZ4K7HD3zdUuqizzziaW7WFx/C88rmGmIiJVFat2ATVncIhponHXEFNdgygiIhVmZncAZwLTzKwD+CIQB3D364E7gbcB64AB4LLqVDoCR5wJGO+a/Dw/2HBylYsRETm4VbT1ZWbnm9lz4ZpMVwzz/KVm1mlmK8Pbh8Pti8zsITNbFa7l9N6SY24xsxdLjllUydewm5IOYr7gRAwiCogiIlJh7n6Ju89w97i7z3L3G939+jAcEs5e+jF3n+fuJ7j7imrXXLaGVjh0EafbUzzz8g66B7LVrkhE5KBVsYBoZlHgWwTrMh0LXGJmxw6z64/cfVF4+264bQB4v7sfR7Dm09fMrKXkmM+WHLOyUq9hWDsDYgP5omuCGhERkbEw72xm9j1DQ7Gf36/ZUu1qREQOWpVMN0uAde6+3t2zwA8J1mjaJ3d/3t3Xhvc3AVuACXAlPSXLXDSSLxS1BqKIiMhYmHc25gXeNmkt/7nqlWpXIyJy0KpkQCx3PaZ3h8NIf2pmhw190syWAAnghZLN/xoe81UzS45p1ftSssxFvuhaA1FERGQszFoC8UYubnme+57vZCCbr3ZFIiIHpUoGxHLWY/oVMMfdTwR+B9z6mhOYzQBuAy5z92K4+UrgaOAUoBX43LDfvFLrPOXC9ZniwSQ1cQ0xFRERGb1YAua+kRPSK8jkC/zxuQmwRqOIyEGokulmn+sxuXuXu2fCh98Bdk5dZmbNwG+AL7j7wyXHbA4vxM8ANxMMZd1NxdZ5yvUHX8NlLmJaA1FERGRsHHMhdX0bOad+nYaZiohUSSUD4mPAAjOba2YJYCnBGk07hR3CQRcCa8LtCeDnwPfc/SfDHWNmBrwTeKZir2A4JUNMcwXXGogiIiJj5bi/huRkPtb8AH9Ys4VsvrjvY0REZExVbB1Ed8+b2ceBu4AocJO7rzKza4AV7r4c+ISZXQjkgW3ApeHh7wHeBEw1s8Ftl4Yzlt5uZm0EQ1hXApdX6jUMKzcA8QYwI18sqoMoIhNCLpejo6ODdDpd7VLGRV1dHbNmzSIej1e7FBlLiQZY+F4WrriFaOZi/vTCVs486pBqVyUiclCpWEAEcPc7CRbuLd12Vcn9KwmuKRx63PeB7+/hnGePcZn7ZzAgQrDMhSapEZEJoKOjg6amJubMmUMwwOLA5e50dXXR0dHB3Llzq12OjLWTLyX66A38TeIBfvv0sQqIIiLjTOMj91cuFfyFE4JlLjRJjYhMAOl0mqlTpx7w4RDAzJg6depB0y096Ew/DmYt4dK6P7L8yZfpHshWuyIRkYOK0s3+yvbv6iAWtMyFiEwcB0M4HHQwvdaD0smXckh2AyfmV3H7IxuqXY2IyEFFAXF/5VI7A2Ku6MTUQRQRoauri0WLFrFo0SLa29uZOXPmzsfZbHkdoMsuu4znnnuuwpVKTQgnq/nUlPu4+cG/kM4Vql2RiMhBo6LXIB6QSq5BLBSLxNVBFBFh6tSprFy5EoCrr76aSZMm8ZnPfOY1+7g77k5kD7M/33zzzRWvU2pEogFO+SBLHvgarZm38cuVR/LeUw6vdlUi
IgcFtb/KUSxCsRDcsv07r0HMaYipiMherVu3juOPP57LL7+ck046ic2bN7Ns2TIWL17McccdxzXXXLNz3zPOOIOVK1eSz+dpaWnhiiuuYOHChZx22mls2bKliq9CquINn4BkE1+c9EtuuG89xaJXuyIRkYOCOojl+MVH4akf7np83LuAYJKahoTeQhGZWP7lV6tYvalnTM957KHNfPEdx43o2NWrV3PzzTdz/fXXA3DttdfS2tpKPp/nrLPO4uKLL+bYY499zTE7duzgzW9+M9deey2f+tSnuOmmm7jiiitG/TqkhjS0Yqd9jNPv/Tfq+57mD88ew7nHTq92VSIiBzylm3Ic8w6YOm/X46PfDkCh6FoHUURkH+bNm8cpp5yy8/Edd9zBjTfeSD6fZ9OmTaxevXq3gFhfX88FF1wAwMknn8z9998/rjXLBHHqR/FHrufzkZ/zxbtO5M1HtWn2cBGRClNALMcxfxXchsgVnNgerqUREamWkXb6KqWxsXHn/bVr1/L1r3+dRx99lJaWFt73vvcNu1xFIpHYeT8ajZLP58elVplg6iZjp3+S0353NU1bHue2hw7ng2do7UsRkUpSuhmFfLFITNcgioiUraenh6amJpqbm9m8eTN33XVXtUuSiW7JMrxpBl9vvIX/97tn2NqXqXZFIiIHNAXEUcgXNMRURGR/nHTSSRx77LEcf/zxfOQjH+H000+vdkky0SUasYu+xaz8S/x94Tb+711aCkVEpJI0xHQU8kXXtRAiIkNcffXVO+/Pnz9/5/IXECxwf9tttw173AMPPLDzfnd39877S5cuZenSpWNfqNSO+efA6z/KBx75Nh94fBEPLZrJafOmVrsqEZEDkgJiGe54dAOPv7R95+OLFh3KGxe0kS8UtcyFiIjIeDj3agrr7+WrW/+d99w2h+su/yuObm+udlUiIgcctb/KsPbVPh56oYuHXujiN09t5t/ufBaAXNGJa4ipiIhI5cXriL77u7TEctzBP/Hl7/6ATd2palclInLAUUAsw1XvOJYHrzibB684m0+95UhWb+5h47aBYJkLzWIqIiIyPtqPJ/Lhu5ncNIlv57/Arddfy7otfdWuSkTkgKJ0s5/eelw7AHeteoVcoahJakRERMbT9ONIXP5HMtNP4sr019hw3YX8+O4HKBa92pXJaKW6YdUvIJ+tdiUyHnwc/5t9+XF4vsKzZhcLwe/wAUDXIO6nw6c2cHR7E/+16tVgFlNdgygiIjK+GqfSvOw39N37Dd7wwJfhgXfyyz+/k/nnfogTXncqmP5trqjO5+DlJ+C4v4Z43ejPl+2HR/4dHvw6pLvhxKXw19dPjJ/jtheh+VCIJUd/ru0vwQu/hylzYNpRwbbNK+GVZ2Dum2D2abv2zWeh49Fgv0ltez/v1rXw5B1w1Ntg1uK979u9Adb9Dg57PUzfjzVzU93w2Hdgzpvg8Nfvfd98FrrWQn0rNM/Y/fmNj8Hv/wU2PASTZwXvxxFnwakf3fv73Pk8PPkD2LYe2o6B9uNh9unQ0Lprn3QPrLsbDjsVJs8MQugj/w7/9Xko5uHim+D4d5f/usv1wj3wn1dA57MwYyEceT4c+06YfuyufXo2w1M/grrJMG1BeT/bKlFAHIHzjmvnm39YC0BMs5iKiIiMv2icSed8Gl/8Xjb+6NNctOmnRJb/hJfvPJzcgvOZvuh86o84fWwCzERRLMDGR6B1HjRNr04Nf3kA7rgEMj1w91XBh/qj3w6xuuA26ZDyg507PPMzuOvz0PcKLHgrtM6FR64PPtyfc1WwX38X5Pqh5fBdx3WsCD5s51IQjUNdM5zwniA0QPBh/KHroOdlOORYOOQYmPNGqG95bQ35DEQTu9f8yjNw77/Bs78OPvAv/UEQZobaujZ4T/o7oe9VSG0PwlS2H444E5YsCwLMk3fAnf8I2d7h34v7vgzv+Aa87m+D2n/8/iAgQhAk5pwehKHZbwgCa7EIqW1w//8Hj94Q
hJ/7vwKnfBjO+ecghAwa2AZP/Rie/gm8vCLYZtHgZ3fmlZCc9NqfSX8n5AageSZEYrD6l/DbfwxeH8DCv4Fzr37t72CqGx7+Njz/W9iyBgphF7j1CDj8NKifsuv9WnsXNLbBKR+B/i3QtQ5+90VYeTu8/SvQfkIQtDqfg97Nwe2Vp4MuoEWh5TBYvRzw4Hdu4VJY/MEgpD34teBnYFE46oLgZ7vqP4LwnNoOP78cJrUH7+egQj74PenbEvwuJRqD80ZiEIlCYhIkGl7783KH7S8GfyhZ9fPg92TKHHjjZ4Lfh/v+D/zxSzD3zcHPZOMj8Nh3IZ9+7XnmvBEWXhL8NzT0d3Pw+7z0J3jk23DB/xk+cFeA+Xi2d6tk8eLFvmLFijE736pNO3j7N4Lp2D9x9nw+dd5RY3ZuEZGRWLNmDcccc0zVvn9XVxfnnHMOAK+88grRaJS2tuAvo48++iiJRKKs89x000287W1vo729fZ/7Dveazexxd9/Hn9Bl0Fj/+1hNqW2bePw/v0fy+eUs8meJW4EsCXrrDsWaDqG+9VDqpszEmmfApOmQbApu9a3BB85kU/CB9Mkfwtr/gpknw+svf20H4DXfsBs2Pxl0gPLZ4EN2shkOORqmH797J6RYCMJMLhV8+M6ng/0b2yC6h7/XF3KQ6Q26aquXw2M3wo4NwQfXY94RfCiefXrwIXZPcqkgGDz14+CD7rT5YcBsD7735FnB/XKs+TX89IPBB+GzvwCP3xJ0xEo1zww+jB91AbQdHZ7bgg/8Gx6C/q3h954WBJv198KMRXD+tUEHzR1+9Ul44lZY8ndBJ2r9H8ELwfed+6bwfX8S4g1B8CjkgveokA2eb50HK38QhKbJs6D7paC2eAOc+F5Y9DfBB/unfgSbnoBYfRBs6yYHQbFYgFefCX4+C5fCyjsgXg/v/X7QPSvkgu//wFfh2d8A4WfpuhZomBp80LdoEPDiDUHg2fhI8LO64EtBUOl8Lnithy6Cltnw87+D9ffA4g8FYSPTB+ddE3z9ywOw4eFd4TKa2BXALAInvR9O/2TQKXvk34PvP2NR0KUa2AZrfgWFTFDHce+C+ecGYeWJW4P/FqbMCX5G+VTQMc307Dp34yFBeG8/Mah97d3wp28GNcw5I/iZFfJBGE93B4Hn0NcF+/e9GoSbjkeD38PBn8Hrl8HrP/raYLr2bvjNp3f9rEo1TIMps4OO9QnvCYJptj8I8StvD/6bLWSCfee/BU69HF68H/78fRjogrM+D2/8dFDfTW8N6jr1Y8HP+JWnoHtj8Pu1N8nJwfe1SPDfZKo7+KMFQKIJ3vgPwTkH/yDV3wV//h48cgP0bgqOO3EpvOkzwXvXtQ42PgpP/TDoiA6ep2l68IeQ1nnBHwLWLIdNfw7+P3XxjTDv7L3XuQ/l/hupgDgC7s4ZX7qHl7tT/MO5R/LJcxeM2blFREai2gGx1NVXX82kSZP4zGc+s9/HnnHGGVx33XUsWrRon/sqII7egRQQB6VzBf68diMdK39HbMMD1A1sYirdHEI37baNOssNe1w2NolEvo8iEba2nEhrzxpixQw7WhdikQiJ7HaiuX4inse8SCSzY481eDRBccpcIoUslumFbN/unYNBgx/Cp84PPtA3ToNXVwUBpOfl1+47+4wgDGx+MvhgnO4OAsmC84IuV9cLQRDL9AQfKOua4cX7ILUdn3YURKJY1wu7PkwPap4VDE1sOyoIOXWTgw/BPR3Qswl6Xw26ONtfhENPgr/9yc5hff7K07DlWayQDV7ni/fBut8HYQOCMBurC54bKtkcdAkXf/C1IbeQhx/9LTz/n0F4Of7dwXu0/t4gLE2eBUs+HIS9ZFNwzMA2eOJ7QejsezUIgW/8dHB8pi/oQP35+0FYHnz97ScEQwFzqaB7NBiMIAg4p340eJ1bnoUfXhKEp0g0CJ4QvE9L/i7o+jUdCrEhfwjb8mwwbPaFPwTnesPf7znMF3Kw/BPBEMrW
ebD09qDrWfqevPo0/OXBoMMXrw/e1/nn7uqaAnQ8HrwHW58LunUWhRPfAyd/IHi9pTY8EtSX7QM8CC6tRwS/i/GGIKx1bwgC3ykf2fWHjK3r4KFvBrV0BSPqWPBWOPvzwe/hSGUHYMVNUMwFXd+2o4Z/X4fq6wy6eO0nDBmmm4H0jiD8D9r+UhASezfDlLlBvVPnB6GsqT342WYHgt/fYiEIjume4Heqd3MQ6pNNu/4YdOhJwc8pGh++tkIu6Gy2HhH8cWYo9yAobngo+B49m4L3veuF4Pdx6gI47X8E4XJoF3MEFBBLVOIfwGt+tZqbHnyRz771KD521jA/cBGRcTSRA+Ktt97Kt771LbLZLG94wxu47rrrKBaLXHbZZaxcuRJ3Z9myZUyfPp0PfehDzJw5k/r6+n12HhUQR+9ADIhDpXMFVm3qYfWmHXRsH2D7tq2kt28m1ddNZqCHyYVuDrUuDrWtbPDpLC+8gU5aaKGXS6L38JboCvq9jm4m0esN5IgGIdIn87QfwbrYfLpyCeo9RYv1cYxtYGFkPXNtMwMkSVkDmUgDKZKkSJKzBLloPR5NMiWaZoZtZzpbac93MCO3kYZiH5uis3ghNo+Xo7PIRBvJxRrZUHc0r9TNIx41YtEI9WQ5se8Bjun9E0f3PUJjsZcBq+cFDmN7sZEp1sdk+niew7k5fx4P5o8mGonQkjTm1/UwryHFnPp+5tirzE6t4dC+p2lKb37Ne5ezBD3xafQlDiGdnMaOhtn8furf0pmJ0tmXoWN7ipe7U+QLRZrq4jTVxWhrSjK7yTjZnmVKdjPNmc3E8gP8JXkkz8WPoyvWxnTr5hDbzo6Gw8nEp2BAKlegP5NnIFsgHovQEC0wyzdTmHIkU5vqqI9HyRaK5PIFckUnV3Cy+SLuPti/IxoxEpEijZajsXkKUxriRMzozeTpSeV4cWs/HR0bObTzfl5pPJpI+7EcNqWBxkSU+kSUZDxKMhYJb8H9RCyCAZF0N9PX3EIqnaY7H6OLFl5sP4/6xhYmN8Rpa0rSNilJXTwKOEWHojvFYvA1nSswkC2QC9fQjkcjbO3L8MzLO1i1qYdC0WmblOD1vpL8jJOZNu0Q2ifXUReLkohFgp97JEIkAhEziu64w0C2QE86R08qR1/4/uWLziFNSWY0J5k6KUF9Ik5dPEKu4PSkcvSkc7gH71csYiTC1xuJQH+mQF86T28mR+r/b+/+g+wq6zuOv7/33F/7MwkhBMwGEm1EoyBIqlQ71sEIAS04o45YZ0R+NFOmVup0WqHOyFDrVMZOi1Eqg4hAxwEsSE0dEYFqnYIiiDQEMRIVYTGQzZJlN/vj/jrf/nGeTS7hLrvs7t275+7nNXNm7zn33LPP9z733u99znOe55ZrjJZrRGb0FLN0F7OUKjEjExVGy1UyZhTL++n2A3S96nWs6ing7jz9/Dj9+8co5CLWLO+gb0UHPcUsxWxEpsHcHdVaTM2dyIwoY1jd5b4TlSS+0VLSw2ehnnNR8pzU3ClVYiYqNQZHyzw3PMHggTJRxpJ6zGXoyGXpzEd0F7Os7MqzsghdmTI2edlrnXI15vnRMqVqjdW9RYq5CHenf/84v9gzjAGre4us6ilgBpWqU41jctHk/4roymcbxvmKuOPj+4kLy3CSY2XM5nzcmeZIjUGcpTPesJrr7/utfgdRRBafOy9NzpbPp6NPgDM//4oftnPnTu644w7uv/9+stksW7du5ZZbbuE1r3kN+/bt49FHk3IODQ2xfPlyvvSlL824B1FkJoq5iFOOW8Epx730y6C7M16pMV5OvrwD/HmUIcoY5VrMWOk9jJVrFKoxy6s1CuUao+UqIxNVuks1TipX2VCu0ZWPWNVbZGVXnkot5kCpyu/Cl/Wxco1SpXbwS281jpMvtNWYp8pV7gv7GcmX30KmhmXz5LNJT1OlGicNo0pMZSLGkykdAAAPDklEQVRpjNVipxo798Unk8mcTL54MauiUXLL
VnP0sg66i1mqNadSS764nljMcmouYqJaY3i8yv6xMk8MT/CjwQn2HXg1E5Vk0pEMMT2McWR2nGq2i2F6ydQylEZiRgeruEPX7wZY0ZVnZXeBjcf08u6Nq8lHGUYmKrwwXmHgQIkdeye4d3gNsIYoShohhWxEMZfBrMTPy3nGyiup1g7gHCB2pyOXfIHvyEVUajGlasxoqcjQ+K+nnexysj3x4v2eavBayHD86h46X/sBxkbLPPnMC9y181mqM54B91DvVHchy+iv9uC+52X2n14+m+H1R/eQz2Z4bM8IPxzp48DOZ4Bnpn3s4nLfjPbKZzNkM0ZkhpM0AA9//s2SxhBArYmzE+ej5ARApu71M1KqvmifI7vzlKsxwxPVBkdoLGPQU8zRXchSyGUohvdyNY6pxocawpPL5GdDHDu12BkrVxkaTxr99eHfcP4f8s7jj2r0L+edGoiztGndEVzw9vX8yWsXpqJERNLonnvu4cEHH2TTpuSE5fj4OGvXruWMM85g165dXHLJJZx11lmcfvrpLS6pLEVmRmc+S2c+y8pWF6aFarEzWk6+AHfls0QNeinck0ZpboEn56vFzv6xMhOVGvkoQzZ8qc9FRi6TeVGPShw7lThmohwzNF5m/1iF2J3eYpbuQtLL1yi2Si3pgRqv1ChVYkrVGqVqTDksAJnwZX5Vd4GjegsUshFxeN72jyYN44GREqVqjYwZZhBZ8uU/Y9CRj+jIReSiDNU4abz3FnNsWN39kud0eKLCnqEJnhueOFiOSjgxUIud2D1pQFlSX8s6cgd7+DrzEZEZe0dK/H5onMHR5LmbqMTkIqO3I0dvMUeUMWpxfLAntlRNevF6Clm6C1m6Clm6ChGd+Yhq7IxMVDkwUaWQy9BbzNGZjw7Wz3ilxsBIib0jyaW7a1d00reig3Itpn//GM8MTTBaqjJerjFRrRGHExyGUcxlKOaiUJ4kPvdDPbBdhSy9HTm68hFmSSNu8gRJuRqTCT2FxVzEEZ15VvcWWNldIPbk/sl6HS/XGJmoMjha5vnREgdKtYP1G9edWTiiK8+R3QXy2Qx7hpIe8kzG2HhMLxtf1Us2Y+wdLjFwoIQBuboTSuVqzFi5yvB4lRfGk17Wyd5NM8hmMkSRJe+l2qG6jD0ZxRpZ0jvamc+yvDPHso7cwdeGAetWds3jO+vlqYE4S1HG+MyfTjFwXUSklWbR09cs7s4FF1zAZz/72Zfct2PHDu688062bdvG7bffzrXXXtuCEopIlDF6i1OMoQrMrCVXTUUZ48jumf3ERCZjFDIRhWzEss4cx82w1Z9crpihZ5rnoNH/Sy6tzXHsyrmPD5vUW8zRe3SO44/umfUxjuot8sY1y6bfscleu3r2MUjr6DcaRESkaTZv3sw3v/lN9u3bBySznT711FMMDAzg7nzwgx/kiiuu4OGHHwagp6eHkZEppoEXERGRplMPooiINM0JJ5zA5ZdfzubNm4njmFwuxzXXXEMURVx44YW4O2bGlVdeCcD555/PRRddNKNJakRERGT+aRZTEZE2sJhmMV0oS20WUzPbAnwRiIDr3P3zh93/MeALHJrd4svuft3LHVP5UURk6dAspiIiIm3CzCLgauDdQD/woJltd/dfHLbrre7+8QUvoIiItA2NQRQREVn83gLsdvffuHsZuAU4p8VlEhGRNqQGooiIyOK3Bni6br0/bDvc+81sh5ndZmZrF6ZoIiLSTpraQDSzLWa2y8x2m9mlDe7/mJkNmNkjYbmo7r7zzOyJsJxXt/0UM3s0HHObTf66pIjIErcUxpRPWkqxBo1y3eFPwn8B69z9ROAe4MaGBzLbamYPmdlDAwMD81xMERFJu6Y1EOvGS5wJbAQ+bGaNfjjwVnc/KSzXhcceAVwOvJXksprLzWxF2P8rwFZgQ1i2NCsGEZG0KBaLDA4OLomGk7szODhIsVhsdVEWUj9Q3yPYB/y+fgd3H3T3Ulj9KnBKowO5+7Xuvsnd
N61ataophRURkfRq5iQ1B8dLAJjZ5HiJwwfUN3IGcLe7Px8eezewxcx+CPS6+4/D9puA9wF3zn/xRUTSo6+vj/7+fpZKj1CxWKSvr6/VxVhIDwIbzGw9ySyl5wJ/Vr+DmR3j7nvC6tnA4wtbRBERaQfNbCA2Gi/x1gb7vd/M3gH8Cvikuz89xWPXhKW/wXYRkSUtl8uxfv36VhdDmsTdq2b2ceAukp+5uN7dHzOzfwAecvftwCfM7GygCjwPfKxlBRYRkdRqZgNxpuMlbnb3kpn9Bcl4idNe5rEzOWbyz822klyKyrHHHjvTMouIiCxK7v5d4LuHbftM3e3LgMsWulwiItJemjlJzVzGS0z12P5we8pj1h1bYyxERERERERegWY2EA+OlzCzPMl4ie31O5jZMXWr9eMl7gJON7MVYXKa04G7wtiKETM7Ncxe+lHg202MQUREREREZMmwZs54Z2ZnAVdxaLzE5+rHS5jZP5E0DCfHS1zs7r8Mj70A+PtwqM+5+9fD9k3ADUAHyeQ0f+XTBGFmA8Dv5hjOkcC+OR5jMVN86dXOsYHiS7uFju84d9dlIzM0T/kR2vt13M6xgeJLO8WXXq2IbUY5sqkNxHZiZg+5+6ZWl6NZFF96tXNsoPjSrt3jk0Q713M7xwaKL+0UX3ot5tiaeYmpiIiIiIiIpIgaiCIiIiIiIgKogfhKXNvqAjSZ4kuvdo4NFF/atXt8kmjnem7n2EDxpZ3iS69FG5vGIIqIiIiIiAigHkQREREREREJ1ECcATPbYma7zGy3mV3a6vLMhZmtNbMfmNnjZvaYmV0Sth9hZneb2RPh74pWl3UuzCwys5+b2XfC+nozeyDEd2v4bc5UMrPlZnabmf0y1OMftVP9mdknw2tzp5ndbGbFNNefmV1vZnvNbGfdtob1ZYlt4bNmh5m9uXUln94UsX0hvDZ3mNkdZra87r7LQmy7zOyM1pRa5lM75UdYGjlS+THVdaf8mJL8COnOkWogTsPMIuBq4ExgI/BhM9vY2lLNSRX4G3d/PXAq8JchnkuBe919A3BvWE+zS4DH69avBP41xLcfuLAlpZofXwS+5+6vA95EEmdb1J+ZrQE+AWxy9zeS/IbquaS7/m4Athy2bar6OhPYEJatwFcWqIyzdQMvje1u4I3ufiLwK+AygPA5cy7whvCYfwufr5JSbZgfYWnkSOXHFFJ+TF1+hBTnSDUQp/cWYLe7/8bdy8AtwDktLtOsufsed3843B4h+fBcQxLTjWG3G4H3taaEc2dmfcB7gOvCugGnAbeFXVIbn5n1Au8Avgbg7mV3H6KN6g/IAh1mlgU6gT2kuP7c/UfA84dtnqq+zgFu8sRPgOVmdszClPSVaxSbu3/f3ath9SdAX7h9DnCLu5fc/bfAbpLPV0mvtsqP0P45UvkxvfEFyo8pyY+Q7hypBuL01gBP1633h22pZ2brgJOBB4DV7r4HkgQJHNW6ks3ZVcDfAXFYXwkM1b0h01yHrwYGgK+HS4SuM7Mu2qT+3P0Z4J+Bp0gS3wvAz2if+ps0VX212+fNBcCd4Xa7xSZtXqdtmiOVH1Nad8qPbfl5s2hzpBqI07MG21I/9auZdQO3A3/t7sOtLs98MbP3Anvd/Wf1mxvsmtY6zAJvBr7i7icDo6T0cplGwliDc4D1wKuALpLLSg6X1vqbTtu8Vs3s0ySX631jclOD3VIZmxzUtnXajjlS+THdlB/b6rW66HOkGojT6wfW1q33Ab9vUVnmhZnlSBLfN9z9W2Hzc5Nd9eHv3laVb47eDpxtZk+SXO50GskZ0+XhkgxIdx32A/3u/kBYv40kIbZL/W0GfuvuA+5eAb4FvI32qb9JU9VXW3zemNl5wHuBj/ih31Jqi9jkRdqyTts4Ryo/prfuQPmxbT5v0pAj1UCc3oPAhjBLVJ5kAOn2Fpdp1sJ4g68Bj7v7v9TdtR04L9w+D/j2QpdtPrj7Ze7e5+7rSOrqv939I8AP
gA+E3dIc37PA02Z2fNj0LuAXtEn9kVw6c6qZdYbX6mR8bVF/daaqr+3AR8NsbacCL0xeapMWZrYF+BRwtruP1d21HTjXzApmtp5kooGftqKMMm/aKj9Ce+dI5UcgxfGh/Jj6/AgpypHurmWaBTiLZKahXwOfbnV55hjLH5N0We8AHgnLWSTjEO4Fngh/j2h1Wech1ncC3wm3X03yRtsN/AdQaHX55hDXScBDoQ7/E1jRTvUHXAH8EtgJ/DtQSHP9ATeTjBepkJwhvHCq+iK5xOTq8FnzKMlsdS2P4RXGtptkHMXk58s1dft/OsS2Cziz1eXXMi+vgbbJjyGeJZEjlR9bX9ZZxqf8mJL8+DLxpSJHWiiQiIiIiIiILHG6xFREREREREQANRBFREREREQkUANRREREREREADUQRUREREREJFADUURERERERAA1EEUWPTOrmdkjdcul83jsdWa2c76OJyIislCUH0WaI9vqAojItMbd/aRWF0JERGSRUX4UaQL1IIqklJk9aWZXmtlPw/IHYftxZnavme0If48N21eb2R1m9n9heVs4VGRmXzWzx8zs+2bW0bKgRERE5kj5UWRu1EAUWfw6DruE5kN19w27+1uALwNXhW1fBm5y9xOBbwDbwvZtwP+4+5uANwOPhe0bgKvd/Q3AEPD+JscjIiIyH5QfRZrA3L3VZRCRl2FmB9y9u8H2J4HT3P03ZpYDnnX3lWa2DzjG3Sth+x53P9LMBoA+dy/VHWMdcLe7bwjrnwJy7v6PzY9MRERk9pQfRZpDPYgi6eZT3J5qn0ZKdbdraGyyiIikn/KjyCypgSiSbh+q+/vjcPt+4Nxw+yPA/4bb9wIXA5hZZGa9C1VIERGRBab8KDJLOhMisvh1mNkjdevfc/fJqbwLZvYAycmeD4dtnwCuN7O/BQaA88P2S4BrzexCkjOhFwN7ml56ERGR5lB+FGkCjUEUSakwxmKTu+9rdVlEREQWC+VHkbnRJaYiIiIiIiICqAdRREREREREAvUgioiIiIiICKAGooiIiIiIiARqIIqIiIiIiAigBqKIiIiIiIgEaiCKiIiIiIgIoAaiiIiIiIiIBP8P+MbIib8alNwAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 1080x432 with 2 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# plot the accuracy and loss\n",
    "plt.figure(figsize=(15,6))\n",
    "plt.subplot(1,2,1)\n",
    "plt.plot(history.history['acc'])\n",
    "plt.plot(history.history['val_acc'])\n",
    "plt.title('Model accuracy')\n",
    "plt.ylabel('Accuracy')\n",
    "plt.xlabel('Epoch')\n",
    "plt.legend(['Train', 'Test'], loc='lower right')\n",
    "plt.subplot(1,2,2)\n",
    "plt.plot(history.history['loss'])\n",
    "plt.plot(history.history['val_loss'])\n",
    "plt.title('Model loss')\n",
    "plt.ylabel('Loss')\n",
    "plt.xlabel('Epoch')\n",
    "plt.legend(['Train', 'Test'], loc='upper right')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "From above all data we can see that our DNN model improved a lot with much better accuracy than previous model with all features. Also our Random forest model accuracy imroved a bit. With this we can do our final prediction."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Now, we will run our model on entire dataframe, to check the prediction and actual results of all the stocks in our dataset."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>year</th>\n",
       "      <th>recession_prob</th>\n",
       "      <th>treasury_rate</th>\n",
       "      <th>STOCK_TIKR</th>\n",
       "      <th>CASH_CASH_EQUIVALENT</th>\n",
       "      <th>SHORT_TERM_INVESTMNET</th>\n",
       "      <th>CASH_SHORT_TERM_INVST</th>\n",
       "      <th>RECEIVABLES</th>\n",
       "      <th>INVENTORIES</th>\n",
       "      <th>TOTAL_CURRENT_ASSETS</th>\n",
       "      <th>...</th>\n",
       "      <th>CONSOLIDATED_INCOME</th>\n",
       "      <th>EARNINGS_BEFORE_MARGIN</th>\n",
       "      <th>NET_PROFIT_MARGIN</th>\n",
       "      <th>sector</th>\n",
       "      <th>mktCap</th>\n",
       "      <th>AVG_DAILY_RET</th>\n",
       "      <th>ANNUAL_RETURN</th>\n",
       "      <th>ANN_RET_SnP</th>\n",
       "      <th>TREND</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>2009</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "      <td>FORD</td>\n",
       "      <td>20,103,502.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>20,103,502.000000</td>\n",
       "      <td>3,259,462.000000</td>\n",
       "      <td>666,485.000000</td>\n",
       "      <td>222,000.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>-1,394,125.000000</td>\n",
       "      <td>-0.062700</td>\n",
       "      <td>-0.079900</td>\n",
       "      <td>3</td>\n",
       "      <td>9310993.35</td>\n",
       "      <td>0.000825</td>\n",
       "      <td>35.135072</td>\n",
       "      <td>23.450000</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>2009</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "      <td>FORTY</td>\n",
       "      <td>100,205,000.000000</td>\n",
       "      <td>58,009,000.000000</td>\n",
       "      <td>158,214,000.000000</td>\n",
       "      <td>130,237,000.000000</td>\n",
       "      <td>2,439,000.000000</td>\n",
       "      <td>313,365,000.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>33,030,000.000000</td>\n",
       "      <td>0.058300</td>\n",
       "      <td>0.040600</td>\n",
       "      <td>9</td>\n",
       "      <td>969517079.96</td>\n",
       "      <td>-0.000680</td>\n",
       "      <td>-21.979784</td>\n",
       "      <td>23.450000</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>2009</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "      <td>FSI</td>\n",
       "      <td>2,126,150.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>2,126,150.000000</td>\n",
       "      <td>1,544,364.000000</td>\n",
       "      <td>2,796,307.000000</td>\n",
       "      <td>6,588,174.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>-743,441.000000</td>\n",
       "      <td>-0.023500</td>\n",
       "      <td>-0.076000</td>\n",
       "      <td>1</td>\n",
       "      <td>28726435.92</td>\n",
       "      <td>0.001973</td>\n",
       "      <td>105.361674</td>\n",
       "      <td>23.450000</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>2009</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "      <td>FTFT</td>\n",
       "      <td>14,404,500.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>14,404,500.000000</td>\n",
       "      <td>27,621,753.000000</td>\n",
       "      <td>4,925,625.000000</td>\n",
       "      <td>48,487,920.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>16,404,600.000000</td>\n",
       "      <td>0.358100</td>\n",
       "      <td>0.256400</td>\n",
       "      <td>4</td>\n",
       "      <td>11250795.92</td>\n",
       "      <td>0.001264</td>\n",
       "      <td>58.592164</td>\n",
       "      <td>23.450000</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>2009</td>\n",
       "      <td>0.120000</td>\n",
       "      <td>3.266284</td>\n",
       "      <td>GBR</td>\n",
       "      <td>155,000.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>155,000.000000</td>\n",
       "      <td>203,000.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>925,000.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>-2,210,000.000000</td>\n",
       "      <td>-0.539300</td>\n",
       "      <td>-0.539300</td>\n",
       "      <td>5</td>\n",
       "      <td>3324780.00</td>\n",
       "      <td>0.000345</td>\n",
       "      <td>13.400058</td>\n",
       "      <td>23.450000</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 85 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   year  recession_prob  treasury_rate STOCK_TIKR  CASH_CASH_EQUIVALENT  \\\n",
       "0  2009        0.120000       3.266284       FORD     20,103,502.000000   \n",
       "1  2009        0.120000       3.266284      FORTY    100,205,000.000000   \n",
       "2  2009        0.120000       3.266284        FSI      2,126,150.000000   \n",
       "3  2009        0.120000       3.266284       FTFT     14,404,500.000000   \n",
       "4  2009        0.120000       3.266284        GBR        155,000.000000   \n",
       "\n",
       "   SHORT_TERM_INVESTMNET  CASH_SHORT_TERM_INVST        RECEIVABLES  \\\n",
       "0               0.000000      20,103,502.000000   3,259,462.000000   \n",
       "1      58,009,000.000000     158,214,000.000000 130,237,000.000000   \n",
       "2               0.000000       2,126,150.000000   1,544,364.000000   \n",
       "3               0.000000      14,404,500.000000  27,621,753.000000   \n",
       "4               0.000000         155,000.000000     203,000.000000   \n",
       "\n",
       "       INVENTORIES  TOTAL_CURRENT_ASSETS     ...      CONSOLIDATED_INCOME  \\\n",
       "0   666,485.000000        222,000.000000     ...        -1,394,125.000000   \n",
       "1 2,439,000.000000    313,365,000.000000     ...        33,030,000.000000   \n",
       "2 2,796,307.000000      6,588,174.000000     ...          -743,441.000000   \n",
       "3 4,925,625.000000     48,487,920.000000     ...        16,404,600.000000   \n",
       "4         0.000000        925,000.000000     ...        -2,210,000.000000   \n",
       "\n",
       "   EARNINGS_BEFORE_MARGIN  NET_PROFIT_MARGIN  sector        mktCap  \\\n",
       "0               -0.062700          -0.079900       3    9310993.35   \n",
       "1                0.058300           0.040600       9  969517079.96   \n",
       "2               -0.023500          -0.076000       1   28726435.92   \n",
       "3                0.358100           0.256400       4   11250795.92   \n",
       "4               -0.539300          -0.539300       5    3324780.00   \n",
       "\n",
       "   AVG_DAILY_RET  ANNUAL_RETURN  ANN_RET_SnP  TREND  Prediction  \n",
       "0       0.000825      35.135072    23.450000      1           1  \n",
       "1      -0.000680     -21.979784    23.450000      0           0  \n",
       "2       0.001973     105.361674    23.450000      1           1  \n",
       "3       0.001264      58.592164    23.450000      1           1  \n",
       "4       0.000345      13.400058    23.450000      0           0  \n",
       "\n",
       "[5 rows x 85 columns]"
      ]
     },
     "execution_count": 48,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Y_prediction_full = random_forest.predict(X)\n",
    "\n",
    "ML_Portfolio_All_statemens.dfmerge_param['Prediction'] = Y_prediction_full\n",
    "ML_Portfolio_All_statemens.dfmerge_param.head()\n",
    "ML_Portfolio_All_statemens.dfmerge_param.to_csv('Prediction_All_stmt_few_features.csv')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "After predicting the dataset we will filer 'UP trend' (TREND=1) stocks only for mid-size companies (having market cap at least 1 billion) into consideration to finalize our ML or AL driven portfolio, to reduce the risk of investors by avoid penny and small stocks."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\Sudip\\Anaconda3\\lib\\site-packages\\ipykernel_launcher.py:2: FutureWarning: convert_objects is deprecated.  To re-infer data dtypes for object columns, use Series.infer_objects()\n",
      "For all other conversions use the data-type specific converters pd.to_datetime, pd.to_timedelta and pd.to_numeric.\n",
      "  \n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>STOCK_TIKR</th>\n",
       "      <th>year</th>\n",
       "      <th>mktCap</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>FORD</td>\n",
       "      <td>2009</td>\n",
       "      <td>9,310,993.350000</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>FORTY</td>\n",
       "      <td>2009</td>\n",
       "      <td>969,517,079.960000</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>FSI</td>\n",
       "      <td>2009</td>\n",
       "      <td>28,726,435.920000</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>FTFT</td>\n",
       "      <td>2009</td>\n",
       "      <td>11,250,795.920000</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>GBR</td>\n",
       "      <td>2009</td>\n",
       "      <td>3,324,780.000000</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "  STOCK_TIKR  year             mktCap  Prediction\n",
       "0       FORD  2009   9,310,993.350000           1\n",
       "1      FORTY  2009 969,517,079.960000           0\n",
       "2        FSI  2009  28,726,435.920000           1\n",
       "3       FTFT  2009  11,250,795.920000           1\n",
       "4        GBR  2009   3,324,780.000000           0"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_ml_port_1 = ML_Portfolio_All_statemens.dfmerge_param[['STOCK_TIKR','year','mktCap','Prediction']].copy()\n",
    "df_ml_port_1['mktCap'] = df_ml_port_1['mktCap'].convert_objects(convert_numeric=True)\n",
    "df_ml_port_1.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>STOCK_TIKR</th>\n",
       "      <th>year</th>\n",
       "      <th>mktCap</th>\n",
       "      <th>Prediction</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>136</th>\n",
       "      <td>CMCSA</td>\n",
       "      <td>2009</td>\n",
       "      <td>197,130,041,640.769989</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>139</th>\n",
       "      <td>GE</td>\n",
       "      <td>2009</td>\n",
       "      <td>101,028,605,725.000000</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>157</th>\n",
       "      <td>XOM</td>\n",
       "      <td>2009</td>\n",
       "      <td>293,635,696,275.349976</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>164</th>\n",
       "      <td>GILD</td>\n",
       "      <td>2009</td>\n",
       "      <td>87,219,110,000.000000</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>167</th>\n",
       "      <td>NUE</td>\n",
       "      <td>2009</td>\n",
       "      <td>17,550,398,648.700001</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "    STOCK_TIKR  year                 mktCap  Prediction\n",
       "136      CMCSA  2009 197,130,041,640.769989           1\n",
       "139         GE  2009 101,028,605,725.000000           1\n",
       "157        XOM  2009 293,635,696,275.349976           1\n",
       "164       GILD  2009  87,219,110,000.000000           1\n",
       "167        NUE  2009  17,550,398,648.700001           1"
      ]
     },
     "execution_count": 50,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "1470"
      ]
     },
     "execution_count": 50,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_ml_port_up = df_ml_port_1[(df_ml_port_1['Prediction'] == 1) & (df_ml_port_1['mktCap'] > 10000000000)]\n",
    "df_ml_port_up.head()\n",
    "df_ml_port_up.shape[0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We have 10 years of data (10 rows for each company). We will take only those stocks into our AL portfolio for which stock is at least 6 times trend is UP. (60% of time stock performed better than S&P500 and positive trend)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>STOCK_TIKR</th>\n",
       "      <th>COUNT</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>A</td>\n",
       "      <td>3</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>AAL</td>\n",
       "      <td>4</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>AAP</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>AAPL</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ABB</td>\n",
       "      <td>3</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "  STOCK_TIKR  COUNT\n",
       "0          A      3\n",
       "1        AAL      4\n",
       "2        AAP      1\n",
       "3       AAPL      1\n",
       "4        ABB      3"
      ]
     },
     "execution_count": 59,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>STOCK_TIKR</th>\n",
       "      <th>COUNT</th>\n",
       "      <th>Next_Yr_beat_SnP</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>A</td>\n",
       "      <td>3</td>\n",
       "      <td>No</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>AAL</td>\n",
       "      <td>4</td>\n",
       "      <td>No</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>AAP</td>\n",
       "      <td>1</td>\n",
       "      <td>No</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>AAPL</td>\n",
       "      <td>1</td>\n",
       "      <td>No</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ABB</td>\n",
       "      <td>3</td>\n",
       "      <td>No</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "  STOCK_TIKR  COUNT Next_Yr_beat_SnP\n",
       "0          A      3               No\n",
       "1        AAL      4               No\n",
       "2        AAP      1               No\n",
       "3       AAPL      1               No\n",
       "4        ABB      3               No"
      ]
     },
     "execution_count": 59,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mycountval = df_ml_port_up.groupby('STOCK_TIKR')['Prediction'].count()\n",
    "#mycountval\n",
    "dfcountval = pd.DataFrame(mycountval)\n",
    "dfcountval.rename(columns={'Prediction':'COUNT'}, inplace=True)\n",
    "dfcountval = dfcountval.reset_index()\n",
    "dfcountval.head()\n",
    "dfcountval ['Next_Yr_beat_SnP'] = np.where( dfcountval['COUNT'] >= 6,'Yes','No')\n",
    "dfcountval.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>STOCK_TIKR</th>\n",
       "      <th>COUNT</th>\n",
       "      <th>Next_Yr_beat_SnP</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>ALNY</td>\n",
       "      <td>7</td>\n",
       "      <td>Yes</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>AMD</td>\n",
       "      <td>6</td>\n",
       "      <td>Yes</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>138</th>\n",
       "      <td>CXO</td>\n",
       "      <td>6</td>\n",
       "      <td>Yes</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>158</th>\n",
       "      <td>DQ</td>\n",
       "      <td>6</td>\n",
       "      <td>Yes</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>186</th>\n",
       "      <td>ERJ</td>\n",
       "      <td>6</td>\n",
       "      <td>Yes</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "    STOCK_TIKR  COUNT Next_Yr_beat_SnP\n",
       "25        ALNY      7              Yes\n",
       "28         AMD      6              Yes\n",
       "138        CXO      6              Yes\n",
       "158         DQ      6              Yes\n",
       "186        ERJ      6              Yes"
      ]
     },
     "execution_count": 60,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "15"
      ]
     },
     "execution_count": 60,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_ml_port = dfcountval[dfcountval['Next_Yr_beat_SnP'] == 'Yes']\n",
    "df_ml_port.head()\n",
    "df_ml_port.shape[0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We got list of 15 stocks which will perform better than S&P500 and positive trend. Few stocks have 60% assurance and few have 70 to 80% of assuarance."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
