{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Import nibabel package to read NIfTI images"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "from nibabel.testing import data_path\n",
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "from keras import applications\n",
    "import nibabel as nib\n",
    "import keras"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Enable GPU "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
     "# TensorFlow 1.x session setup: expose 1 GPU and up to 56 CPU devices to TF.\n",
     "config = tf.ConfigProto( device_count = {'GPU': 1 , 'CPU': 56} ) \n",
     "sess = tf.Session(config=config) \n",
     "# Register this session with Keras so its ops run on the configured devices.\n",
     "keras.backend.set_session(sess)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Check the GPU "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[name: \"/device:CPU:0\"\n",
      "device_type: \"CPU\"\n",
      "memory_limit: 268435456\n",
      "locality {\n",
      "}\n",
      "incarnation: 2403825222157431229\n",
      ", name: \"/device:XLA_GPU:0\"\n",
      "device_type: \"XLA_GPU\"\n",
      "memory_limit: 17179869184\n",
      "locality {\n",
      "}\n",
      "incarnation: 9104771758140632175\n",
      "physical_device_desc: \"device: XLA_GPU device\"\n",
      ", name: \"/device:XLA_CPU:0\"\n",
      "device_type: \"XLA_CPU\"\n",
      "memory_limit: 17179869184\n",
      "locality {\n",
      "}\n",
      "incarnation: 2142168042465693342\n",
      "physical_device_desc: \"device: XLA_CPU device\"\n",
      ", name: \"/device:GPU:0\"\n",
      "device_type: \"GPU\"\n",
      "memory_limit: 11558980813\n",
      "locality {\n",
      "  bus_id: 1\n",
      "  links {\n",
      "  }\n",
      "}\n",
      "incarnation: 14265821299850896445\n",
      "physical_device_desc: \"device: 0, name: TITAN Xp, pci bus id: 0000:01:00.0, compute capability: 6.1\"\n",
      "]\n"
     ]
    }
   ],
   "source": [
     "from tensorflow.python.client import device_lib \n",
     "# List every device TensorFlow can see, to confirm the GPU was picked up.\n",
     "print(device_lib.list_local_devices())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Import all the keras required library"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import glob\n",
    "from keras.losses import categorical_crossentropy\n",
    "from keras.models import Sequential, Model \n",
    "from keras.layers import Dense, Conv2D, Dropout, Flatten, MaxPooling2D\n",
    "from keras import layers\n",
    "\n",
    "from sklearn.model_selection import train_test_split"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "#### Fetch all the preprocessed images from the folders; preprocessing was done in SPM (MATLAB)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
     "# NOTE(review): hardcoded absolute paths under /home/kushpal — replace with a\n",
     "# configurable DATA_DIR before running elsewhere. The glob pattern matches\n",
     "# SPM-preprocessed files (filename prefix 'w', i.e. spatially normalised)\n",
     "# nested four directory levels below each numeric subject-ID folder.\n",
     "Adni_normal_6_months=glob.glob('/home/kushpal/Kushpal/ADNI_Normal/Normal-m06-3.0T(27)/[0-9]*/*/*/*/w*')\n",
     "Adni_normal_12_months=glob.glob('/home/kushpal/Kushpal/ADNI_Normal/Normal-m12-3.0T(27)/[0-9]*/*/*/*/w*')\n",
     "Adni_normal_24_months=glob.glob('/home/kushpal/Kushpal/ADNI_Normal/Normal-m24-3.0T(27)/[0-9]*/*/*/*/w*')\n",
     "Adni_normal_36_months=glob.glob('/home/kushpal/Kushpal/ADNI_Normal/Normal-m36-3.0T(27)/[0-9]*/*/*/*/w*')\n",
     "\n",
     "Mci_bl_3_months=glob.glob('/home/kushpal/Kushpal/MCI/MCI-bl-3.0T (22)/[0-9]*/*/*/*/w*')\n",
     "Mci_6_months=glob.glob('/home/kushpal/Kushpal/MCI/MCI-m06-3.0T (22)/[0-9]*/*/*/*/w*')\n",
     "Mci_12_months=glob.glob('/home/kushpal/Kushpal/MCI/MCI-m12-3.0T (22)/[0-9]*/*/*/*/w*')\n",
     "Mci_24_months=glob.glob('/home/kushpal/Kushpal/MCI/MCI-m24-3.0T (22)/[0-9]*/*/*/*/w*')\n",
     "Mci_36_months=glob.glob('/home/kushpal/Kushpal/MCI/MCI-m36-3.0T (22)/[0-9]*/*/*/*/w*')\n",
     "\n",
     "Ad_bl_3_months=glob.glob('/home/kushpal/Kushpal/AD-bl-3.0T(22)/[0-9]*/*/*/*/w*')\n",
     "Ad_6_months=glob.glob('/home/kushpal/Kushpal/AD-m06-3.0T(22)/[0-9]*/*/*/*/w*')\n",
     "Ad_12_months=glob.glob('/home/kushpal/Kushpal/AD-m12-3.0T(22)/[0-9]*/*/*/*/w*')\n",
     "Ad_24_months=glob.glob('/home/kushpal/Kushpal/AD-m24-3.0T(22)/[0-9]*/*/*/*/w*')\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "#### Label all the images with their corresponding categories (0, 1, 2): 0 = CN, 1 = MCI, 2 = AD."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "Adni_normal_image_X=[]\n",
    "Adni_normal_image_Y=[]\n",
    "for i in range(len(Adni_normal_6_months)):\n",
    "    newimg = nib.load(Adni_normal_6_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Adni_normal_image_X.append(data)\n",
    "    Adni_normal_image_Y.append(0)\n",
    "for i in range(len(Adni_normal_12_months)):\n",
    "    newimg = nib.load(Adni_normal_12_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Adni_normal_image_X.append(data)\n",
    "    Adni_normal_image_Y.append(0)\n",
    "    \n",
    "for i in range(len(Adni_normal_24_months)):\n",
    "    newimg = nib.load(Adni_normal_24_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Adni_normal_image_X.append(data)\n",
    "    Adni_normal_image_Y.append(0)\n",
    "for i in range(len(Adni_normal_36_months)):\n",
    "    newimg = nib.load(Adni_normal_36_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Adni_normal_image_X.append(data)\n",
    "    Adni_normal_image_Y.append(0)\n",
    "\n",
    "\n",
    "Mci_image_X=[]\n",
    "Mci_image_Y=[]\n",
    "for i in range(len(Mci_bl_3_months)):\n",
    "    newimg = nib.load(Mci_bl_3_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Mci_image_X.append(data)\n",
    "    Mci_image_Y.append(1)\n",
    "for i in range(len(Mci_6_months)):\n",
    "    newimg = nib.load(Mci_6_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Mci_image_X.append(data)\n",
    "    Mci_image_Y.append(1)\n",
    "for i in range(len(Mci_12_months)):\n",
    "    newimg = nib.load(Mci_12_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Mci_image_X.append(data)\n",
    "    Mci_image_Y.append(1)\n",
    "    \n",
    "for i in range(len(Mci_24_months)):\n",
    "    newimg = nib.load(Mci_24_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Mci_image_X.append(data)\n",
    "    Mci_image_Y.append(1)\n",
    "    \n",
    "for i in range(len(Mci_36_months)):\n",
    "    newimg = nib.load(Mci_36_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Mci_image_X.append(data)\n",
    "    Mci_image_Y.append(1)\n",
    "\n",
    "\n",
    "Ad_image_X=[]\n",
    "Ad_image_Y=[]\n",
    "for i in range(len(Ad_bl_3_months)):\n",
    "    newimg = nib.load(Ad_bl_3_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Ad_image_X.append(data)\n",
    "    Ad_image_Y.append(2)\n",
    "for i in range(len(Ad_6_months)):\n",
    "    newimg = nib.load(Ad_6_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Ad_image_X.append(data)\n",
    "    Ad_image_Y.append(2)\n",
    "for i in range(len(Ad_12_months)):\n",
    "    newimg = nib.load(Ad_12_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Ad_image_X.append(data)\n",
    "    Ad_image_Y.append(2)\n",
    "    \n",
    "for i in range(len(Ad_24_months)):\n",
    "    newimg = nib.load(Ad_24_months[i])\n",
    "    data = newimg.get_data()\n",
    "    Ad_image_X.append(data)\n",
    "    Ad_image_Y.append(2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "Total_data_X=[]\n",
    "Total_data_Y=[]\n",
    "for i in range(len(Adni_normal_image_X)):\n",
    "    Total_data_X.append(Adni_normal_image_X[i])\n",
    "    Total_data_Y.append(Adni_normal_image_Y[i])\n",
    "for i in range(len(Mci_image_X)):\n",
    "    Total_data_X.append(Mci_image_X[i])\n",
    "    Total_data_Y.append(Mci_image_Y[i])\n",
    "for i in range(len(Ad_image_X)):\n",
    "    Total_data_X.append(Ad_image_X[i])\n",
    "    Total_data_Y.append(Ad_image_Y[i])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Total no of 3D images (303 X 121 X 145 X 121)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "303"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Total number of 3D volumes loaded across all three classes.\n",
     "len(Total_data_X)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Data Conversion from 3D to 2D and introduce 3 channels (RGB) because the predefined model always takes 2D images with 3 channels in Keras."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "Newdata=np.array(Total_data_X)\n",
    "maindata=np.stack([Newdata]*3, axis=-1)\n",
    "\n",
    "maindata=maindata.reshape(303*121,145,121,3)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Label all the converted images with their corresponding category."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "new_y=[]\n",
    "for i in range (len(Total_data_X)):\n",
    "    for j in range(121):\n",
    "        new_y.append(Total_data_Y[i])\n",
    "new_y=np.array(new_y)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### After the conversion of  3D images, total no of 2D images"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "((36663, 145, 121, 3), (36663,))"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check: the slice count and the label count must agree.\n",
     "maindata.shape,new_y.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "#### Split the data into train and test sets in an approximately 67:33 ratio (test_size=0.33)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "X_train, X_test, y_train, y_test = train_test_split(maindata, new_y, test_size=0.33)\n",
    "X_train=np.array(X_train)\n",
    "y_train=np.array(y_train)\n",
    "x_train = X_train.reshape(X_train.shape[0], 145, 121, 3)\n",
    "\n",
    "input_shape = (145, 121,3)\n",
    "x_train = x_train.astype('float32')\n",
    "y_train = keras.utils.to_categorical(y_train)\n",
    "y_old_test=y_test\n",
    "y_test = keras.utils.to_categorical(y_test)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Use the ResNet50 model. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/kushpal/.local/lib/python3.5/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/kushpal/.local/lib/python3.5/site-packages/keras_applications/resnet50.py:265: UserWarning: The output shape of `ResNet50(include_top=False)` has been changed since Keras 2.2.0.\n",
      "  warnings.warn('The output shape of `ResNet50(include_top=False)` '\n",
      "/home/kushpal/.local/lib/python3.5/site-packages/ipykernel_launcher.py:12: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor(\"in..., outputs=Tensor(\"de...)`\n",
      "  if sys.path[0] == '':\n"
     ]
    }
   ],
   "source": [
    "\n",
    "model = applications.ResNet50(weights = \"imagenet\", include_top=False, input_shape =input_shape)\n",
    "\n",
    "\n",
    "# Freeze the layers which you don't want to train. Here I am not freezing any layer.\n",
    "for layer in model.layers:\n",
    "    layer.trainable = True\n",
    "#Adding custom Layers \n",
    "x = model.output\n",
    "x = Flatten()(x)\n",
    "x = Dense(128, activation=\"relu\")(x)\n",
    "predictions = Dense(3, activation=\"softmax\")(x)\n",
    "model= Model(input = model.input, output = predictions)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_1\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_1 (InputLayer)            (None, 145, 121, 3)  0                                            \n",
      "__________________________________________________________________________________________________\n",
      "conv1_pad (ZeroPadding2D)       (None, 151, 127, 3)  0           input_1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1 (Conv2D)                  (None, 73, 61, 64)   9472        conv1_pad[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "bn_conv1 (BatchNormalization)   (None, 73, 61, 64)   256         conv1[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "activation_1 (Activation)       (None, 73, 61, 64)   0           bn_conv1[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "pool1_pad (ZeroPadding2D)       (None, 75, 63, 64)   0           activation_1[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_1 (MaxPooling2D)  (None, 37, 31, 64)   0           pool1_pad[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "res2a_branch2a (Conv2D)         (None, 37, 31, 64)   4160        max_pooling2d_1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "bn2a_branch2a (BatchNormalizati (None, 37, 31, 64)   256         res2a_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_2 (Activation)       (None, 37, 31, 64)   0           bn2a_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res2a_branch2b (Conv2D)         (None, 37, 31, 64)   36928       activation_2[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2a_branch2b (BatchNormalizati (None, 37, 31, 64)   256         res2a_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_3 (Activation)       (None, 37, 31, 64)   0           bn2a_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res2a_branch2c (Conv2D)         (None, 37, 31, 256)  16640       activation_3[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "res2a_branch1 (Conv2D)          (None, 37, 31, 256)  16640       max_pooling2d_1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "bn2a_branch2c (BatchNormalizati (None, 37, 31, 256)  1024        res2a_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "bn2a_branch1 (BatchNormalizatio (None, 37, 31, 256)  1024        res2a_branch1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "add_1 (Add)                     (None, 37, 31, 256)  0           bn2a_branch2c[0][0]              \n",
      "                                                                 bn2a_branch1[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "activation_4 (Activation)       (None, 37, 31, 256)  0           add_1[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res2b_branch2a (Conv2D)         (None, 37, 31, 64)   16448       activation_4[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2b_branch2a (BatchNormalizati (None, 37, 31, 64)   256         res2b_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_5 (Activation)       (None, 37, 31, 64)   0           bn2b_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res2b_branch2b (Conv2D)         (None, 37, 31, 64)   36928       activation_5[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2b_branch2b (BatchNormalizati (None, 37, 31, 64)   256         res2b_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_6 (Activation)       (None, 37, 31, 64)   0           bn2b_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res2b_branch2c (Conv2D)         (None, 37, 31, 256)  16640       activation_6[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2b_branch2c (BatchNormalizati (None, 37, 31, 256)  1024        res2b_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_2 (Add)                     (None, 37, 31, 256)  0           bn2b_branch2c[0][0]              \n",
      "                                                                 activation_4[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "activation_7 (Activation)       (None, 37, 31, 256)  0           add_2[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res2c_branch2a (Conv2D)         (None, 37, 31, 64)   16448       activation_7[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2c_branch2a (BatchNormalizati (None, 37, 31, 64)   256         res2c_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_8 (Activation)       (None, 37, 31, 64)   0           bn2c_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res2c_branch2b (Conv2D)         (None, 37, 31, 64)   36928       activation_8[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2c_branch2b (BatchNormalizati (None, 37, 31, 64)   256         res2c_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_9 (Activation)       (None, 37, 31, 64)   0           bn2c_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res2c_branch2c (Conv2D)         (None, 37, 31, 256)  16640       activation_9[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "bn2c_branch2c (BatchNormalizati (None, 37, 31, 256)  1024        res2c_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_3 (Add)                     (None, 37, 31, 256)  0           bn2c_branch2c[0][0]              \n",
      "                                                                 activation_7[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "activation_10 (Activation)      (None, 37, 31, 256)  0           add_3[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res3a_branch2a (Conv2D)         (None, 19, 16, 128)  32896       activation_10[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3a_branch2a (BatchNormalizati (None, 19, 16, 128)  512         res3a_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_11 (Activation)      (None, 19, 16, 128)  0           bn3a_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3a_branch2b (Conv2D)         (None, 19, 16, 128)  147584      activation_11[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3a_branch2b (BatchNormalizati (None, 19, 16, 128)  512         res3a_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_12 (Activation)      (None, 19, 16, 128)  0           bn3a_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3a_branch2c (Conv2D)         (None, 19, 16, 512)  66048       activation_12[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3a_branch1 (Conv2D)          (None, 19, 16, 512)  131584      activation_10[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3a_branch2c (BatchNormalizati (None, 19, 16, 512)  2048        res3a_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "bn3a_branch1 (BatchNormalizatio (None, 19, 16, 512)  2048        res3a_branch1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "add_4 (Add)                     (None, 19, 16, 512)  0           bn3a_branch2c[0][0]              \n",
      "                                                                 bn3a_branch1[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "activation_13 (Activation)      (None, 19, 16, 512)  0           add_4[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res3b_branch2a (Conv2D)         (None, 19, 16, 128)  65664       activation_13[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3b_branch2a (BatchNormalizati (None, 19, 16, 128)  512         res3b_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_14 (Activation)      (None, 19, 16, 128)  0           bn3b_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3b_branch2b (Conv2D)         (None, 19, 16, 128)  147584      activation_14[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3b_branch2b (BatchNormalizati (None, 19, 16, 128)  512         res3b_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_15 (Activation)      (None, 19, 16, 128)  0           bn3b_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3b_branch2c (Conv2D)         (None, 19, 16, 512)  66048       activation_15[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3b_branch2c (BatchNormalizati (None, 19, 16, 512)  2048        res3b_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_5 (Add)                     (None, 19, 16, 512)  0           bn3b_branch2c[0][0]              \n",
      "                                                                 activation_13[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_16 (Activation)      (None, 19, 16, 512)  0           add_5[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res3c_branch2a (Conv2D)         (None, 19, 16, 128)  65664       activation_16[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3c_branch2a (BatchNormalizati (None, 19, 16, 128)  512         res3c_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_17 (Activation)      (None, 19, 16, 128)  0           bn3c_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3c_branch2b (Conv2D)         (None, 19, 16, 128)  147584      activation_17[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3c_branch2b (BatchNormalizati (None, 19, 16, 128)  512         res3c_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_18 (Activation)      (None, 19, 16, 128)  0           bn3c_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3c_branch2c (Conv2D)         (None, 19, 16, 512)  66048       activation_18[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3c_branch2c (BatchNormalizati (None, 19, 16, 512)  2048        res3c_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_6 (Add)                     (None, 19, 16, 512)  0           bn3c_branch2c[0][0]              \n",
      "                                                                 activation_16[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_19 (Activation)      (None, 19, 16, 512)  0           add_6[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res3d_branch2a (Conv2D)         (None, 19, 16, 128)  65664       activation_19[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3d_branch2a (BatchNormalizati (None, 19, 16, 128)  512         res3d_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_20 (Activation)      (None, 19, 16, 128)  0           bn3d_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3d_branch2b (Conv2D)         (None, 19, 16, 128)  147584      activation_20[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3d_branch2b (BatchNormalizati (None, 19, 16, 128)  512         res3d_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_21 (Activation)      (None, 19, 16, 128)  0           bn3d_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res3d_branch2c (Conv2D)         (None, 19, 16, 512)  66048       activation_21[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn3d_branch2c (BatchNormalizati (None, 19, 16, 512)  2048        res3d_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_7 (Add)                     (None, 19, 16, 512)  0           bn3d_branch2c[0][0]              \n",
      "                                                                 activation_19[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_22 (Activation)      (None, 19, 16, 512)  0           add_7[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res4a_branch2a (Conv2D)         (None, 10, 8, 256)   131328      activation_22[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4a_branch2a (BatchNormalizati (None, 10, 8, 256)   1024        res4a_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_23 (Activation)      (None, 10, 8, 256)   0           bn4a_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4a_branch2b (Conv2D)         (None, 10, 8, 256)   590080      activation_23[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4a_branch2b (BatchNormalizati (None, 10, 8, 256)   1024        res4a_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_24 (Activation)      (None, 10, 8, 256)   0           bn4a_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4a_branch2c (Conv2D)         (None, 10, 8, 1024)  263168      activation_24[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4a_branch1 (Conv2D)          (None, 10, 8, 1024)  525312      activation_22[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4a_branch2c (BatchNormalizati (None, 10, 8, 1024)  4096        res4a_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "bn4a_branch1 (BatchNormalizatio (None, 10, 8, 1024)  4096        res4a_branch1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "add_8 (Add)                     (None, 10, 8, 1024)  0           bn4a_branch2c[0][0]              \n",
      "                                                                 bn4a_branch1[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "activation_25 (Activation)      (None, 10, 8, 1024)  0           add_8[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res4b_branch2a (Conv2D)         (None, 10, 8, 256)   262400      activation_25[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4b_branch2a (BatchNormalizati (None, 10, 8, 256)   1024        res4b_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_26 (Activation)      (None, 10, 8, 256)   0           bn4b_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4b_branch2b (Conv2D)         (None, 10, 8, 256)   590080      activation_26[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4b_branch2b (BatchNormalizati (None, 10, 8, 256)   1024        res4b_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_27 (Activation)      (None, 10, 8, 256)   0           bn4b_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4b_branch2c (Conv2D)         (None, 10, 8, 1024)  263168      activation_27[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4b_branch2c (BatchNormalizati (None, 10, 8, 1024)  4096        res4b_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_9 (Add)                     (None, 10, 8, 1024)  0           bn4b_branch2c[0][0]              \n",
      "                                                                 activation_25[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_28 (Activation)      (None, 10, 8, 1024)  0           add_9[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "res4c_branch2a (Conv2D)         (None, 10, 8, 256)   262400      activation_28[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4c_branch2a (BatchNormalizati (None, 10, 8, 256)   1024        res4c_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_29 (Activation)      (None, 10, 8, 256)   0           bn4c_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4c_branch2b (Conv2D)         (None, 10, 8, 256)   590080      activation_29[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4c_branch2b (BatchNormalizati (None, 10, 8, 256)   1024        res4c_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_30 (Activation)      (None, 10, 8, 256)   0           bn4c_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4c_branch2c (Conv2D)         (None, 10, 8, 1024)  263168      activation_30[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4c_branch2c (BatchNormalizati (None, 10, 8, 1024)  4096        res4c_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_10 (Add)                    (None, 10, 8, 1024)  0           bn4c_branch2c[0][0]              \n",
      "                                                                 activation_28[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_31 (Activation)      (None, 10, 8, 1024)  0           add_10[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "res4d_branch2a (Conv2D)         (None, 10, 8, 256)   262400      activation_31[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4d_branch2a (BatchNormalizati (None, 10, 8, 256)   1024        res4d_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_32 (Activation)      (None, 10, 8, 256)   0           bn4d_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4d_branch2b (Conv2D)         (None, 10, 8, 256)   590080      activation_32[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4d_branch2b (BatchNormalizati (None, 10, 8, 256)   1024        res4d_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_33 (Activation)      (None, 10, 8, 256)   0           bn4d_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4d_branch2c (Conv2D)         (None, 10, 8, 1024)  263168      activation_33[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4d_branch2c (BatchNormalizati (None, 10, 8, 1024)  4096        res4d_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_11 (Add)                    (None, 10, 8, 1024)  0           bn4d_branch2c[0][0]              \n",
      "                                                                 activation_31[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_34 (Activation)      (None, 10, 8, 1024)  0           add_11[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "res4e_branch2a (Conv2D)         (None, 10, 8, 256)   262400      activation_34[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4e_branch2a (BatchNormalizati (None, 10, 8, 256)   1024        res4e_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_35 (Activation)      (None, 10, 8, 256)   0           bn4e_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4e_branch2b (Conv2D)         (None, 10, 8, 256)   590080      activation_35[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4e_branch2b (BatchNormalizati (None, 10, 8, 256)   1024        res4e_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_36 (Activation)      (None, 10, 8, 256)   0           bn4e_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4e_branch2c (Conv2D)         (None, 10, 8, 1024)  263168      activation_36[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4e_branch2c (BatchNormalizati (None, 10, 8, 1024)  4096        res4e_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_12 (Add)                    (None, 10, 8, 1024)  0           bn4e_branch2c[0][0]              \n",
      "                                                                 activation_34[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_37 (Activation)      (None, 10, 8, 1024)  0           add_12[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "res4f_branch2a (Conv2D)         (None, 10, 8, 256)   262400      activation_37[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4f_branch2a (BatchNormalizati (None, 10, 8, 256)   1024        res4f_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_38 (Activation)      (None, 10, 8, 256)   0           bn4f_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4f_branch2b (Conv2D)         (None, 10, 8, 256)   590080      activation_38[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4f_branch2b (BatchNormalizati (None, 10, 8, 256)   1024        res4f_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_39 (Activation)      (None, 10, 8, 256)   0           bn4f_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res4f_branch2c (Conv2D)         (None, 10, 8, 1024)  263168      activation_39[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn4f_branch2c (BatchNormalizati (None, 10, 8, 1024)  4096        res4f_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_13 (Add)                    (None, 10, 8, 1024)  0           bn4f_branch2c[0][0]              \n",
      "                                                                 activation_37[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_40 (Activation)      (None, 10, 8, 1024)  0           add_13[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "res5a_branch2a (Conv2D)         (None, 5, 4, 512)    524800      activation_40[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5a_branch2a (BatchNormalizati (None, 5, 4, 512)    2048        res5a_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_41 (Activation)      (None, 5, 4, 512)    0           bn5a_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5a_branch2b (Conv2D)         (None, 5, 4, 512)    2359808     activation_41[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5a_branch2b (BatchNormalizati (None, 5, 4, 512)    2048        res5a_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_42 (Activation)      (None, 5, 4, 512)    0           bn5a_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5a_branch2c (Conv2D)         (None, 5, 4, 2048)   1050624     activation_42[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5a_branch1 (Conv2D)          (None, 5, 4, 2048)   2099200     activation_40[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5a_branch2c (BatchNormalizati (None, 5, 4, 2048)   8192        res5a_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "bn5a_branch1 (BatchNormalizatio (None, 5, 4, 2048)   8192        res5a_branch1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "add_14 (Add)                    (None, 5, 4, 2048)   0           bn5a_branch2c[0][0]              \n",
      "                                                                 bn5a_branch1[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "activation_43 (Activation)      (None, 5, 4, 2048)   0           add_14[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "res5b_branch2a (Conv2D)         (None, 5, 4, 512)    1049088     activation_43[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5b_branch2a (BatchNormalizati (None, 5, 4, 512)    2048        res5b_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_44 (Activation)      (None, 5, 4, 512)    0           bn5b_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5b_branch2b (Conv2D)         (None, 5, 4, 512)    2359808     activation_44[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5b_branch2b (BatchNormalizati (None, 5, 4, 512)    2048        res5b_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_45 (Activation)      (None, 5, 4, 512)    0           bn5b_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5b_branch2c (Conv2D)         (None, 5, 4, 2048)   1050624     activation_45[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5b_branch2c (BatchNormalizati (None, 5, 4, 2048)   8192        res5b_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_15 (Add)                    (None, 5, 4, 2048)   0           bn5b_branch2c[0][0]              \n",
      "                                                                 activation_43[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_46 (Activation)      (None, 5, 4, 2048)   0           add_15[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "res5c_branch2a (Conv2D)         (None, 5, 4, 512)    1049088     activation_46[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5c_branch2a (BatchNormalizati (None, 5, 4, 512)    2048        res5c_branch2a[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_47 (Activation)      (None, 5, 4, 512)    0           bn5c_branch2a[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5c_branch2b (Conv2D)         (None, 5, 4, 512)    2359808     activation_47[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5c_branch2b (BatchNormalizati (None, 5, 4, 512)    2048        res5c_branch2b[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "activation_48 (Activation)      (None, 5, 4, 512)    0           bn5c_branch2b[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "res5c_branch2c (Conv2D)         (None, 5, 4, 2048)   1050624     activation_48[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "bn5c_branch2c (BatchNormalizati (None, 5, 4, 2048)   8192        res5c_branch2c[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "add_16 (Add)                    (None, 5, 4, 2048)   0           bn5c_branch2c[0][0]              \n",
      "                                                                 activation_46[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "activation_49 (Activation)      (None, 5, 4, 2048)   0           add_16[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "flatten_1 (Flatten)             (None, 40960)        0           activation_49[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 128)          5243008     flatten_1[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_2 (Dense)                 (None, 3)            387         dense_1[0][0]                    \n",
      "==================================================================================================\n",
      "Total params: 28,831,107\n",
      "Trainable params: 28,777,987\n",
      "Non-trainable params: 53,120\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Compile and train the model with the Adam optimizer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/kushpal/.local/lib/python3.5/site-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.cast instead.\n",
      "Train on 22107 samples, validate on 2457 samples\n",
      "Epoch 1/50\n",
      "22107/22107 [==============================] - 79s 4ms/step - loss: 0.9899 - acc: 0.5139 - val_loss: 0.7580 - val_acc: 0.6516\n",
      "Epoch 2/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.5555 - acc: 0.7451 - val_loss: 0.5472 - val_acc: 0.7444\n",
      "Epoch 3/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.3824 - acc: 0.8195 - val_loss: 0.4694 - val_acc: 0.7733\n",
      "Epoch 4/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.3157 - acc: 0.8429 - val_loss: 0.4232 - val_acc: 0.7880\n",
      "Epoch 5/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2864 - acc: 0.8519 - val_loss: 0.4266 - val_acc: 0.7908\n",
      "Epoch 6/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2781 - acc: 0.8532 - val_loss: 0.4085 - val_acc: 0.8026\n",
      "Epoch 7/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2659 - acc: 0.8562 - val_loss: 0.3930 - val_acc: 0.8010\n",
      "Epoch 8/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2546 - acc: 0.8598 - val_loss: 0.3941 - val_acc: 0.8091\n",
      "Epoch 9/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2461 - acc: 0.8632 - val_loss: 0.4066 - val_acc: 0.8030\n",
      "Epoch 10/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2519 - acc: 0.8599 - val_loss: 0.3873 - val_acc: 0.8124\n",
      "Epoch 11/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2441 - acc: 0.8616 - val_loss: 0.3967 - val_acc: 0.8132\n",
      "Epoch 12/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2486 - acc: 0.8619 - val_loss: 0.3852 - val_acc: 0.8124\n",
      "Epoch 13/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2371 - acc: 0.8672 - val_loss: 0.3762 - val_acc: 0.8185\n",
      "Epoch 14/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2344 - acc: 0.8657 - val_loss: 0.4062 - val_acc: 0.8071\n",
      "Epoch 15/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2421 - acc: 0.8629 - val_loss: 0.4055 - val_acc: 0.8095\n",
      "Epoch 16/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2378 - acc: 0.8647 - val_loss: 0.3888 - val_acc: 0.8136\n",
      "Epoch 17/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2321 - acc: 0.8663 - val_loss: 0.3844 - val_acc: 0.8185\n",
      "Epoch 18/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2362 - acc: 0.8662 - val_loss: 0.3723 - val_acc: 0.8217\n",
      "Epoch 19/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2326 - acc: 0.8648 - val_loss: 0.3935 - val_acc: 0.8213\n",
      "Epoch 20/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2322 - acc: 0.8671 - val_loss: 0.3737 - val_acc: 0.8201\n",
      "Epoch 21/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2560 - acc: 0.8592 - val_loss: 0.4028 - val_acc: 0.8128\n",
      "Epoch 22/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2462 - acc: 0.8619 - val_loss: 0.3667 - val_acc: 0.8270\n",
      "Epoch 23/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2389 - acc: 0.8643 - val_loss: 0.4091 - val_acc: 0.8136\n",
      "Epoch 24/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2402 - acc: 0.8639 - val_loss: 0.3698 - val_acc: 0.8311\n",
      "Epoch 25/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2296 - acc: 0.8659 - val_loss: 0.3704 - val_acc: 0.8242\n",
      "Epoch 26/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2275 - acc: 0.8679 - val_loss: 0.3638 - val_acc: 0.8295\n",
      "Epoch 27/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2282 - acc: 0.8675 - val_loss: 0.3498 - val_acc: 0.8311\n",
      "Epoch 28/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2261 - acc: 0.8677 - val_loss: 0.3518 - val_acc: 0.8348\n",
      "Epoch 29/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2270 - acc: 0.8682 - val_loss: 0.3719 - val_acc: 0.8287\n",
      "Epoch 30/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2264 - acc: 0.8669 - val_loss: 0.3708 - val_acc: 0.8335\n",
      "Epoch 31/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2297 - acc: 0.8671 - val_loss: 0.3982 - val_acc: 0.8234\n",
      "Epoch 32/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2279 - acc: 0.8677 - val_loss: 0.4160 - val_acc: 0.8217\n",
      "Epoch 33/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2431 - acc: 0.8611 - val_loss: 0.3620 - val_acc: 0.8274\n",
      "Epoch 34/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2258 - acc: 0.8674 - val_loss: 0.3745 - val_acc: 0.8258\n",
      "Epoch 35/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2307 - acc: 0.8664 - val_loss: 0.4006 - val_acc: 0.8201\n",
      "Epoch 36/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2341 - acc: 0.8654 - val_loss: 0.3685 - val_acc: 0.8303\n",
      "Epoch 37/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2265 - acc: 0.8684 - val_loss: 0.4011 - val_acc: 0.8262\n",
      "Epoch 38/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2368 - acc: 0.8665 - val_loss: 0.3798 - val_acc: 0.8323\n",
      "Epoch 39/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2263 - acc: 0.8673 - val_loss: 0.3538 - val_acc: 0.8348\n",
      "Epoch 40/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2240 - acc: 0.8691 - val_loss: 0.3508 - val_acc: 0.8368\n",
      "Epoch 41/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2246 - acc: 0.8699 - val_loss: 0.3551 - val_acc: 0.8356\n",
      "Epoch 42/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2299 - acc: 0.8678 - val_loss: 0.4160 - val_acc: 0.8254\n",
      "Epoch 43/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2402 - acc: 0.8621 - val_loss: 0.4215 - val_acc: 0.8221\n",
      "Epoch 44/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2292 - acc: 0.8664 - val_loss: 0.4142 - val_acc: 0.8246\n",
      "Epoch 45/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2382 - acc: 0.8643 - val_loss: 0.3784 - val_acc: 0.8335\n",
      "Epoch 46/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2239 - acc: 0.8699 - val_loss: 0.3590 - val_acc: 0.8348\n",
      "Epoch 47/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2240 - acc: 0.8710 - val_loss: 0.3889 - val_acc: 0.8323\n",
      "Epoch 48/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2266 - acc: 0.8699 - val_loss: 0.3926 - val_acc: 0.8335\n",
      "Epoch 49/50\n",
      "22107/22107 [==============================] - 63s 3ms/step - loss: 0.2246 - acc: 0.8681 - val_loss: 0.4233 - val_acc: 0.8327\n",
      "Epoch 50/50\n",
      "22107/22107 [==============================] - 64s 3ms/step - loss: 0.2308 - acc: 0.8681 - val_loss: 0.3996 - val_acc: 0.8274\n"
     ]
    }
   ],
   "source": [
    "model.compile(keras.optimizers.Adam(lr=1e-5), loss='categorical_crossentropy', metrics=['accuracy'])\n",
    "val = model.fit(x=x_train, y=y_train, batch_size=100, epochs=50, validation_split=0.1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Model loss and accuracy on the test data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Number of images in x_test 12099\n",
      "12099/12099 [==============================] - 13s 1ms/step\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[0.41938977127124855, 0.8200677741986778]"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X_test=np.array(X_test)\n",
    "x_test = X_test.reshape(X_test.shape[0],145,121,3)\n",
    "x_test = x_test.astype('float32')\n",
    "print('Number of images in x_test', x_test.shape[0])\n",
    "model.evaluate(x_test,y_test)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Text(0, 0.5, 'Accuracy')"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd8leX5+PHPlb1ISEjCCBuhbBFSHCCCioIiuGW4a622arXW8e3Pumr9aqt1tNZvXdQJ4sYKWhAoIGoZMiTsMJJAIIOsk3nOuX9/3E/CIWScQE4C4Xq/OK9znn2fJ4fneu75iDEGpZRSqiFBrZ0ApZRSxz8NFkoppRqlwUIppVSjNFgopZRqlAYLpZRSjdJgoZRSqlEaLE5iIhIsIiUi0r2103IsRCRTRMY6n38vIv/nz7pHcZyxIrLx6FJ5YhORG0RkfgPLzxeRXS2YJNXCNFicQJwLe/XLKyJlPtMzmro/Y4zHGBNjjNnTxHREiUiRiIypY9lfRWR2E/b1kIgsqmN+RxGpEpH+TUmbMeYPxpjbmrJNPekKEREjIj199r3EGDPoWPfdwDFjRcQlIp/Xs/w6EVntrLNPRL4QkbN8lvcXkQ9FJE9ECkVknYjcLSJH/D8XkR0icoXP9DnO9609r1BEgo0xbxpjJjrzjzg3R/Fdl4tIufPbzXHS3fFo91drv2Ui0sVn3gQR2e7n9k+IyD+PNR1tkQaLE4hzYY8xxsQAe4BLfOa9W3t9EQkJUDpKgQ+A62sdLxSYCrzZhN29DYwRkW615k8D1hhjNh9LWk8wVwHlwAQRSfZdICL3A88AfwCSgB7AK8AUZ3lf4DsgHRhsjInD/i3OBKLqONZSwDfYjwE21zHvG2OM55i/Wd1uc37L/YB44E/NtN9S4KFm2pdyaLBoQ5y7ovdFZJaIFAPXisiZIvKdiBQ4d6MvOhf1I+4QReQdZ/l8ESkWkW9FpFc9h3sTuFJEInzmTQTcwL+d/f1ORPY6uZDNdRX/GGN2Yy9c19VadD3wlrOfviKyWETyRSRXRN4WkbgGzsE/faZvFJHdznYP1lq33nPjpAlgo3P3e0XtohYRGSQi/3G23yAiF/ssa8q5rHYD8DdgEzDdZ1/xwKPYi+unxphSY0ylMeYzY8wDzmp/AP5jjLnfGLPPObebjDHXGGNK6jhW7WBxNvB0HfOWOmm4RUSW1HdufNJ6v5NT2Csih91M1McYcxD4DBjms58g5/ezw/nbzXbOQ3XO9j0nB1UgIv8VkUSfXb4AXFff+RaRriLyiZPOnSLyK2f+JOB+YIbzvVb7k/6ThQaLtucy4D0gDngfe/H+NZAIjAImAL9oYPvpwO+BBGzu5Q/1rLcMyAcu9Zl3HfCuMcYjIoOc4ww3xsRiA0l9xV1v4hMsnG0HAbOqZwFPAJ2AgUBvJ40NEpEh2IvvdCAF6OLso1pD56b6ojnIybl9VGvfYcC/gC+wd/r3AO+LyCk+q/l7LhGR3sBo4F3ndYPP4lFACPaCWp/zgQ8bWF7bUmCoiMQ5OdDTsOc72WfemRwKDL7qOzddgUjseb4NeFlEYhtLiHOhvwzwLSq6B7jYOVZXoAR40Vl2Eza31BXoAPwSmyOrtgeYCTxSx7GCsH+3ldjfxHjgPhE5zxjzL2zu5l3ne41oLO0nEw0Wbc9yY8znxhivMabMGLPSGPO9McZtjEnHFl2c08D2HxpjVhljqrAXrWF1rWTsoGJv4RRFiUh74BIOFUG5gQhgkIiEGGN2Osevy0dANxEZ6UxfD/zLGJPvHGurMeZr5276APBcI9+h2lXAp8aYb4wxFcDvsIGn+js09dz4GgWEAX82xlQZYxYC87FFP9X8Opc+33mNMWYr9qJ9qhPswF4QDxhjvA1snwDs8zPtGGN2OOuPxgaKNOccfeszLwh7UfVXOfCEcz7mAhXYIqb6/F1ECoEcIBYbuKvdBvzO
GJNljCkHHgOuci72VdgAf4pT77aqjtzTk8DlcmSd15lArDHmSef3tB14ncP/bqoOGizangzfCbGVnl+ISLaIFAGPY/+j1Sfb53MpENPAum8B48VWTF4NbDLGbAAwxmwB7nWOd8ApGutU106c/+gfAdc7F4MZzr6rv0MnEZkjIlnOd/hnI9+hWhd8zodznHyf/Tb13NTe9x5z+Eicu7F3q9X8OpciIthg8a6Tzj3Acg7lLvKwd/wN/X/NBzr7mfZq1UVRY7A5RZzjVs/7zgl0/sqtVb/R2O/nl07dyjBs7sz33HUHPneKmQqADc78ZOzffyFQ/Zt4SmrVzxljsoGXsUHGVw+ge/V+nX3fz+E5TlUHDRZtT+1hhP8B/Ii9C4sFHsbn7vqYDmTvxr/FXtyvo1bFtjHmHWPMKKAXEAz8bwO7exN7d3chEA7M81n2NPYudYjzHW708zvsA2oqzkUkBnsHXq2hc9PYcMx7sbkh33R0B7L8SFdtZ2PP0e+dwJUNjMCWnQcD32BzapMb2MdC4IoGltelOliczaFgscxnXl1FUND4uWkSY8w67G/jbz6zM4Hxxpj2Pq8IY0y2kyN41BgzAJsLugz7G6ztaezv6VSfeRnAtlr7bWeMuSQQ360t0WDR9rUDCgGXiAyg4fqKo/EmtvjgdGxdCQAiMkBExolIOFDmvBoqRlkMuLB3g+/VuqNt5ywrFNtq6rd+pu0DYIpTkR2OrffwvRjUe26cO+Q8bP1IXVZgL+D3ikioiJwLXIStJ2qqG4AvsfUxw5zXEGzRzAVOBfBj2DqAySIS6RzzYhF5ytnHw8BYEfnf6hyciPRzKoLru7tfig1Ko5zvA7AWW3Q0hnqChR/n5mi8gQ2+1Y0E/g94Upw+QCKSLCKTnc/nishgJ6dVhC2WOuK35RRjPofNOVT7FqgUkXtFJEJsX6MhIlJdP7Ef6FnrJkChweJkcC/2YlSMvZM+motZQz7AFt185dQnVAvHVhbmYotj4oH/V99OnOKct7HFBG/VWvwIMBJ7YZ+LLbJqlDFmPTaQzcHe8WdzeNFQY+fmEeA9p7ji8lr7rsDW0UxxvuOLwHRjzDZ/0lZNRKKwdSsvOnfN1a90fCq6jTFPAw9gW0XlYe+Qbwc+dZZvxZbH9wPSnOKVOdjmtKX1nJ80oADIMMYUO/M8wGog2tm2PvWem6PhnM+/cqjhwl+wAfRrsS37VgA/dZZ1AT7GBoqN2FzVe9TtOXxuEIwxbmxQHwnswv7t/oENzGB/A2FAvoj891i/V1siRh9+pJRSqhGas1BKKdUoDRZKKaUapcFCKaVUozRYKKWUalRABpprDYmJiaZnz56tnQyllDqhrF69OtcYk9TYem0mWPTs2ZNVq1a1djKUUuqEIiK7/VlPi6GUUko1SoOFUkqpRmmwUEop1SgNFkoppRqlwUIppVSjNFgopZRqlAYLpZRSjWoz/SyUUup45fZ4yS2pJLuonOzCcvYXlVNYVkViTDid4sLpGBtBp9gIEqLDOF4fpaHBQinVqrxeQ1DQ0V8g812VbD9QwvYDJRwothfhojK3815FUXkVocFB9O/UjgGdYxnYJZYBnWOJiwxtxm9xpD15pfx9yXYWbzlATnEFXj+eBhEWHERybDi9EqPpkxRDn+QY+iRFc0pSDEntwls1kGiwUH7bnediQ1YhXdpH0i0+isSY4/MuyBjD/qIKth8oYVj39sSEt97P3O3xsiuvlC3ZxWzOLmJzdjGuCjcRocFEhgYTHhpEZGgwEaHBhAQJlR4vVR4vlW4vVR5DpcdLRZWXsio3pZUeyio9uCrdlFV6qKjy0iEmjC7tIw+94iLo0j6SsJAg3B5DldeL22Nwe7x2X24vpRVuXJUeXBVu+6r0EB8Vyq1j+pDULrzR71RUXsVHqzMBSGoXTmJMOEnt7KtdeEijv4nC0iq+Tc/lm+15fLM9l935pfRKjKZfxxj6dWxX8+rRIYqyKg8HXZUcLK3ioKuS
fOe1M8/F9v0lbM8pId9Vedj+Y8JDiIsMpV2Efe+eYPezaPMBPnDSDZDSPpJh3dvz87N7M6xb+6P469ZtZ66LlxZv55MfsggOEiYO7kSPhCiSndxDp7gIOsZGEBcZSm5JBfuLbE4ju7Cc7KIK9hWWsTPXxQerMnBVHnqkebvwEGKdACfivBBEYFCXWP4+Y0R9SWoWbebhR6mpqUaH+wiMTfuK+PuSHXyxfu9hd0eRocF0jY+kW0IUPTpEkdojgdN7J5AY0/gFxxhDdlE5O3Nd7MotZVeey/nsoqCsimARgoMOvYIEIsOC6RwXSUr7SLrG2/eU+EjaR4axdX8x67MK+TGrkPWZheSWVADQNT6Sv1w9jJG9EhpJ0bExxpBTUsGW7GK2ZBezaV8xW/YXsXV/CZVu+8TPIIFeidG0jwqjvMrjvLxUuG0QcHsNYSFBhAUHERocRGiI1HyODg8hKswGmOjwECLDggkLDiK3pIK9BWXsc4o2/Ll7rRYSJESHhxATHsL+onIiQ4O5e3w/rj+zB6HBR1Znuj1eZq/M4LkFW8mrdYGuFh4SRGJMOIkxYSTGhNPBeU+MCSfPVcHy7XlsyCzAayAqLJgzenfglOQYdua62La/mN35pfhzSYqLDKVvcgynOK8+yTGckhRD57gIQupIe83fqLiCtH1FpO0rYtO+YpZty6GgtIrz+idzz/h+DE6Jq3PbjPxSPlydyefr9xIREsyAzrEM6GxzKv07taNDTDjbDxTzt0XbmbtuL2EhQcw4vQe/GNOb5NiIxr9QPemtvunZkWNfrgoPBoPzD2MMBujRIZrfjO93VMcRkdXGmNRG19Ng0TZ4vIZ8V6Vfd4b+Wr07n78v3sHXmw8QHRbMtWf2YNKQLhwoLicjv5TMg2VkHCwlI9/eCZVV2bugvskxnNG7A2f07sCIHvEUllWxI6fksB99eo6LUp+7prCQIHokRNErMZqE6DC8xuDxgsfrxWNsUYWr0s3egjKyDpYddsdVLUjglOQYBqfEMTQljqR2ETz95WYyDpZy+zl9uPv8foSF+Nemo7zKQ9q+ItbuKWBtRgG781yEhwQTERZMVGgwkWE2NxAcBOk5LrZkFx92AU1qF07/Tu3o36kdP+lkLyinJMcQERp8jH+V+lV5vOwvKmdvQTluj5eQ4CBCgoXQIOc9WAgPCSYmPISo8GDCQw6lZUdOCY99nsbSrTn0TY7h0cmDGHVKYs3y/2zN4Y9fpLF1fwkjeyXw0MUD6BofRU5xBbklFeQUO6+SCnKd97ySSnJLKshzVeLxGoKDhNO6tWfUKYmM7pvIqV3bH/H3KKv0sCOnhK37i9mdV0q7iBDaR4WREB1q36PCiI8KIzay8RyMP0oq3Ly5YhevLE2nsKyKCwZ25O7z+zGwSyzlVR6+2pjNnFUZfLM9DxE4q08HgoOC2LyviAPFFTX7qQ6GESHBXH9mD245u3ez/l8MJA0WJwFXhZtl23JYkHaAxVsOkO+qpFdiNGN/ksS4nyRzeu+Ewy4I1bzeQ3f1xeVu3E5RRaXHeXd7mP9jNt/vzCc+KpSbRvXihjN7EhdVfxlvlcfLj1mFfJeez3fpeazalV/nBT2lfSR9kmPonRhNn6RoeiXG0DMxis5xkQT7WW5tjKGwrIrMg2VkFZRx0FVJn+QYBnaOJbpWkVNJhZs/fJ7G+6syGJwSy/PXDOOU5HZH7HNvQRkrd+WzevdB1mYUsGlfEVUe+3+jU2wEfTvGUOXxUlblpazSTVmVh7JKW2TUs0MU/TvF8pOa4GDvNE80xhgWbjrAH/6Vxp78UiYO7sS1Z/Tg1WXpLNmSQ48OUfzPxAFcOKhjky7UXq/9e4WFBB3x9zleFJVXMXP5Ll5bnk5xuZsze3dg495CisrddI2P5OrUblwxoisp7SNrtskrqWBzdjGb9tnixc5xEdx4Vs8T7m+vwaKN2ldYxsK0/SzYdIDvduRR6fESFxnK2J8k0b9TLN/vzOPbHXlU
uL1EhQVzVp9EzurToebuPj3HdVguoD6dYiP4+ZjeTBvZjaiwpv8Hrw4eazMK6BATTp+kaHonxhAZFrg764Z8tTGbBz9aT2mlh99dNIAx/ZJYuTOf73fm899deWTklwEQHRbMkK5xDOsWz7Bu9r1T3NEVI5yoyqs8vLo0nZeWbKe8yku7iBDuOrcv15/Vo86bj7aksLSK15en8/EPWaT2iOfq1G6c0bvDMVXAH+80WLQhu/NczP8xmy9/zGZtRgEAPTtEcf6Ajpw/sCOpPeIPK6ctq/TwbXouizfnsGjzAbIKyhCx5fd9kmLonRhD76RoeidFExcZSlhwkC2yCBJCnaKL9pGh9Zb9nqgOFJVz/0frWbIlp2ZeQnQYI3sm8NNeCZzeK4EBnWP9zuG0dVkFZSzdmsOFgzqREB3W2slRAaLB4gTm9Ro2ZxezIG0/X27MZtO+IgCGpMQxYXAnLhzUkT5JMX4VBVRXJMdHhQW0vPxEYYzh8/X7KCqr4ozeCX6fR6XaKn+DxfFZgHgSyioo45ttuSzfnsuKHbnkllQiAqk94nno4gFMGNyJrvFRTd6viNA5LrLxFU8SIsLkU7u0djKUOuEENFiIyATgBSAYeM0Y81St5d2BN4H2zjoPGmPmiUhPYBOwxVn1O2PMbYFMa0uodHvJLixnb2EZewvsa09+KSt3HWRnrguwrWjO7pvEqFMSGdM38aib3SmlVHMKWLAQkWDgJWA8kAmsFJG5xpg0n9UeAuYYY14WkYHAPKCns2yHMWZYoNLXkham7efRzzeSVVB2RBvyDtFhnNqtPdee0YOz+ybSN1mLRZRSx59A5ixGAtuNMekAIjIbmAL4BgsDxDqf44C9AUxPi/N6DS8u2sbzC7cxsHMsV57XlS7tbWeyLu0j6RwXofUISqkTQiCDRQqQ4TOdCZxea51HgX+LyJ1ANHC+z7JeIvIDUAQ8ZIxZVvsAInIrcCtA9+7dmy/lzaCovIrfvL+OhZv2c8XwrvzxssEaGJRSJ6zWbhs5DfinMaYrcBHwtogEAfuA7saY04DfAO+JSGztjY0xrxhjUo0xqUlJSS2a8IZsP1DCpS99w5ItB3hs8iCeuWqoBgql1AktkDmLLKCbz3RXZ56vnwETAIwx34pIBJBojDkAVDjzV4vIDqAfcNy3jf1qYzb3zllHeEgQ795yOqf37tDaSVJKqWMWyJzFSqCviPQSkTBgKjC31jp7gPMARGQAEAHkiEiSU0GOiPQG+gLpAUxrs/jX+r384u3V9E6K5vM7R2ugUEq1GQHLWRhj3CJyB/AVtlnsG8aYjSLyOLDKGDMXuBd4VUTuwVZ232iMMSIyBnhcRKoAL3CbMSY/UGltDh6v4dl/b2VA51jm/OJMLXZSSrUpAe1nYYyZh20O6zvvYZ/PacCoOrb7CPgokGlrbvM27GNnrou/zxiugUIp1eZoD+5mYIzhpcXb6Z0UzYWDOrV2cpQ6eRgDBbth/0b7Ml6I7eK8Uux7eKx9UpA6JhosmsGizQfYnF3MM1edqoPQqeZjDOz/EXK3wk8ugtCTfNgWjxtyNkHWasjeANk/2gBRWeysUP1/r1bP17AY6HEWXPYPiArsQ7AAqCqDor2HXsYLKcOhQ18Iau0GqEdPg8UxMsbwt8XbSWkfyZRhOuaQOkbGwN4fIO0z2DQX8p12HbEpMO53cOo0CKqnmNMY2LUMVr8JfcfDqVNbLt2BULwfMlc6r1X2vFTZYXEIj4OOg+x37DQYOg6G5AEQFAol2c6FOsu+H9wNa96EmRPh2o8hLqV50+muhMVPwPZF9phl9VSvhsdB1xGQkgpdfwrJ/aE0zyewOOkt2Q+hURAZb19RCc57B+h+JsQkN2/6/aSjzh6jFTtymf7q9/xhyiCuO7Nnix9ftSKvB7b9G3qeDeExR7ePqnI4uBPydsCebyFtLhTuAQmGXmNg4BSI6wqLn4S9ayB5IJz/mA0G1UUrVeWw4QP4/v9sTiQoBLxu
OO9hGP2bE68I5uBuWPQEbJhjp4NCodMQe4Ht+lN7wY3v1bTvtXMpzJoOEXFw3ceQ9JPmSWvxfphzPWR8B33OtenyLQKL7WJ/J1mrDwW9A05xWW1BIdCuiw0G7nIoOwil+eAuO7ROcDicNgPOugsSejXLV9AhylvIjNe+Y+v+EpbdP04rtk8237wACx62F4aJT0P/SQ1fwDxVsPFTGxTyd9gAUZhJTbFJUKi94AycbIudfItMjIG0T+Hrx21uo+fZcPZvYPcKWPWGvUNNHgRn3AYDL4UvfmMDyOm3wYX/27rFH1Vl8Okv4cAmGHIFDL0G2tcx4kJpPix9Bla+ChIEI2+FAZdAp6EQ2gwDau5bB+9cCd4qmP4BdPvpse0vYyXMuQ7KC2HK32DwFf5tV1EC+9ZC7jYbGKqDS1Ri3X+nqjIbOIr32Vzjuln2ZmDwFTDqbpuzOgYaLFrAD3sOctnfV/C7i/pz65g+LXpsFQBeD3gq/asbyNsBL59l73TLDto7+n4TYOKfIL7H4etWlsIP78CKv9pcQ0QcJPSBDqdAhz7O596Q2A/Cj3zk62HclbD6n/Cfp6E0FxAbWM64zQaQ6mDl9cK//x9893d7Ubn0ZQhp4uM+ywshfQnEdLLpjOrQ9FxKWQHMmmYDZJdhtigJoMcoGzQGTrHp+u5lWP68rX8YNh3G/q75i4vABtq3L4fibLj6Leh3wdHtZ/WbMO+30K4zTH3X5nxaStE++O4lWDUTKkug74Uw+h7oceZR7U6DRQu45c1VrNyVzzcPnkvMcfpsYeUnTxW8c7kNArcstHd79TEG3poMe9fCr76H6GRbBLT4SVu8cM79cOYdUFVq75K/+z97Ye92ui0W6nvBsd/plxfBlvn27jihd/3p/OYFWPgI9B4L17zTeDACqCi232fFX23AqBYeZ4NaQh9I7AtDrrJBpD5F++w5zd0Gl78Cgy+3RUwb5sC69yFvmy1WiYgFV4696J3/KHQc6P95OBolB+DdK20F+aTnYNgMCPbz/6+7AuY/AKtn2lzgFa+3TKV5XUrzYeVrNtDGdYVfLD2qIkcNFgG2ObuICc8v4+7z+3L3+f1a7LgqQL78nb1bCw63FaU3zYeweh429cM78Nmv7IUm9eZD8wsz4csHYdPn9gJekmPvlE8Zb4uMepzVMt/liPS+C3PvtHe/02bVHwgrSmxw++ZFW0nbbyKc+UtbDJK341DRWf4OKMiwRUWnToUx9x1Zfp67zd7Bl+XbO+/eYw9fboytg1n3PhTsgbPugJ6jA/Ht61ZeBO9fCzv/AxHt4ZTzbBA/5XyITjx83aK9tq4hcyVsXwgH0uyd/Lm/r7+xQUuqdNnAnHjKUW2uwSLA7pr1A19v2s83D55L+yh9PnHAlRdB3nZbZBESYYuKQiMhJNLOO5ZK3A0fwkc/s+X7vcfBrKm23uDKfx6ZAyjeDy+NtBXNN35Rdw5hy5ew5H/tXffoe1q2iKI+W7+COTfYytKoRJ/iL+dVkGFzIaW59qI59kFIGVH//koO2PVXvmbLz4dNh7N/a4vgMlfbO/egYJjxoS1+Oh65K2HLF7BtgW2o4MoBxH7vnqPg4C4bJIqcIe2Cw2z9yai7bPFZG6HBIoB25bo499kl/Pzs3vzPRQNa5JgnHWNs/4Jt/7av3d/aism6SDDEdDyyM1Zcir3wNVT0sn8jvHY+dD4VbvgcgkNt8cu/H4JzHrDNVX19cCNs/gJuX2GLYk4kBzbZoJG/A/LS7XvxvkPL+5xr6wqaUvFbnA3Ln7OV7MbAoMtg879sxe21HzdcTHU88XptpXN14MhabSvha1pg/dRWJDe13ucEoMEigB757Edmrcxg+QPjSG53Ej32NHc7FGZAn3GB2b8xtnXPxk9g21e2eALsXXzfC+x/WK/bNiusKrMvd5ktPinOhmKnvXph1qH2+LFd4ZIXoO/5Rx6vrABeGWv384v/QLtOh9Ix9w5b3HTF6zDkSjt/8zyYPQ3O
fcgWvbQFlS6nL4ccW6uawixY/hdb8ZvcH2Z8BO06NlsyW5ynyt44nAT8DRZaK3sU1mUWMqJ7/MkTKIqzbbHKmrfBeGyLn9N/0Xz7ryqHHz+yFXX7N9gOSb3OsUU4p4yH9t0a34cvY6CiyLa8mXc/vHuF7cx24ZOHKiO9XvjkFzb43TjvUKAAW6R18XOQv9M2+YzvaXMRX9xrm6ee9etm++qtLiy6eYrJ4lLg4mdtziQ85sS/Az9JAkVTaLBoImMM6TklXHLqSdBbu6LYVnZ++zfbpPSnP7N3kPPvt+XRP73l2PZfnA0rX3f6CeRC0gCbCxhydf2Vy/4Qsc1Te4+F25bBf/5ki0q2fw0XP2PLm5f+GbZ+CRc9A91rP8ARCAmDq9+GV8fB7Om2crp4n21RFKJ1VPWK1mH52yoNFk2U76qkqNxNr8To1k6K/9wV4Mq1/QHK8g/1DC07aCuLfYcUiEywF9q0T2HJU/YiPugy2/KjQx9bKTjnenuXHRQCI25sWlo8bti5BNbOskNaeN3Q70I443abm2ju3sYh4XDe722A+OxXNu29xsDOZTB0asMBL7oDTH8fXhtvi8bO+KXtPazUSUiDRROl59qy8D5JRzm8Q0v74V3bm9dd3vRte4yG8Y8ffoEMCYOr37TNDj//ta1cHn5dw/sxBrLX22aSP35ox76JiLM5lZG3tkwlaOeh8PNFsOJFWPK0LZ+f9FzjwSl5AEx9x57Hcf8v8OlU6jilwaKJdubYYNE7qZVyFhXFtmK2sXJ8rwcWPmovjj3Ptr14fXMP1YOUeSqcXEbB4bmOxL62dUxdF9OQcFtEM3u6bb8fFALDph2+TlWZHV5h9ze2aeqBNDucRb8Lbc/dfhe2fLl2cCicfa/thBUW7X9RV++xR/YTUOoko8GiiXbklhAaLKS0b4Xhovd8Bx/dYtt9n3atrUyM7XzkehXFdr2tX9pilglPNVBh54xu2VShEbaz1ayp8OntdtiB8NhDo4Tu/9EWMQF0HWkrPwdd3nq9XX2102eOKNVUGiyaKD3HRY8O0YQEt+DAbF6PHWDtP0/Ztt8jbrQtk9Z/AGf+Ckb92g6ZAHY4hVlTIWeLrbwd+fPApSs0EqbOgveutuPkgH12QMpwm6bet14HAAAgAElEQVSuP7XDMcckBS4NSqkWEdBgISITgBewz+B+zRjzVK3l3YE3gfbOOg86j2JFRP4H+BngAe4yxnwVyLT6a2eui94tWbldkAEf3wp7VthWQhc/awPDWXfZYZyXPWPHqRlzvx1T54ObbOe1az+0xUiBFhYF0+fYAefie9qhn4+HIRCUUs0qYMFCRIKBl4DxQCawUkTmOs/drvYQMMcY87KIDMQ+r7un83kqMAjoAiwUkX7GGE+g0usPt8fL7jwX5w1opoePuCvhm+ftgHPVQy8k9LG9X0Vsa6G5d9qcxWX/OPxhNgm94MrXbc5i4SPw5QPO/D62BU9L9i4Oi4L+F7Xc8ZRSLS6QOYuRwHZjTDqAiMwGpgC+wcIATvkJccBe5/MUYLYxpgLYKSLbnf19G8D0NirzYBlVHkOfxGZoCeX1wCe32iaZ1Q+rqRbWznZyytkMnYfBlW/U32IoZThcP9f2IUhfbCtwj4d6AaVUmxLIYJECZPhMZwK1ez89CvxbRO4EooHqMRlSgO9qbRuAwe2bZmduM7WE8nph7l02UFzwRzuAXeGeQ+P1VI/sOWCyHVaisU5gInY4i7qGtFBKqWbQ2hXc04B/GmOeFZEzgbdFxO8BakTkVuBWgO7d63jyVjPbkVMCcGwd8oyxw1ivfQfG/o8dmhnskNYJvTkUL5VS6vgRyCY9WYBvZ4CuzjxfPwPmABhjvgUigEQ/t8UY84oxJtUYk5qUFPgWN+m5LuIiQ0mIPobhHr5+HP77D/twnHMeaL7EKaVUAAUyWKwE+opILxEJw1ZYz621zh7gPAARGYANFjnOelNFJFxEegF9gf8GMK1+2ZnjondSNHK0
Q1IsfcaOzDniJrjgieYf2kIppQIkYMVQxhi3iNwBfIVtFvuGMWajiDwOrDLGzAXuBV4VkXuwld03Gjtm+kYRmYOtDHcDv2rtllAA6bkljDolsfEV6/Ld/8GiPzjNX/+igUIpdUIJaJ2F02diXq15D/t8TgNG1bPtH4E/BjJ9TeGqcLO/qML/MaGK9trB6nYtte8Fu6H/JLj05WN//rJSSrWw1q7gPmHUtIRqqHK7IAOWPQs7l9rWTGAHzOsxGs66E4Zf7/+D4ZVS6jiiVy4/1bSEqq/ZbGWpHWYjbwf0PgdSb7YPoO80RHs0K6VOeBos/LQz14UI9OxQT7CYd599nvOMD7W/g1KqzdHCcz+l57joEhdJRGgduYQ1b9t+E2Pu00ChlGqTNFj4KT23pO6e2/vW2xFXe4+FsQ+2dLKUUqpFaLDwgzGGnTmuI1tClRfax3RGJsAVr2vdhFKqzdI6Cz8cKK7AVek5fJgPY+DTX0JhBtz4BUQfZf8LpZQ6AWiw8EN1S6jDiqG+fQk2/wsufBK6n9FKKVNKqZahxVB+ODTarFMMtec7WPAwDLgEzvhlK6ZMKaVahgYLP6TnuIgIDaJzbISd8e+H7PMmprykw3YopU4KGiz8kJ5TQs8O0QQFCRzYBJkr7TMoIuJaO2lKKdUiNFj4YWeuT0uoNW9DUCgMndrwRkop1YZosGhEpdtLxsEy2xLKXQHrZtnnTUd3aO2kKaVUi9Fg0Yg9+S48XmNbQm2ZB2X5dkBApZQ6iWiwaER6jk9LqDVvQWxX6D2ulVOllFItS4NFI9Krm82G5sOOxXDaDO2prZQ66WiwaER6TgmJMWHEbp5jZwyb0boJUkqpVqDBohE7c1306RABP7xjBwuM79HaSVJKqRanwaIR6TkuxkdssWNAacW2UuokFdBgISITRGSLiGwXkSPG7xaR50RkrfPaKiIFPss8PsvmBjKd9SksrSLPVcm40i/tyLL9L26NZCilVKsL2ECCIhIMvASMBzKBlSIy1xiTVr2OMeYen/XvBE7z2UWZMWZYoNLnj/TcEuIpolfuEhh5C4SEt2ZylFKq1QQyZzES2G6MSTfGVAKzgSkNrD8NmBXA9DRZeo6Ly4OXE2SqYPh1rZ0cpZRqNYEMFilAhs90pjPvCCLSA+gFLPKZHSEiq0TkOxG5tJ7tbnXWWZWTk9Nc6a6xM6eEa4KX4O0yAjoOavb9K6XUieJ4qeCeCnxojPH4zOthjEkFpgPPi0if2hsZY14xxqQaY1KTkpKaPVHerFX0C8okSHMVSqmTXCCDRRbQzWe6qzOvLlOpVQRljMly3tOBJRxen9Eihuz/jAqJgMFXtPShlVLquBLIYLES6CsivUQkDBsQjmjVJCL9gXjgW5958SIS7nxOBEYBabW3DbQRlavYHDsKImJb+tBKKXVcCVhrKGOMW0TuAL4CgoE3jDEbReRxYJUxpjpwTAVmG2OMz+YDgH+IiBcb0J7ybUXVItwVJJPPpuheLXpYpZQ6HgX0GdzGmHnAvFrzHq41/Wgd260AhgQybY3xFmQSBJRFdWnNZCil1HHheKngPu5U5O0CoCqmzgZcSil1UtFgUY+qvN0AuGO7NbKmUkq1fRos6uE5uAevESSuc2snRSmlWp0Gi/oUZLCfeCIjolo7JUop1eo0WNQjuCiTLJNITHhA2wAopdQJQYNFPUJLbLCICtOn4imlVKPBQkTuFJH4lkjMccPrIbw0myyTSLTmLJRSyq+cRUfs8OJznOdTSKAT1eqKswkybg0WSinlaDRYGGMeAvoCrwM3AttE5Mm6BvZrMwrtYLlZJpFoLYZSSin/6iycoTiynZcbO5bThyLypwCmrfUU2GCRaRKJCtOchVJKNXolFJFfA9cDucBrwH3GmCoRCQK2AfcHNomtoHAPADlByYSFaBsApZTy57Y5AbjcGLPbd6YxxisikwKTrFZWmIkrOA4Jim7tlCil1HHBn9vm+UB+9YSIxIrI6QDG
mE2BSlirKsggP7Qj0VoEpZRSgH/B4mWgxGe6xJnXdhVmkBucTHS4Vm4rpRT4FyzE91kTxhgvAR7avFUZY4f6kCRtNquUUg5/gkW6iNwlIqHO69dAeqAT1mrKDkKViyyStBhKKaUc/gSL24CzsM/PzgROB24NZKJaVYFtCZXpTdRiKKWUcjR662yMOYB99OnJwemQt9udQHvNWSilFOBfP4sI4GfAICCier4x5uYApqv1OB3ydlbFM0pzFkopBfhXDPU20Am4EPgP0BUo9mfnzlhSW0Rku4g8WMfy50RkrfPaKiIFPstuEJFtzusG/75OMyjMgNAosiqjtIJbKaUc/lwNTzHGXCUiU4wxb4rIe8CyxjYSkWDgJWA8tq5jpYjMNcakVa9jjLnHZ/07gdOczwnAI0AqYIDVzrYHm/Ddjk7BHkxcNyqLjVZwK6WUw5+cRZXzXiAig4E4INmP7UYC240x6caYSmA2MKWB9acBs5zPFwILjDH5ToBYAEzw45jHrjADd7uuAJqzUEophz/B4hXneRYPAXOBNOBpP7ZLATJ8pjOdeUcQkR5AL2BRU7YVkVtFZJWIrMrJyfEjSX4oyKAyxh5KR5xVSimrwVtnZ7DAIufufinQO0DpmAp8aIzxNGUjY8wrwCsAqampppHVG1fpgrJ8yqK6AJqzUEqpag3mLJze2kc7qmwW0M1nuqszry5TOVQE1dRtm4/TEqokojOA9rNQSimHP8VQC0XktyLSTUQSql9+bLcS6CsivUQkDBsQ5tZeSUT6Y5+P8a3P7K+AC0Qk3ikCu8CZF1hOH4ui8E4A+iwLpZRy+HM1vMZ5/5XPPEMjRVLGGLeI3IG9yAcDbxhjNorI48AqY0x14JgKzK41/lS+iPwBG3AAHjfG5BNoTu/t/NBOQBYxWgyllFKAfz24ex3tzo0x84B5teY9XGv60Xq2fQN442iPfVQKMyAohIPB8UAWUVrBrZRSgH89uK+va74x5q3mT04rK8iA2BRclXZScxZKKWX5czX8qc/nCOA8YA3Q9oJFYQa0746rwg1AlAYLpZQC/CuGutN3WkTaYzvYtT0FGdB7LK5K24I3KlSLoZRSCvxrDVWbC9uBrm1xV0LxPmjfDVeFm6iwYIKCpLVTpZRSxwV/6iw+x7Z+AhtcBgJzApmoVlGUBRiI60bpQbc2m1VKKR/+XBGf8fnsBnYbYzIDlJ7W4/SxsDkLDzHaIU8ppWr4Eyz2APuMMeUAIhIpIj2NMbsCmrKW5vTeJq4brop8zVkopZQPf+osPgC8PtMeZ17bUp2ziOuKq9KtzWaVUsqHP8EixBliHADnc1jgktRKCjIgphOEhOOq8BClxVBKKVXDn2CRIyKTqydEZAqQG7gktZLCPdDejl3oqnTrg4+UUsqHP1fE24B3ReRvznQmUGev7hNaQQZ0GQaAq8KtI84qpZQPfzrl7QDOEJEYZ7ok4KlqaV6vbTo74BIASis8WsGtlFI+Gi2GEpEnRaS9MabEGFPiDBv+REskrsWU7AdPJbTvjjFGK7iVUqoWf+osJhpjCqonnKfmXRS4JLWCwkPNZsurvHgNWsGtlFI+/AkWwSISXj0hIpFAeAPrn3ic51jQvhuuSjuIoOYslFLqEH+uiO8CX4vITECAG4E3A5moFueTs3C5nBFntc5CKaVq+FPB/bSIrAPOx44R9RXQI9AJa1EFGRARBxGxuPKLAIjWBx8ppVQNf0ed3Y8NFFcB5wKbApai1lCYAXHdASh1iqGitRhKKaVq1BssRKSfiDwiIpuBv2LHiBJjzDhjzN/q267WPiaIyBYR2S4iD9azztUikiYiG0XkPZ/5HhFZ67zm1rVtsynIqOmQV1JRHSw0Z6GUUtUaun3eDCwDJhljtgOIyD3+7lhEgoGXgPHYjnwrRWSuMSbNZ52+wP8Ao4wxB0Uk2WcXZcaYYf5/laNkjM1Z9BoDQKnz4CPNWSil1CENFUNdDuwDFovIqyJyHraC218j
ge3GmHRnPKnZwJRa6/wceMlpjosx5kAT9t88ygugsuTInIVWcCulVI16g4Ux5lNjzFSgP7AYuBtIFpGXReQCP/adAmT4TGc683z1A/qJyDci8p2ITPBZFiEiq5z5l/r1bY6GBMOF/wu9zgGgtELrLJRSqjZ/WkO5gPeA90QkHlvJ/QDw72Y6fl9gLNAVWCoiQ5xOgD2MMVki0htYJCIbnKFHaojIrcCtAN27dz+6FETEwpm/rJmsef62toZSSqkaTXoGtzHmoDHmFWPMeX6sngV085nu6szzlQnMNcZUGWN2AluxwQNjTJbzng4sAU6rIz2vGGNSjTGpSUlJTfkq9XJVuAkOEsJDjubx5Eop1TYF8oq4EugrIr1EJAyYCtRu1fQpNleBiCRii6XSnfGnwn3mjwLSaAGllR6iw4IRaUr1jFJKtW0BK5g3xrhF5A5sJ75g4A1jzEYReRxYZYyZ6yy7QETSsE/gu88YkyciZwH/EBEvNqA95duKKpBKKtxaX6GUUrUE9KpojJkHzKs172Gfzwb4jfPyXWcFMCSQaatPaaUGC6WUqk0L5mspqfDoUB9KKVWLBotaSrUYSimljqDBopaSCreOOKuUUrVosKiltNKj40IppVQtGixq0QpupZQ6kgaLWkoq3FrBrZRStWiw8OHxGsqrvJqzUEqpWjRY+Kh+/raOOKuUUofTYOGjtEKfZaGUUnXRYOGjJmehraGUUuowGix8uJxnWWg/C6WUOpwGCx+ummIozVkopZQvDRY+XPpIVaWUqpMGCx+H6iw0WCillC8NFj60GEoppeqmwcJHaaVWcCulVF00WPioyVnocB9KKXUYDRY+XJVuwkOCCAnW06KUUr70qujDVeEmRiu3lVLqCAENFiIyQUS2iMh2EXmwnnWuFpE0EdkoIu/5zL9BRLY5rxsCmc5qrgo3UVq5rZRSRwjYbbSIBAMvAeOBTGCliMw1xqT5rNMX+B9glDHmoIgkO/MTgEeAVMAAq51tDwYqvQCuSo/2sVBKqToEMmcxEthujEk3xlQCs4Eptdb5OfBSdRAwxhxw5l8ILDDG5DvLFgATAphWwOYstI+FUkodKZDBIgXI8JnOdOb56gf0E5FvROQ7EZnQhG0RkVtFZJWIrMrJyTnmBLsqPURpSyillDpCa1dwhwB9gbHANOBVEWnv78bGmFeMManGmNSkpKRjTkypVnArpVSdAhkssoBuPtNdnXm+MoG5xpgqY8xOYCs2ePizbbNzVbi1Q55SStUhkMFiJdBXRHqJSBgwFZhba51PsbkKRCQRWyyVDnwFXCAi8SISD1zgzAsoV6WHGG0NpZRSRwjYbbQxxi0id2Av8sHAG8aYjSLyOLDKGDOXQ0EhDfAA9xlj8gBE5A/YgAPwuDEmP1BpddLrNJ3VnIVSStUW0CujMWYeMK/WvId9PhvgN86r9rZvAG8EMn2+Kj1e3F6jdRZKKVWH1q7gPm5UP39bW0MppdSRNFg4SvTBR0opVS8NFo7SyupnWWiwUEqp2jRYOKpzFjo2lFJKHUmDhaP6wUdawa2UUkfSYOFwVecstIJbKaWOoMHCUf2UPM1ZKKXUkTRYOPT520opVT8NFo6S6udvawW3UkodQYOFo7TSjQhEhmqwUEqp2jRYOEoq3ESHhSAirZ0UpZQ67miwcJRWeLQISiml6qHBwlFS6dahPpRSqh4aLBylFW7tva2UUvXQW2mHq9KjOQvVJlRVVZGZmUl5eXlrJ0UdRyIiIujatSuhoaFHtb1eHR2uCjcdYyNaOxlKHbPMzEzatWtHz549tcGGAuzD3fLy8sjMzKRXr15HtQ8thnKUVnp0xFnVJpSXl9OhQwcNFKqGiNChQ4djym1qsHDYprNaZ6HaBg0UqrZj/U1osHCUVrg1Z6GUUvUIaLAQkQkiskVEtovIg3Usv1FEckRkrfO6xWeZx2f+3ECm0+s1lFZ5NGehVDPIy8tj2LBhDBs2jE6dOpGSklIzXVlZ
6dc+brrpJrZs2dLgOi+99BLvvvtucyRZ+SFgt9IiEgy8BIwHMoGVIjLXGJNWa9X3jTF31LGLMmPMsECl77ADVXkwBqI0Z6HUMevQoQNr164F4NFHHyUmJobf/va3h61jjMEYQ1BQ3ferM2fObPQ4v/rVr449sS3M7XYTEnJiXmcCmeqRwHZjTDqAiMwGpgC1g0WrczkjzmoxlGprHvt8I2l7i5p1nwO7xPLIJYOavN327duZPHkyp512Gj/88AMLFizgscceY82aNZSVlXHNNdfw8MMPAzB69Gj+9re/MXjwYBITE7ntttuYP38+UVFRfPbZZyQnJ/PQQw+RmJjI3XffzejRoxk9ejSLFi2isLCQmTNnctZZZ+Fyubj++uvZtGkTAwcOZNeuXbz22msMG3b4fegjjzzCvHnzKCsrY/To0bz88suICFu3buW2224jLy+P4OBgPv74Y3r27MmTTz7JrFmzCAoKYtKkSfzxj3+sSfOwYcPIzs5m9OjRbN++nddee41//etfFBYWEhQUxCeffMKll15KQUEBbrebJ598kkmTJgE2SD733HOICMOHD+f555/ntNNOY+vWrYSEhHDw4EFGjBhRM92SAlkMlQJk+ExnOvNqu0JE1ovIhyLSzWd+hIisEpHvROTSug4gIrc666zKyck56oRWP8tCi6GUCqzNmzdzzz33kJaWRkpKCk899RSrVq1i3bp1LFiwgLS0I+8lCwsLOeecc1i3bh1nnnkmb7zxRp37Nsbw3//+lz//+c88/vjjAPz1r3+lU6dOpKWl8fvf/54ffvihzm1//etfs3LlSjZs2EBhYSFffvklANOmTeOee+5h3bp1rFixguTkZD7//HPmz5/Pf//7X9atW8e9997b6Pf+4Ycf+Pjjj/n666+JjIzk008/Zc2aNSxcuJB77rkHgHXr1vH000+zZMkS1q1bx7PPPktcXByjRo2qSc+sWbO46qqrWiV30tq30p8Ds4wxFSLyC+BN4FxnWQ9jTJaI9AYWicgGY8wO342NMa8ArwCkpqaao01E9VPyNGeh2pqjyQEEUp8+fUhNTa2ZnjVrFq+//jput5u9e/eSlpbGwIEDD9smMjKSiRMnAjBixAiWLVtW574vv/zymnV27doFwPLly3nggQcAOPXUUxk0qO7z8fXXX/PnP/+Z8vJycnNzGTFiBGeccQa5ublccsklgO3UBrBw4UJuvvlmIiMjAUhISGj0e19wwQXEx8cDNqg9+OCDLF++nKCgIDIyMsjNzWXRokVcc801Nfurfr/lllt48cUXmTRpEjNnzuTtt99u9HiBEMicRRbgm1Po6syrYYzJM8ZUOJOvASN8lmU57+nAEuC0QCW0JlhoD26lAio6Orrm87Zt23jhhRdYtGgR69evZ8KECXX2AwgLC6v5HBwcjNvtrnPf4eHhja5Tl9LSUu644w4++eQT1q9fz80333xU/RFCQkLwer0AR2zv+73feustCgsLWbNmDWvXriUxMbHB451zzjls3bqVxYsXExoaSv/+/ZuctuYQyGCxEugrIr1EJAyYChzWqklEOvtMTgY2OfPjRSTc+ZwIjCKAdR2llfrgI6VaWlFREe3atSM2NpZ9+/bx1VdfNfsxRo0axZw5cwDYsGFDncVcZWVlBAUFkZiYSHFxMR999BEA8fHxJCUl8fnnnwM2AJSWljJ+/HjeeOMNysrKAMjPzwegZ8+erF69GoAPP/yw3jQVFhaSnJxMSEgICxYsICvL3kOfe+65vP/++zX7q34HuPbaa5kxYwY33XTTMZ2PYxGwYGGMcQN3AF9hg8AcY8xGEXlcRCY7q90lIhtFZB1wF3CjM38AsMqZvxh4qo5WVM1GK7iVannDhw9n4MCB9O/fn+uvv55Ro0Y1+zHuvPNOsrKyGDhwII899hgDBw4kLi7usHU6dOjADTfcwMCBA5k4cSKnn356zbJ3332XZ599lqFDhzJ69GhycnKYNGkSEyZMIDU1
lWHDhvHcc88BcN999/HCCy8wfPhwDh48WG+arrvuOlasWMGQIUOYPXs2ffv2BWwx2f3338+YMWMYNmwY9913X802M2bMoLCwkGuuuaY5T0+TiDFHXdR/XElNTTWrVq06qm3fX7mHBz7awPIHxtE1PqqZU6ZUy9q0aRMDBgxo7WQcF9xuN263m4iICLZt28YFF1zAtm3bTrjmq7Nnz+arr77yq0lxQ+r6bYjIamNMaj2b1DixzliAVLeGitGchVJtSklJCeeddx5utxtjDP/4xz9OuEBx++23s3DhwpoWUa3lxDprAVJdwR2lFdxKtSnt27evqUc4Ub388sutnQRAx4YC7LMswoKDCAvR06GUUnXRqyNQWqlPyVNKqYZosKB6eHItglJKqfposABKKzzax0IppRqgwQLbz0Irt5VqHuPGjTuig93zzz/P7bff3uB2MTExAOzdu5crr7yyznXGjh1LY03kn3/+eUpLS2umL7roIgoKCvxJul+GDRvG1KlTm21/JwoNFtjWUNpsVqnmMW3aNGbPnn3YvNmzZzNt2jS/tu/SpUuDPaAbUztYzJs3j/bt2x/1/nxt2rQJj8fDsmXLcLlczbLPujRluJKWoldIbD+LxJjw1k6GUs1v/oOQvaF599lpCEx8qt7FV155JQ899BCVlZWEhYWxa9cu9u7dy9lnn01JSQlTpkzh4MGDVFVV8cQTTzBlypTDtt+1axeTJk3ixx9/pKysjJtuuol169bRv3//miE2wPY/WLlyJWVlZVx55ZU89thjvPjii+zdu5dx48aRmJjI4sWL6dmzJ6tWrSIxMZG//OUvNaPW3nLLLdx9993s2rWLiRMnMnr0aFasWEFKSgqfffZZzUCBvmbNmsV1113Hpk2b+Oyzz5g+fTpgh1+/7bbbyMnJITg4mA8++IA+ffrw9NNP88477xAUFMTEiRN56qmnGDt2LM888wypqank5uaSmprKrl27+Oc//8nHH39MSUkJHo+HL774ot5z9dZbb/HMM88gIgwdOpS///3vDB06lK1btxIaGkpRURGnnnpqzXRz0GCBLYbSnIVSzSMhIYGRI0cyf/58pkyZwuzZs7n66qsRESIiIvjkk0+IjY0lNzeXM844g8mTJ9f7fOiXX36ZqKgoNm3axPr16xk+fHjNsj/+8Y8kJCTg8Xg477zzWL9+PXfddRd/+ctfWLx4MYmJiYfta/Xq1cycOZPvv/8eYwynn34655xzDvHx8Wzbto1Zs2bx6quvcvXVV/PRRx9x7bXXHpGe999/nwULFrB582b++te/1gSLGTNm8OCDD3LZZZdRXl6O1+tl/vz5fPbZZ3z//fdERUUdNtZTfdasWcP69etJSEjA7XbXea7S0tJ44oknWLFiBYmJieTn59OuXTvGjh3LF198waWXXsrs2bO5/PLLmy1QgAYLwA4kqE1nVZvUQA4gkKqLoqqDxeuvvw7Y4bl/97vfsXTpUoKCgsjKymL//v106tSpzv0sXbqUu+66C4ChQ4cydOjQmmVz5szhlVdewe12s2/fPtLS0g5bXtvy5cu57LLLakaAvfzyy1m2bBmTJ0+mV69eNQ9E8h3i3Fd17qR79+6kpKRw8803k5+fT2hoKFlZWVx22WXA4UOZ33TTTURF2SGE/BnKfPz48TXr1XeuFi1axFVXXVUTDH2HMv/Tn/7EpZdeysyZM3n11VcbPV5TaJ0F2nRWqeY2ZcoUvv76a9asWUNpaSkjRtinD7z77rvk5OSwevVq1q5dS8eOHY9qOPCdO3fyzDPP8PXXX7N+/Xouvvjio9pPterhzaH+Ic5nzZrF5s2b6dmzJ3369KGoqKhmhNqm8Hco86aeq1GjRrFr1y6WLFmCx+Nh8ODBTU5bQ076YFHl8VLp9uqIs0o1o5iYGMaNG8fNN998WMV29fDcoaGhLF68mN27dze4nzFjxvDee+8B8OOPP7J+/XrADm8eHR1NXFwc+/fvZ/78+TXbtGvX
juLi4iP2dfbZZ/Ppp59SWlqKy+Xik08+4eyzz/br+3i9XubMmcOGDRvYtWsXu3bt4rPPPmPWrFm0a4duSbwAAAkFSURBVNeOrl278umnnwJQUVFRM5T5zJkzayrbj3Yo89rn6txzz+WDDz4gLy/vsP0CXH/99UyfPj0gQ5mf9MGi1BlEMEofqapUs5o2bRrr1q07LFjMmDGDVatWMWTIEN56661GH+Rz++23U1JSwoABA3j44Ydrciinnnoqp512Gv3792f69OmHDW9+6623MmHCBMaNG3fYvoYPH86NN97IyJEjOf3007nllls47TT/nqm2bNkyUlJS6NKlS828MWPGkJaWxr59+3j77bd58cUXGTp0KGeddRbZ2dlMmDCByZMn1wxl/swzzwDw29/+lv/f3r3H2FGWcRz//ixLFrxhYSWEbW25NKSNdFsqtqGmtKBUJbQqEqXGViVopCumUgWaaKwhxku8FSxBhDYGkWYVWkkb2bQlkKjQ7YVeoSCp2gbp7rZ4iVgp/fnHvKc9bnb37HbP6Skzzyc56cyceee8z3Z2n5l3zjyzdOlSJkyYQFdXV5+f2dfPaty4cSxatIhp06Yxfvx4FixY8H9tDh48OOBvng1G4UuU//3fr3H7I9u4btIIpo1pqkHPQjixokR5cbW1tbFy5co+H70aJcqH4O2nN3DX9RMrrxhCCCex1tZW1qxZw+rVq2uy/cInixBCyIMlS5bUdPuFv2YRQh7lZXg5VM9Q94maJgtJMyU9J+kFSbf28v48SZ2StqTXDWXvzZX0fHrNrWU/Q8iTxsZGuru7I2GEo2zT3d199B6Q41GzYShJw4C7gPcDe4ENklbZ3tlj1Ydsz+/RdjjwDWASYGBjatv3U9BDCAA0Nzezd+9eOjs7692VcBJpbGykubn5uNvX8prFpcALtl8EkPQrYBbQM1n05iqg3faB1LYdmAk8WKO+hpAbDQ0NjB49ut7dCDlTy2Goc4G/ls3vTct6+pikrZLaJI0YZNsQQggnQL0vcP8WGGX7YqAdWD6YxpJulNQhqSNOuUMIoXZqmSz2ASPK5pvTsqNsd9s+lGbvBS4ZaNvU/h7bk2xPamqKG+pCCKFWanYHt6RTgN3AFWR/6DcA19veUbbOObZfStMfAb5me3K6wL0RKN0ttwm4pHQNo4/P6wT6LzTTv7OAvu+9z6+Iu1gi7mIZSNzvsl3xaLtmF7htH5Y0H/gdMAy4z/YOSYuBDturgC9JugY4DBwA5qW2ByR9iyzBACzuL1GkNkM6tZDUMZBb3vMm4i6WiLtYqhl3bmpDDVXsTMUScRdLxD109b7AHUII4Q0gksUx99S7A3UScRdLxF0sVYs7hqFCCCFUFGcWIYQQKopkEUIIoaLCJ4tKlXHzRNJ9kvZL2l62bLik9lTdt13SO+rZx2qTNELSekk7Je2QdHNanve4GyU9LemZFPc30/LRkp5K+/tDkk6td19rQdIwSZslPZrmixL3HknbUhXvjrSsKvt6oZNFWWXcDwJjgU9KGlvfXtXUMrKCjOVuBdbavhBYm+bz5DDwFdtjgcnATen/OO9xHwJm2B4PtAAzJU0GvgP80PYFwEHgc3XsYy3dDOwqmy9K3ADTbbeUfWW2Kvt6oZMFZZVxbf8XKFXGzSXbT5Dd/FhuFsdqci0HZp/QTtWY7Zdsb0rT/yT7A3Iu+Y/btv+VZhvSy8AMoC0tz13cAJKagQ+TlRBCkihA3P2oyr5e9GQR1W3h7FLJFeBvwNn17EwtSRoFTACeogBxp6GYLcB+skKdfwJesX04rZLX/f1HwFeBI2n+TIoRN2QHBI9J2ijpxrSsKvt6PIM7HGXbknL5XWpJbwF+DXzZ9j+yg81MXuO2/TrQIukM4GHgojp3qeYkXQ3st71R0uX17k8dTLW9T9I7gXZJz5a/OZR9vehnFgOqbptzL0s6B7LCjmRHobkiqYEsUTxg+zdpce7jLrH9CrAemAKckYp8Qj73
98uAayTtIRtWngH8mPzHDYDtfenf/WQHCJdSpX296MliA3Bh+qbEqcAngFV17tOJtgooPeN8LrCyjn2pujRe/XNgl+0flL2V97ib0hkFkk4je7zxLrKkcW1aLXdx277NdrPtUWS/z+tszyHncQNIerOkt5amgQ8A26nSvl74O7glfYhsjLNUGfeOOnepZiQ9CFxOVrb4ZbLnnD8CrABGkpV4v65Shd83EklTgSeBbRwbw76d7LpFnuO+mOxi5jCyg8IVthdLOo/siHs4sBn4VNkzZXIlDUPdYvvqIsSdYnw4zZ4C/NL2HZLOpAr7euGTRQghhMqKPgwVQghhACJZhBBCqCiSRQghhIoiWYQQQqgokkUIIYSKIlmEAEj6tqTpkmZLum2QbZtSRdPNkt7X473HU1XjLenV1td2jrPfeySdVc1thtCbSBYhZN4L/BGYBjwxyLZXANtsT7D9ZC/vz0lVQFtsX9vL+yGc9CJZhEKT9D1JW4H3AH8AbgCWSvp6L+uOkrRO0lZJayWNlNQCfBeYlc4cThvg5y6TdLekDkm7U02j0nMo7k/PJNgsaXpaPkzS9yVtT5/fWra5VkmbUpvc138K9RGFBEOh2V4oaQXwaWAB8Ljty/pYfQmw3PZySZ8FfmJ7dkosk2zP76PdA5JeTdPtthem6VFktXvOB9ZLugC4KeuW353+8D8maQzwmbR+i+3DkoaXbb/L9kRJXwRuIUt4IVRVJIsQYCLwDFlV1l39rDcF+Gia/gXZGcVAzLHd0cvyFbaPAM9LejF9/lSypITtZyX9GRgDXAncXSqz3aNcQ6k44say/oVQVZEsQmGlIaRlZFVIu4DTs8XaAkyx/Wo/zauhZ62d4629U6px9DrxOx1qJK5ZhMKyvcV2C7Cb7LG664Cr0oXo3hLF78kqmQLMIStQOBQfl/QmSecD5wHPpW3OAUjDTyPT8nbg86Uy2z2GoUKouTgKCYUmqQk4aPuIpIts7+xn9VbgfkkLgU6y6wgDUX7Nosv2lWn6L8DTwNuAL9j+j6Sfkl1g30b2/PB5tg9JupdsOGqrpNeAnwF3DibWEIYiqs6GUAeSlgGP2q7qfRch1EoMQ4UQQqgozixCCCFUFGcWIYQQKopkEUIIoaJIFiGEECqKZBFCCKGiSBYhhBAq+h8g2O4KBQPoDwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(val.history['acc'])\n",
    "plt.plot(val.history['val_acc'])\n",
    "plt.legend(['Training accuracy' ,'Validation Accuracy'])\n",
    "plt.title(\"Train vs Validation ACC With ResNet\")\n",
    "plt.xlabel(\"# of Epoch\")\n",
    "plt.ylabel(\"Accuracy\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
