{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Using LittleVGG for Emotion Detection"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Training Emotion Detector"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 28273 images belonging to 6 classes.\n",
      "Found 3534 images belonging to 6 classes.\n"
     ]
    }
   ],
   "source": [
    "from __future__ import print_function\n",
    "import keras\n",
    "from keras.preprocessing.image import ImageDataGenerator\n",
    "from keras.models import Sequential\n",
    "from keras.layers import Dense, Dropout, Activation, Flatten, BatchNormalization\n",
    "from keras.layers import Conv2D, MaxPooling2D\n",
    "from keras.preprocessing.image import ImageDataGenerator\n",
    "import os\n",
    "\n",
    "num_classes = 6\n",
    "img_rows, img_cols = 48, 48\n",
    "batch_size = 16\n",
    "\n",
    "train_data_dir = './fer2013/train'\n",
    "validation_data_dir = './fer2013/validation'\n",
    "\n",
    "# Let's use some data augmentaiton \n",
    "train_datagen = ImageDataGenerator(\n",
    "      rescale=1./255,\n",
    "      rotation_range=30,\n",
    "      shear_range=0.3,\n",
    "      zoom_range=0.3,\n",
    "      width_shift_range=0.4,\n",
    "      height_shift_range=0.4,\n",
    "      horizontal_flip=True,\n",
    "      fill_mode='nearest')\n",
    " \n",
    "validation_datagen = ImageDataGenerator(rescale=1./255)\n",
    " \n",
    "train_generator = train_datagen.flow_from_directory(\n",
    "        train_data_dir,\n",
    "        color_mode = 'grayscale',\n",
    "        target_size=(img_rows, img_cols),\n",
    "        batch_size=batch_size,\n",
    "        class_mode='categorical',\n",
    "        shuffle=True)\n",
    " \n",
    "validation_generator = validation_datagen.flow_from_directory(\n",
    "        validation_data_dir,\n",
    "        color_mode = 'grayscale',\n",
    "        target_size=(img_rows, img_cols),\n",
    "        batch_size=batch_size,\n",
    "        class_mode='categorical',\n",
    "        shuffle=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Our Keras Imports"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "from keras.models import Sequential\n",
    "from keras.layers.normalization import BatchNormalization\n",
    "from keras.layers.convolutional import Conv2D, MaxPooling2D\n",
    "from keras.layers.advanced_activations import ELU\n",
    "from keras.layers.core import Activation, Flatten, Dropout, Dense"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Keras LittleVGG Model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "conv2d_1 (Conv2D)            (None, 48, 48, 32)        320       \n",
      "_________________________________________________________________\n",
      "activation_1 (Activation)    (None, 48, 48, 32)        0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_1 (Batch (None, 48, 48, 32)        128       \n",
      "_________________________________________________________________\n",
      "conv2d_2 (Conv2D)            (None, 48, 48, 32)        9248      \n",
      "_________________________________________________________________\n",
      "activation_2 (Activation)    (None, 48, 48, 32)        0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_2 (Batch (None, 48, 48, 32)        128       \n",
      "_________________________________________________________________\n",
      "max_pooling2d_1 (MaxPooling2 (None, 24, 24, 32)        0         \n",
      "_________________________________________________________________\n",
      "dropout_1 (Dropout)          (None, 24, 24, 32)        0         \n",
      "_________________________________________________________________\n",
      "conv2d_3 (Conv2D)            (None, 24, 24, 64)        18496     \n",
      "_________________________________________________________________\n",
      "activation_3 (Activation)    (None, 24, 24, 64)        0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_3 (Batch (None, 24, 24, 64)        256       \n",
      "_________________________________________________________________\n",
      "conv2d_4 (Conv2D)            (None, 24, 24, 64)        36928     \n",
      "_________________________________________________________________\n",
      "activation_4 (Activation)    (None, 24, 24, 64)        0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_4 (Batch (None, 24, 24, 64)        256       \n",
      "_________________________________________________________________\n",
      "max_pooling2d_2 (MaxPooling2 (None, 12, 12, 64)        0         \n",
      "_________________________________________________________________\n",
      "dropout_2 (Dropout)          (None, 12, 12, 64)        0         \n",
      "_________________________________________________________________\n",
      "conv2d_5 (Conv2D)            (None, 12, 12, 128)       73856     \n",
      "_________________________________________________________________\n",
      "activation_5 (Activation)    (None, 12, 12, 128)       0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_5 (Batch (None, 12, 12, 128)       512       \n",
      "_________________________________________________________________\n",
      "conv2d_6 (Conv2D)            (None, 12, 12, 128)       147584    \n",
      "_________________________________________________________________\n",
      "activation_6 (Activation)    (None, 12, 12, 128)       0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_6 (Batch (None, 12, 12, 128)       512       \n",
      "_________________________________________________________________\n",
      "max_pooling2d_3 (MaxPooling2 (None, 6, 6, 128)         0         \n",
      "_________________________________________________________________\n",
      "dropout_3 (Dropout)          (None, 6, 6, 128)         0         \n",
      "_________________________________________________________________\n",
      "conv2d_7 (Conv2D)            (None, 6, 6, 256)         295168    \n",
      "_________________________________________________________________\n",
      "activation_7 (Activation)    (None, 6, 6, 256)         0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_7 (Batch (None, 6, 6, 256)         1024      \n",
      "_________________________________________________________________\n",
      "conv2d_8 (Conv2D)            (None, 6, 6, 256)         590080    \n",
      "_________________________________________________________________\n",
      "activation_8 (Activation)    (None, 6, 6, 256)         0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_8 (Batch (None, 6, 6, 256)         1024      \n",
      "_________________________________________________________________\n",
      "max_pooling2d_4 (MaxPooling2 (None, 3, 3, 256)         0         \n",
      "_________________________________________________________________\n",
      "dropout_4 (Dropout)          (None, 3, 3, 256)         0         \n",
      "_________________________________________________________________\n",
      "flatten_1 (Flatten)          (None, 2304)              0         \n",
      "_________________________________________________________________\n",
      "dense_1 (Dense)              (None, 64)                147520    \n",
      "_________________________________________________________________\n",
      "activation_9 (Activation)    (None, 64)                0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_9 (Batch (None, 64)                256       \n",
      "_________________________________________________________________\n",
      "dropout_5 (Dropout)          (None, 64)                0         \n",
      "_________________________________________________________________\n",
      "dense_2 (Dense)              (None, 64)                4160      \n",
      "_________________________________________________________________\n",
      "activation_10 (Activation)   (None, 64)                0         \n",
      "_________________________________________________________________\n",
      "batch_normalization_10 (Batc (None, 64)                256       \n",
      "_________________________________________________________________\n",
      "dropout_6 (Dropout)          (None, 64)                0         \n",
      "_________________________________________________________________\n",
      "dense_3 (Dense)              (None, 6)                 390       \n",
      "_________________________________________________________________\n",
      "activation_11 (Activation)   (None, 6)                 0         \n",
      "=================================================================\n",
      "Total params: 1,328,102\n",
      "Trainable params: 1,325,926\n",
      "Non-trainable params: 2,176\n",
      "_________________________________________________________________\n",
      "None\n"
     ]
    }
   ],
   "source": [
    "model = Sequential()\n",
    "\n",
    "model.add(Conv2D(32, (3, 3), padding = 'same', kernel_initializer=\"he_normal\",\n",
    "                 input_shape = (img_rows, img_cols, 1)))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(Conv2D(32, (3, 3), padding = \"same\", kernel_initializer=\"he_normal\", \n",
    "                 input_shape = (img_rows, img_cols, 1)))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.2))\n",
    "\n",
    "# Block #2: second CONV => RELU => CONV => RELU => POOL\n",
    "# layer set\n",
    "model.add(Conv2D(64, (3, 3), padding=\"same\", kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(Conv2D(64, (3, 3), padding=\"same\", kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.2))\n",
    "\n",
    "# Block #3: third CONV => RELU => CONV => RELU => POOL\n",
    "# layer set\n",
    "model.add(Conv2D(128, (3, 3), padding=\"same\", kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(Conv2D(128, (3, 3), padding=\"same\", kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.2))\n",
    "\n",
    "# Block #4: third CONV => RELU => CONV => RELU => POOL\n",
    "# layer set\n",
    "model.add(Conv2D(256, (3, 3), padding=\"same\", kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(Conv2D(256, (3, 3), padding=\"same\", kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.2))\n",
    "\n",
    "# Block #5: first set of FC => RELU layers\n",
    "model.add(Flatten())\n",
    "model.add(Dense(64, kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(Dropout(0.5))\n",
    "\n",
    "# Block #6: second set of FC => RELU layers\n",
    "model.add(Dense(64, kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation('elu'))\n",
    "model.add(BatchNormalization())\n",
    "model.add(Dropout(0.5))\n",
    "\n",
    "# Block #7: softmax classifier\n",
    "model.add(Dense(num_classes, kernel_initializer=\"he_normal\"))\n",
    "model.add(Activation(\"softmax\"))\n",
    "\n",
    "print(model.summary())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Training our model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 122,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/10\n",
      "1767/1767 [==============================] - 820s 464ms/step - loss: 1.9752 - acc: 0.2095 - val_loss: 1.7341 - val_acc: 0.2504\n",
      "\n",
      "Epoch 00001: val_loss improved from inf to 1.73408, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 2/10\n",
      "1767/1767 [==============================] - 755s 427ms/step - loss: 1.7395 - acc: 0.2504 - val_loss: 1.6814 - val_acc: 0.2965\n",
      "\n",
      "Epoch 00002: val_loss improved from 1.73408 to 1.68137, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 3/10\n",
      "1767/1767 [==============================] - 796s 451ms/step - loss: 1.6683 - acc: 0.3025 - val_loss: 1.5962 - val_acc: 0.3545\n",
      "\n",
      "Epoch 00003: val_loss improved from 1.68137 to 1.59624, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 4/10\n",
      "1767/1767 [==============================] - 626s 354ms/step - loss: 1.5520 - acc: 0.3702 - val_loss: 1.4812 - val_acc: 0.4369\n",
      "\n",
      "Epoch 00004: val_loss improved from 1.59624 to 1.48116, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 5/10\n",
      "1767/1767 [==============================] - 621s 351ms/step - loss: 1.4572 - acc: 0.4189 - val_loss: 1.4714 - val_acc: 0.4301\n",
      "\n",
      "Epoch 00005: val_loss improved from 1.48116 to 1.47140, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 6/10\n",
      "1767/1767 [==============================] - 733s 415ms/step - loss: 1.4033 - acc: 0.4476 - val_loss: 1.3997 - val_acc: 0.4733\n",
      "\n",
      "Epoch 00006: val_loss improved from 1.47140 to 1.39971, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 7/10\n",
      "1767/1767 [==============================] - 780s 441ms/step - loss: 1.3610 - acc: 0.4623 - val_loss: 1.4027 - val_acc: 0.4753\n",
      "\n",
      "Epoch 00007: val_loss did not improve from 1.39971\n",
      "Epoch 8/10\n",
      "1767/1767 [==============================] - 619s 351ms/step - loss: 1.3265 - acc: 0.4819 - val_loss: 1.4244 - val_acc: 0.4713\n",
      "\n",
      "Epoch 00008: val_loss did not improve from 1.39971\n",
      "Epoch 9/10\n",
      "1767/1767 [==============================] - 686s 388ms/step - loss: 1.3057 - acc: 0.4924 - val_loss: 1.3889 - val_acc: 0.4818\n",
      "\n",
      "Epoch 00009: val_loss improved from 1.39971 to 1.38890, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\n",
      "Epoch 10/10\n",
      "1767/1767 [==============================] - 683s 386ms/step - loss: 1.2934 - acc: 0.5009 - val_loss: 1.4140 - val_acc: 0.4741\n",
      "\n",
      "Epoch 00010: val_loss did not improve from 1.38890\n"
     ]
    }
   ],
   "source": [
    "from keras.optimizers import RMSprop, SGD, Adam\n",
    "from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau\n",
    "\n",
    "                     \n",
    "checkpoint = ModelCheckpoint(\"/home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5\",\n",
    "                             monitor=\"val_loss\",\n",
    "                             mode=\"min\",\n",
    "                             save_best_only = True,\n",
    "                             verbose=1)\n",
    "\n",
    "earlystop = EarlyStopping(monitor = 'val_loss', \n",
    "                          min_delta = 0, \n",
    "                          patience = 3,\n",
    "                          verbose = 1,\n",
    "                          restore_best_weights = True)\n",
    "\n",
    "reduce_lr = ReduceLROnPlateau(monitor = 'val_loss', factor = 0.2, patience = 3, verbose = 1, min_delta = 0.0001)\n",
    "\n",
    "# we put our call backs into a callback list\n",
    "callbacks = [earlystop, checkpoint, reduce_lr]\n",
    "\n",
    "# We use a very small learning rate \n",
    "model.compile(loss = 'categorical_crossentropy',\n",
    "              optimizer = Adam(lr=0.001),\n",
    "              metrics = ['accuracy'])\n",
    "\n",
    "nb_train_samples = 28273\n",
    "nb_validation_samples = 3534\n",
    "epochs = 10\n",
    "\n",
    "history = model.fit_generator(\n",
    "    train_generator,\n",
    "    steps_per_epoch = nb_train_samples // batch_size,\n",
    "    epochs = epochs,\n",
    "    callbacks = callbacks,\n",
    "    validation_data = validation_generator,\n",
    "    validation_steps = nb_validation_samples // batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 111,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 3534 images belonging to 6 classes.\n",
      "Confusion Matrix\n",
      "[[216   3  39 130  86  17]\n",
      " [119  19  49 173  94  74]\n",
      " [ 30   1 743  61  33  11]\n",
      " [ 95   6 161 203  98  63]\n",
      " [ 60   2  47 240 238   7]\n",
      " [ 28   8  46  48   7 279]]\n",
      "Classification Report\n",
      "              precision    recall  f1-score   support\n",
      "\n",
      "       Angry       0.39      0.44      0.42       491\n",
      "        Fear       0.49      0.04      0.07       528\n",
      "       Happy       0.68      0.85      0.76       879\n",
      "     Neutral       0.24      0.32      0.27       626\n",
      "         Sad       0.43      0.40      0.41       594\n",
      "    Surprise       0.62      0.67      0.64       416\n",
      "\n",
      "   micro avg       0.48      0.48      0.48      3534\n",
      "   macro avg       0.48      0.45      0.43      3534\n",
      "weighted avg       0.48      0.48      0.45      3534\n",
      "\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAekAAAHOCAYAAAC8Z/EZAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzt3XncZGV55//Pl2ZpBGURJQgoJqLEDQQ0uCRxzYgawQQX4oxoSDCJ4zKaxUkyCRn95afjOLgkGntEWca4EgMugzAILswgmwgioqgwgCA2CAiySPc1f5zzSNE8T3fTXVWn6u7P+/U6r+ecu06dusqlr7ruc5/7TlUhSZJmz2ZDByBJkhZnkpYkaUaZpCVJmlEmaUmSZpRJWpKkGWWSliRpRpmkJUmaUSZpSZJmlElakqQZtfnQAUiStLH+zTO2qetvWDX265534R1fqKrnjv3C68kkLUmae9ffsIqzv/DQsV932S7f3WnsF70P7O6WJGlGWUlLkuZeAatZPXQYY2eSliQ1oFhV7SVpu7slSZpRVtKSpLnXdXfX0GGMnZW0JEkzykpaktQEB45JkjSDimJV2d0tSZKmxEpaktQEB45JkqSpsZKWJM29AlZZSUuSpGmxkpYkNaHFe9ImaUnS3CvwESxJkjQ9VtKSpCa0N9+YlbQkSTPLSlqSNPeKavIRLJO0JGn+FaxqL0fb3S1J0oZK8qgkF4xsNyd5Q5Idk5ya5Lv93x3685PkPUkuS3Jhkn3Xdn2TtCRp7hXdwLFxb+v83KpLq2qfqtoH2A/4GfBp4M3AaVW1J3BafwxwILBnvx0BvH9t1zdJS5I0Hs8CvldVVwAHAcf27ccCB/f7BwHHVecsYPskuyx1Qe9JS5IaEFaRoYN4GfDRfn/nqrqm378W2Lnf3xW4cuQ9V/Vt17AIk7Qkae4VsHoyA8d2SnLuyPGKqlqx5klJtgReCPzHe8VWVUk2KDqTtCRJS1tZVfuvx3kHAudX1Y/64x8l2aWqrum7s6/r268Gdh95325926K8Jy1JasKqvst7nNt9cCh3d3UDnAQc1u8fBpw40v6KfpT3AcBNI93i92IlLUnSRkiyDfAc4NUjzW8DPpHkcOAK4CV9++eB5wGX0Y0Ef9Xarm2SliTNvYLBBo5V1a3AA9dou55utPea5xbwmvW9tt3dkiTNKCtpSVITVtfgj2CNnUlakjT3huzuniS7uyVJmlFW0pKkuVeEVQ3Wne19I0mSGmElLUlqggPHJEmaQa0OHGs+SW++fJvaapsdhw5japbdcOvQIUxNNl82dAhTVVttOXQIU5VV67OabxvqjjuHDmFqbq9bubNuby+bTkjzSXqrbXbkMc97w9BhTM12Hzlr6BCmZtn2m86PL4C7HrX7uk9qyObXbzo/OFdffuW6T2rEWXf8zwldOayq9oZZtfeNJElqRPOVtCSpfQWsbrDuNElLkprQ4sCx9n52SJLUCCtpSdLcq3LgmCRJmiIraUlSE1Y3eE/aJC1JmnvdjGPtdQ63940kSWqElbQkqQEOHJMkSVNkJS1JmnutzjjW3jeSJKkRVtKSpCasKh/BkiRp5hTxESxJkjQ9VtKSpCas9hEsSZI0LVbSkqS51+q0oCZpSdLcK9Lk6O72fnZIktQIK2lJUhOccUySJE2NlbQkae5V4SpY6yvJwUkqyV6TuL4kSfcUVk9gG9qkfnYcCny1/7vRkljxS5I2OWNP0km2BZ4GHA68rG97epIzknwqybeTfCRJ+tee17edl+Q9ST7btx+Z5PgkZwLHJ/lykn1GPuerSfYed/ySpPlTdN3d496GNokIDgJOrqrvANcn2a9vfwLwBuDRwC8DT02yHPgAcGBV7Qc8aI1rPRp4dlUdChwNvBIgySOB5VX1jQnEL0nSTJhEkj4U+Fi//zHu7vI+u6quqqrVwAXAHsBewPer6gf9OR9d41onVdVt
/f4ngRck2QL4feCYpQJIckSSc5Oce9cdt27s95EkzYFVbDb2bWhjvdebZEfgmcDjkhSwjK4X4nPAHSOnrlrPz/5Fhq2qnyU5la5Sfwmw31JvqqoVwAqAbR64e93HryFJ0kwY94CsQ4Djq+rVCw1JvgT8+hLnXwr8cpI9qupy4KXruP4Hgc8AX6mqn4whXklSA4qwusFpQcedpA8F3r5G2wnAHwPfW/PkqrotyZ8AJye5FThnbRevqvOS3Ax8eEzxSpIaMQvd0+M21iRdVc9YpO09wHvWaPv3I4enV9Ve/WjvfwTO7c85cs1rJXkI3X30U8YYtiRJM2kWfnb8YZILgIuB7ehGe99LklcAXwP+qh98JkkS0A1+Wl2bjX0b2uCThFTVUcBR63HeccBxk49IkqTZMHiSliRp44VVMzCN57iZpCVJc2+hu7s17X0jSZIaYSUtSWpCi93dVtKSJM0ok7Qkae5VZbBHsJJsP7LK4yVJnpxkxySnJvlu/3eH/tz0Kz5eluTCJPuu7domaUlSEwZcqvLddKs/7gXsDVwCvBk4rar2BE7rjwEOBPbstyOA96/twiZpSZI2UJLtgN+gW06Zqrqzqm6kWwzq2P60Y4GD+/2DgOOqcxawfZJdlrq+SVqSNPcKWE3GvgE7LSx93G9HrPHRDwd+DHw4ydeTfDDJNsDOVXVNf861wM79/q7AlSPvv6pvW5SjuyVJWtrKqtp/La9vDuwLvLaqvpbk3dzdtQ1AVVW/fPN9ZpKWJDUg9+Ue8jhdBVxVVV/rjz9Fl6R/lGSXqrqm786+rn/9amD3kffv1rctyu5uSZI2UFVdC1yZ5FF907OAbwEnAYf1bYcBJ/b7JwGv6Ed5HwDcNNItfi9W0pKkuddNCzrYZCavBT6SZEvg+8Cr6IrgTyQ5HLgCeEl/7ueB5wGXAT/rz12SSVqS1IRVA3UOV9UFwGL3rZ+1yLkFvGZ9r213tyRJM8pKWpI094oM2d09MVbSkiTNKCtpSVITVjdYd5qkJUlzrwpW2d0tSZKmxUpaktQEB45JkqSpab6SXnb7arb/zq1DhzE1Wb586BCmZ/Pm/+d7D3fusOXQIUzV5jfdPnQIU7Ns1yVXKmxOrtpiItftHsFqr+7ctP6VkyQ1axV2d0uSpCmxkpYkzb2BF9iYGCtpSZJmlJW0JKkBbQ4ca+8bSZLUCCtpSVITVjc4utskLUmae87dLUmSpspKWpLUBAeOSZKkqbGSliTNvW7u7vbuSZukJUlNaHF0t93dkiTNKCtpSdLcc+5uSZI0VVbSkqQmtPgIlklakjT/qs3R3e397JAkqRFW0pKkuVf4CJYkSZoiK2lJUhO8Jy1JkqbGSlqSNPdanczEJC1JakKLSdrubkmSZtRUK+kkq4CLRpoOrqrLpxmDJKk9LlU5HrdV1T7juliSAKmq1eO6piRJs2Lw7u4ky5K8I8k5SS5M8uq+fdskpyU5P8lFSQ7q2/dIcmmS44BvArsPGb8kaTasJmPfhjbtSnrrJBf0+z+oqhcBhwM3VdUTk2wFnJnkFOBK4EVVdXOSnYCzkpzUv3dP4LCqOmvK8UuSZlG1OXBsFrq7fwt4fJJD+uPt6JLwVcDfJ/kNYDWwK7Bzf84Va0vQSY4AjgBYvuV2YwxfkqTpmYVHsAK8tqq+cI/G5JXAg4D9qurnSS4Hlvcv37q2C1bVCmAFwAO22bXGHbAkaba0+pz04PekgS8Af5xkC4Akj0yyDV1FfV2foJ8BPGzIICVJmrZZqKQ/COwBnN+P1v4xcDDwEeAzSS4CzgW+PViEkqSZ12IlPdUkXVXbLtK2GvjLflvTk5e41GPHGZckab61+pz0LHR3S5KkRcxCd7ckSRutrKQlSdK0WElLkpowCzOEjZuVtCRJM8pKWpI092rAaUH7ybZ+CqwC7qqq/ZPsCHyc7hHjy4GXVNVP+keN3w08D/gZ8MqqOn+pa1tJS5KaUJWxb/fBM6pqn6ravz9+M3Ba
Ve0JnNYfAxxIN/X1nnTTV79/bRc1SUuSNH4HAcf2+8fSTdK10H5cdc4Ctk+yy1IXsbtbktSAQSczKeCUJAV8oF8/YuequqZ//VruXiBqV7pVHhdc1bddwyJM0pIkLW2nJOeOHK/ok/Cop1XV1UkeDJya5B7TWFdV9Qn8PjNJS5KaMKHJTFaO3Gde4nPr6v7vdUk+DTwJ+FGSXarqmr47+7r+9KuB3UfevlvftijvSUuS5t7CUpXj3tYlyTZJ7r+wD/wW8E3gJOCw/rTDgBP7/ZOAV6RzAHDTSLf4vVhJS5K04XYGPt09WcXmwD9X1clJzgE+keRw4ArgJf35n6d7/OoyukewXrW2i5ukJUnzr7pnpaf+sVXfB/ZepP164FmLtBfwmvW9vt3dkiTNKCtpSVITWpy72yQtSZp7hUtVSpKkKbKSliQ1YNAZxybGSlqSpBllJS1JasIQj2BNmpW0JEkzykpaktSEFkd3N5+kc8cdbHbpFUOHMTWrbr996BCm5gvfP2voEKbq+U96/tAhTFXdcsvQIUzNqltuHTqEqam77pzMdavNJG13tyRJM6r5SlqStGnwESxJkjQ1VtKSpCa0+AiWSVqS1AQHjkmSpKmxkpYkzb0iVtKSJGl6rKQlSU1ocNyYSVqS1ABnHJMkSdNkJS1JakOD/d1W0pIkzSgraUlSE1q8J22SliQ1ocVpQe3uliRpRllJS5LmXtFmd7eVtCRJM8pKWpI0/wqwkpYkSdNiJS1JakKLo7tN0pKkNjSYpO3uliRpRllJS5IaEB/BkiRJ02MlLUlqg/ekl5bkljWOX5nkH8Z1fUmSllTdjGPj3oZmd7ckSTNqKkk6yW8n+VqSryf5X0l27tuPTHJ8kv+T5LtJ/rBvf3qSLyf5XJJLk/xTks2S/H6Sd41c9w+THDWN7yBJmnE1gW1g47wnvXWSC0aOdwRO6ve/ChxQVZXkD4A/B97Uv/Z44ABgG+DrST7Xtz8JeDRwBXAy8DvAJ4C/SvJnVfVz4FXAq8f4HSRJmhnjTNK3VdU+CwdJXgns3x/uBnw8yS7AlsAPRt53YlXdBtyW5HS65HwjcHZVfb+/1keBp1XVp5J8EXhBkkuALarqojUDSXIEcATA8mwzxq8oSZpdw99DHrdp3ZN+L/APVfU4usp3+chra3Yo1DraPwi8kq6K/vBiH1ZVK6pq/6raf8vNli92iiSpNQ12d08rSW8HXN3vH7bGawclWZ7kgcDTgXP69icleXiSzYCX0nWZU1VfA3YHfg/46KQDlyRpKNNK0kcCn0xyHrByjdcuBE4HzgLeUlU/7NvPAf4BuISue/zTI+/5BHBmVf1kkkFLkuZIg5X02O5JV9W2axwfAxzT758InLjEWy+sqlcs0n5zVb1gifc8DXBUtySpaXP1nHSS7ZN8h26Q2mlDxyNJmhEFVMa/DWzQaUGr6sgl2s8Azlik/UbgkRMNSpKkGeHc3ZKkJtQM3EMeN5O0JKkNDSbpubonLUnSpsQkLUlqw0ADx5Is69em+Gx//PB+vYrLknw8yZZ9+1b98WX963us69omaUmSNs7r6eb0WPB24KiqegTwE+Dwvv1w4Cd9+1H9eWtlkpYkNSE1/m2dn5nsBjyfbspqkgR4JvCp/pRjgYP7/YP6Y/rXn9WfvySTtCRp/k1itrEuSe+U5NyR7Yg1PvlddCs7ru6PHwjcWFV39cdXAbv2+7sCVwL0r9/Un78kR3dLkrS0lVW1/2IvJHkBcF1VnZfk6ZP4cJO0JKkBg8wQ9lTghUmeR7e64wOAdwPbJ9m8r5Z34+4Fpq6mWyDqqiSb0y0+df3aPsDubkmSNkBV/ceq2q2q9gBeBnyxql5Ot2jUIf1ph3H32hUncfdKkIf056/1zrdJWpLUhtlZBesvgDcmuYzunvPRffvRwAP79jcCb17XhezuliS1YcAZx0bXnKiq7wNPWuSc24EX35frWklLkjSjrKQlSW1w7m5JkjQtVtKSpPlX
DPEI1sRZSUuSNKOspCVJTVifubbnjUlaktSGBpO03d2SJM0ok7QkSTPKJC1J0oxq/570FlvA7rsMHcX0fOunQ0cwNc941R8MHcJU3fmU9v/vOuoB37px6BCmZtlNtwwdwtTk2i0md+0G70lvWv+vlyS1y+ekJUnStFhJS5Lm38YtLTmzrKQlSZpRVtKSpDY0WEmbpCVJTWhxdLfd3ZIkzSgraUlSG6ykJUnStFhJS5LaYCUtSZKmxUpakjT3Um2O7jZJS5La4NzdkiRpWqykJUltaLC720pakqQZZSUtSWqCA8ckSZpVDSZpu7slSZpRVtKSpPnX6HPSVtKSJM0oK2lJUhsarKRN0pKkNjSYpO3uliRpRllJS5Ka4MAxSZI0NRuUpJNUkneOHP9pkiM38FrbJ/mTDXzv5Ul22pD3SpI06za0kr4D+J0xJcjtgUWTdBK74yVJm6wNTdJ3ASuA/7DmC0kelOSEJOf021P79iOT/OnIed9MsgfwNuBXklyQ5B1Jnp7kK0lOAr7Vn/uvSc5LcnGSIzYwZklSy2oC28A2plL9R+DCJP9ljfZ3A0dV1VeTPBT4AvCra7nOm4HHVtU+AEmeDuzbt/2gP+f3q+qGJFsD5yQ5oaqu34jYJUktaXTGsQ1O0lV1c5LjgNcBt4289Gzg0UkWjh+QZNv7ePmzRxI0wOuSvKjf3x3YE1gySffV9hEAy7d4wH38aEmSZsPG3vN9F3A+8OGRts2AA6rq9tETk9zFPbvXl6/lureOvO/pdIn/yVX1syRnrOO9VNUKuu54ttt6lwZ/W0mS7qXBf+036hGsqroB+ARw+EjzKcBrFw6S7NPvXk7XjU2SfYGH9+0/Be6/lo/ZDvhJn6D3Ag7YmJglSZoX43hO+p3A6Cjv1wH7J7kwybeAP+rbTwB2THIx8O+B7wD095bP7AeSvWOR658MbJ7kErpBZmeNIWZJUmscONapqm1H9n8E3G/keCXw0kXecxvwW0tc7/fWaDpj5LU7gAOXeN8e9yFsSVKjQpsDx5xxTJKkGeVkIZKkNlhJS5KkaTFJS5LmXz+Zybi3tUmyPMnZSb7Rz4j5d337w5N8LcllST6eZMu+fav++LL+9T3W9bVM0pKkNkx/dPcdwDOram9gH+C5SQ4A3k438+YjgJ9w92PKh9M9UvwI4Kj+vLUySUuStAGqc0t/uEW/FfBM4FN9+7HAwf3+Qf0x/evPysj0nIsxSUuS2jDAc9JJliW5ALgOOBX4HnBjVd3Vn3IVsGu/vytwJUD/+k3AA9d2fZO0JElL2ynJuSPbPVZirKpV/QJRuwFPAvYa54f7CJYkqQkTmsxkZVXtv66TqurGJKcDTwa2T7J5Xy3vBlzdn3Y13SJRVyXZnG7a67Wu6GglLUnSBkjyoCTb9/tbA88BLgFOBw7pTzsMOLHfP6k/pn/9i1W11p8WVtKSpDZMfzKTXYBjkyyjK3o/UVWf7det+FiStwJfB47uzz8aOD7JZcANwMvW9QEmaUnS/BtgQYyquhB4wiLt36e7P71m++3Ai+/LZ9jdLUnSjLKSliQ1wVWwJEnS1FhJS5La0GAlbZKWJDXB7m5JkjQ1VtKSpDZYSUuSpGmxkpYkzb8BJjOZBpO0JGnupd9aY3e3JEkzykpaktQGu7vn0KrV5KZbho5ieta+6llTtj7/iqFDmKqfvvARQ4cwVSuftMPQIUzNjsd8d+gQpqZW/XzoEOZK+0lakrRJcDITSZI0NVbSkqQ2NFhJm6QlSW1oMEnb3S1J0oyykpYkzb9y4JgkSZoiK2lJUhsarKRN0pKkJtjdLUmSpsZKWpLUBitpSZI0LVbSkqQmtHhP2iQtSZp/hd3dkiRpeqykJUltsJKWJEnTYiUtSZp7oc2BY1bSkiTNKCtpSVIbGqykTdKSpCak2svSdndLkjSjrKQlSfPPyUwkSdI0WUlLkprgI1hjluSvklyc5MIkFyT5tfV83x5Jvjnp+CRJc6QmsA1s
sEo6yZOBFwD7VtUdSXYCthwqHkmSZs2Q3d27ACur6g6AqloJkORvgN8Gtgb+N/Dqqqok+wEf6t97ygDxSpJmmN3d43UKsHuS7yR5X5Lf7Nv/oaqeWFWPpUvUL+jbPwy8tqr2HiJYSZKmbbAkXVW3APsBRwA/Bj6e5JXAM5J8LclFwDOBxyTZHti+qr7cv/34tV07yRFJzk1y7p2rb5vcl5AkzQ7vSY9XVa0CzgDO6JPyq4HHA/tX1ZVJjgSWb8B1VwArALbbcucZ+I9ZkjRRZXf3WCV5VJI9R5r2AS7t91cm2RY4BKCqbgRuTPK0/vWXTy9SSZKGMWQlvS3w3r4r+y7gMrqu7xuBbwLXAueMnP8q4ENJCgeOSZLW1GAlPViSrqrzgKcs8tJf99ti548OGvvzCYUmSdJMcMYxSdLcC23ekzZJS5La4FKVkiRpWkzSkqQmpMa/rfMzk92TnJ7kW/1aFK/v23dMcmqS7/Z/d+jbk+Q9SS7r163Yd23XN0lLkrTh7gLeVFWPBg4AXpPk0cCbgdOqak/gtP4Y4EBgz347Anj/2i5ukpYkzb9JzDa2HpV0VV1TVef3+z8FLgF2BQ4Cju1POxY4uN8/CDiuOmcB2yfZZanrm6QlSRqDJHsATwC+BuxcVdf0L10L7Nzv7wpcOfK2q/q2RTm6W5LUhKyeyGV3SnLuyPGKfurpe352N0vmCcAbqurmJL94rV/JcYOGnpukJUltmMwTWCurav+1nZBkC7oE/ZGq+pe++UdJdqmqa/ru7Ov69quB3Ufevlvftii7uyVJ2kDpSuajgUuq6r+NvHQScFi/fxhw4kj7K/pR3gcAN410i9+LlbQkqQkDzTj2VODfARcluaBv+0vgbcAnkhwOXAG8pH/t88Dz6Nar+BnduhRLMklLkrSBquqrdLOSLuZZi5xfwGvW9/omaUnS/CuanBbUJC1JakKLC2w4cEySpBllJS1JaoOVtCRJmhYraUnS3Att3pM2SUuS5l9Vk6O77e6WJGlGWUlLkprQYne3lbQkSTPKSlqS1AYraUmSNC3tV9KrVrH6ppuHjmJ6Nls2dASakAd/9ntDhzBVq358/dAhTM21r/u1oUOYmp9/5MyJXbvFe9LtJ2lJUvsKWN1elra7W5KkGWUlLUlqQ3uFtJW0JEmzykpaktQEB45JkjSrnLtbkiRNi5W0JKkJLXZ3W0lLkjSjrKQlSfOvaPIRLJO0JGnuBYgDxyRJ0rRYSUuS2rB66ADGz0pakqQZZSUtSWqC96QlSdLUWElLkuafj2BJkjSryrm7JUnS9FhJS5Ka4NzdkiRpaqykJUltaPCetElakjT/CuKMY5IkaVqspCVJbWiwu9tKWpKkGbVeSTrJXyW5OMmFSS5I8muTCCbJ55NsP4lrS5IaVxPYBrbO7u4kTwZeAOxbVXck2QnYcn0unmTzqrprPc7r1+uu563PdSVJWtOmusDGLsDKqroDoKpWVtUPk1zeJ2yS7J/kjH7/yCTHJzkTOD7JK5OcmOSMJN9N8rf9eXskuTTJccA3gd0XrplkmySfS/KNJN9M8tL+Pfsl+VKS85J8Icku4/+PRJKk2bA+SfoUugT6nSTvS/Kb6/GeRwPPrqpD++MnAb8LPB54cZL9+/Y9gfdV1WOq6oqR9z8X+GFV7V1VjwVOTrIF8F7gkKraD/gQ8P+tRyySpE1B1fi3ga0zSVfVLcB+wBHAj4GPJ3nlOt52UlXdNnJ8alVd37f9C/C0vv2KqjprkfdfBDwnyduT/HpV3QQ8CngscGqSC4C/BnZb7MOTHJHk3CTn3lm3r+srSpI0k9brEayqWgWcAZyR5CLgMOAu7k7yy9d4y61rXmKJ4zXPW/i87yTZF3ge8NYkpwGfBi6uqievR7wrgBUA2y3bafifQpKkySpgU5zMJMmjkuw50rQPcAVwOV2FDV1X9to8J8mOSbYGDgbOXMdnPgT4WVX9D+AdwL7ApcCD+oFsJNkiyWPWFb8k
SfNqfSrpbYH39o9G3QVcRtf1/avA0UneQldlr83ZwAl03dP/o6rOTbLHWs5/HPCOJKuBnwN/XFV3JjkEeE+S7frY3wVcvB7fQZLUsFBNju5eZ5KuqvOApyzy0leARy5y/pGLnHtVVR28xnmX091jHm3bo9/9Qr+tee0LgN9YV8ySpE1Qg0naGcckSZpRE5+7u6qOAY6Z9OdIkjZxVtKSJGlBkg8luS7JN0fadkxyaj+B16lJdujbk+Q9SS7rp9ned13XN0lLkubfwiNY497W7Ri6CbhGvRk4rar2BE7rjwEOpJvEa0+6AdjvX9fFTdKSpCakauzbulTVl4Eb1mg+CDi23z+W7tHjhfbjqnMWsP26prc2SUuStLSdFmaw7Lcj1uM9O1fVNf3+tcDO/f6uwJUj513Vty1p4gPHJEmaiskMHFtZVfuv+7TFVVUl2eDArKQlSRqvHy10Y/d/r+vbrwZ2Hzlvt75tSSZpSVIDJrAC1oZX5ifRrXFB//fEkfZX9KO8DwBuGukWX5Td3ZKk+VcM8px0ko8CT6e7d30V8LfA24BPJDmcbq2Ll/Snf55u4ajLgJ8Br1rX9U3SkiRtoKo6dImXnrXIuQW85r5c3yQtSWrDprhUpSRJGoaVtCSpCS0uVWklLUnSjLKSliS1ocFK2iQtSZp/BaxuL0nb3S1J0oyykpYkNWCjZgibWVbSkiTNKCtpSVIbGqykTdKSpDY0mKTt7pYkaUZZSUuS5l+jj2A1n6RvXn39ylNu/vAVU/7YnYCVU/7MIQ3zfa9b9ykT4H+37Rruux718SE+dajv+7ABPnNuNZ+kq+pB0/7MJOdW1f7T/tyhbErfd1P6rrBpfd9N6btCi9+3oNpbBqv5JC1J2kQ4cEySJE2LlfRkrBg6gCnblL7vpvRdYdP6vpvSd4XWvm+jA8dSDXYPSJI2LdttuXM95ZcOHft1T77y3ecNee/eSlqS1IYGi07vSUuSNKOspCVJbbCS1mKSPG7oGKYpnd2HjkOS7tYvVTnubWBW0uPxviRbAccAH6mqmwaOZ6KqqpJ8HtgkfpwkeSfwoaq6eOhYJinJjmt7vapumFYsk5bkIrrxwIuqqsdPMZypSbIz8PfAQ6rqwCSPBp5cVUcPHJqFda+YAAAKx0lEQVSWYJIeg6r69SR7Ar8PnJfkbODDVXXqwKFN0vlJnlhV5wwdyBRcAqxIsjnwYeCjjf4QO48ucWWR1wr45emGM1Ev6P++pv97fP/35QPEMk3H0P1v+K/64+8AHwfmP0kXsLq9Gcd8BGuMkiwDDgbeA9xM94/dX1bVvwwa2AQk+TbwCOAK4Fa671qtViAASR4FvAo4FDgT+O9VdfqwUWljJPl6VT1hjbbzq2rfoWKapCTnVNUTR793kguqap+hY9tY223x4HrKTi8e+3VPvvZ9PoI175I8nu4f7+cDpwK/XVXnJ3kI8H+A5pI08G+GDmCa+h9ge/XbSuAbwBuTvLqqXjZocBOQZAdgT2D5QltVfXm4iCYmSZ5aVWf2B0+h7bE6tyZ5IH1Xf5IDgHZ6hRosOk3S4/Fe4IN0VfNtC41V9cMkfz1cWJNTVVcAJHkwI/+QtyjJUcBvA6cBf19VZ/cvvT3JpcNFNhlJ/gB4PbAbcAFwAN2PzWcOGdeEHA58KMl2dL1BP6G7bdWqNwInAb+S5EzgQcAhw4Y0RiZpramvsK6uquMXe32p9nmX5IXAO4GH0C0a+TC6e7ePGTKuCbkQ+OuqunWR15407WCm4PXAE4GzquoZSfaiG2zUnKo6D9i7T9I0OtbgF/oevt8EHkX3o+TSqvr5wGFpLUzSG6mqViXZPcmWVXXn0PFM0VvoKqz/VVVPSPIM4N8OHNOkHAO8KMnT6LoJv1pVn4Zm/1G/vapuT0KSrarq2/39+CYleT7dj8vlSTdmrqr+86BBTUiSFwMnV9XFfS/fvkneWlXnDx3bxqsm5+42SY/HD4Azk5xEN4gKgKr6b8OFNHE/
r6rrk2yWZLOqOj3Ju4YOakL+kW6Q3Ef741cneXZVvWYt75lnVyXZHvhX4NQkP6EbINicJP8E3A94Bt0tq0OAs9f6pvn2n6rqk/0PzmcB/xV4P/Brw4alpZikx+N7/bYZcP+BY5mWG5NsC3wF+EiS6xj5gdKYZwK/Wv2jEEmOBZp9ZrqqXtTvHpnkdGA74OQBQ5qkp1TV45NcWFV/1z8T/z+HDmqCVvV/n0/3dMLnkrx1yIDGpqCqvUewTNJjUFV/N3QMAzgIuA14A92zpdsBTXYRApcBD+XuanL3vq05/RiLi6tqL4Cq+tLAIU3awkDPn/VPY9wA7DJgPJN2dZIPAM+hG/i4FS2NZre7W4tJ8hnuPXvRTcC5wAeq6vbpRzVZVXVrkocBe1bVsUnuBywbOq4JuT9wST9JDXSDqs7tb29QVS8cLLIx68dYXJrkoVX1f4eOZwo+23ft/xe6yVyg6/Zu1UuA5wL/tapuTLIL8GcDx6S1MEmPx/fpHmVYuGf5UuCnwCOB/w78u4HimpgkfwgcAewI/AqwK/BPdPe5WvM3QwcwZTsAF/c/SkbHWDTzYyTJE4Erq+ot/fG2wEXAt4GjhoxtEpI8oKpupntc8oy+bUfgDrpiog0+gqUlPKWqnjhy/JmRmX1avXf5GrrHj74GUFXf7Z+Zbk5VfSnJL9F93wLOqaprBw5rkv7T0AFMwQeAZwMk+Q3gbcBrgX2AFbT07HDnn+mmQl1s6tfWpnxtikl6PLYd7R5M8lBg2/61Vh/LuqOq7lx4ZKWf17q9n7H8YnKPvwG+SPeP23uT/Oeq+tCwkU3M86rqL0YbkrwdaOn+9LKRBUNeCqyoqhOAE5JcMGBcE1FVL0j3f9bfbPY2RlWTc3e3M2BgWG8Cvprk9CRn0I14/tMk2wDHDhrZ5HwpyV8CWyd5DvBJ4DMDxzQpfwY8oapeWVWHAfsBf7GO98yz5yzSduDUo5isZf0PS+hu0Xxx5LUmi5f+6YTPDR2H7psm/8c4bVX1+X4VrL36pktHBou1+uzwm+mmVLwIeDXwedodcHM93RiDBT/t25qS5I+BP6GbMvLCkZfuD/zvYaKamI/S/dBcSTfC+ysASR5BS3NZ31vbq9c1eE/aVbDGpJ+Yfw9GfvhU1XGDBTQhm9Co319Ichzd2tkn0nXpH0Q3VeiF0M6kNf3UmDsA/z/dj7AFP21pLekF/eISuwCnLEz5muSRwLZtzMB1by2vXrfdsp3qgK2fP/brnnLrca6CNe+SHE83wvkC7p4soIDmkjTdLFT7AiQ5oap+d+B4pmFhspoFJ/Z/m5q4pp/i9KYka3blb5tk29Z+nFXVWYu0fWeIWKZok1q9rgUm6fHYH3h0bRrdEqOjQjeJEaGb4GQ1n+PuEcDLgYcDl9Lm4imblKq6Ism+wMI89Ge202tQTXZ3m6TH45vALwHXDB3IFNQS+81K8iDgz+kXYVhor6oWl26kqh43etz/o/4nA4WjMUryN8CLuXuN+w8n+WRVtTE1aINM0uOxE/CtfvKHO/q2qqqDBoxpUvZOcjNdlbV1vw9339t6wHChTcxHgI/TPWf6R8BhwI8HjWiK+uUNXYChDS8H9l4Y2JrkbXS36eY/SRdOC6olHTmyH+DXgZcNE8pkVVWrU3+uzQOr6ugkr+/nsv5SkjZHxwJJ3jhyuBndGIQfDhSOxuuHdL1BC0+fbAVcPVw4Y+YCG1pMPyPVE4Dfo+tK+gHdFJlqw8/7v9f0aw//kG461FaNDoi7i+4e9QkDxaLxuoluytdT6WrP5wBnJ3kPQFW9bsjgdG8m6Y3QP65xaL+tpOsSTVU9Y9DANG5v7R9PehPwXuABwH8YNqTJWRgol+R+VfWzoePRWH263xacMVAcY1dA2d2tNXybbhKEF1TVZQBJmv3He1NVVZ/td28Cmv8BluTJwNF0U9s+NMnewKurysFjc6xfhvS3qurlQ8ei9WeS3ji/Q3fv+fQkJwMf456PKGmOJXkv
axnB3nDX4LvonqddWIrzG/0iFJpj/TKkD0uyZVW1t6ZAlfekdU9V9a/Av/ZzdB8EvAF4cJL3A5+uqlMGDVAba3QJv78D/naoQKatqq5cWDylt2qpczVXvg+c2a+FProMaROz5tndrUX1Uwr+M/DPSXagGzz2F4BJeo5V1S8WR0nyhtHjxl3ZT3NbSbYAXg9cMnBMGo+F2fM2o7EZ81plkh6zqvoJ3Xq0K4aORWPV3k/0pf0R8G5gV7rHc06hWz9cc6752fMa7O52gQ1pPSQ5v6r2HToOaWMkOZ1FfnC2MHtePy5opwlcemVVPXcC110vJmlpCUl+yt3/oN0PWHgcqcnZ1fopI5dSVfWWqQWjiUiy38jhcuB3gbuq6s8HCknrYJKWBECSNy3SvA3duuEPrKptpxySpiDJ2VX1pKHj0OK8Jy0JgKp658J+kvvTDRh7Fd2jhe9c6n2aH0lGZ8rbjG4Fv+0GCkfrwSQt6Rf6f8TfSLcQw7HAvv1gSLXhPO6+hXMXcDldT4lmlElaEgBJ3kE3Qc8K4HFVdcvAIWlMkjwRuLKqHt4fH0Z3P/py4FsDhqZ18J60JACSrKZbavUu7jkCuMmBcpuSJOcDz66qG/rZ4z4GvBbYB/jVqjpk0AC1JCtpSQBU1WZDx6CJWVZVN/T7LwVWVNUJwAlJLhgwLq2D/6eUpPYtS7JQlD0L+OLIaxZrM8z/ciSpfR8FvpRkJXAb3ep9JHkE3epumlHek5akTUCSA4BdgFP69QZI8khg26o6f9DgtCSTtCRJM8p70pIkzSiTtCRJM8okLUnSjDJJS5I0o0zSkiTNqP8HJN8qPEc/tDIAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 576x576 with 2 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import sklearn\n",
    "from sklearn.metrics import classification_report, confusion_matrix\n",
    "import numpy as np\n",
    "\n",
    "nb_train_samples = 28273\n",
    "nb_validation_samples = 3534\n",
    "\n",
    "# We need to recreate our validation generator with shuffle = false\n",
    "validation_generator = validation_datagen.flow_from_directory(\n",
    "        validation_data_dir,\n",
    "        color_mode = 'grayscale',\n",
    "        target_size=(img_rows, img_cols),\n",
    "        batch_size=batch_size,\n",
    "        class_mode='categorical',\n",
    "        shuffle=False)\n",
    "\n",
    "class_labels = validation_generator.class_indices\n",
    "class_labels = {v: k for k, v in class_labels.items()}\n",
    "classes = list(class_labels.values())\n",
    "\n",
    "# Confusion Matrix and Classification Report\n",
    "Y_pred = model.predict_generator(validation_generator, nb_validation_samples // batch_size+1)\n",
    "y_pred = np.argmax(Y_pred, axis=1)\n",
    "\n",
    "print('Confusion Matrix')\n",
    "print(confusion_matrix(validation_generator.classes, y_pred))\n",
    "print('Classification Report')\n",
    "target_names = list(class_labels.values())\n",
    "print(classification_report(validation_generator.classes, y_pred, target_names=target_names))\n",
    "\n",
    "plt.figure(figsize=(8,8))\n",
    "cnf_matrix = confusion_matrix(validation_generator.classes, y_pred)\n",
    "\n",
    "plt.imshow(cnf_matrix, interpolation='nearest')\n",
    "plt.colorbar()\n",
    "tick_marks = np.arange(len(classes))\n",
    "_ = plt.xticks(tick_marks, classes, rotation=90)\n",
    "_ = plt.yticks(tick_marks, classes)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Loading our saved model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.models import load_model\n",
    "\n",
    "classifier = load_model('/home/deeplearningcv/DeepLearningCV/Trained Models/emotion_little_vgg_3.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Get our class labels"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 3534 images belonging to 6 classes.\n",
      "{0: 'Angry', 1: 'Fear', 2: 'Happy', 3: 'Neutral', 4: 'Sad', 5: 'Surprise'}\n"
     ]
    }
   ],
   "source": [
    "validation_generator = validation_datagen.flow_from_directory(\n",
    "        validation_data_dir,\n",
    "        color_mode = 'grayscale',\n",
    "        target_size=(img_rows, img_cols),\n",
    "        batch_size=batch_size,\n",
    "        class_mode='categorical',\n",
    "        shuffle=False)\n",
    "\n",
    "class_labels = validation_generator.class_indices\n",
    "class_labels = {v: k for k, v in class_labels.items()}\n",
    "classes = list(class_labels.values())\n",
    "print(class_labels)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Let's test on some of validation images"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/deeplearningcv/anaconda3/envs/cv/lib/python3.6/site-packages/keras_preprocessing/image.py:492: UserWarning: grayscale is deprecated. Please use color_mode = \"grayscale\"\n",
      "  warnings.warn('grayscale is deprecated. Please use '\n"
     ]
    }
   ],
   "source": [
    "from keras.models import load_model\n",
    "from keras.optimizers import RMSprop, SGD, Adam\n",
    "from keras.preprocessing import image\n",
    "import numpy as np\n",
    "import os\n",
    "import cv2\n",
    "import numpy as np\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "import re\n",
    "\n",
    "def draw_test(name, pred, im, true_label):\n",
    "    BLACK = [0,0,0]\n",
    "    expanded_image = cv2.copyMakeBorder(im, 160, 0, 0, 300 ,cv2.BORDER_CONSTANT,value=BLACK)\n",
    "    cv2.putText(expanded_image, \"predicted - \"+ pred, (20, 60) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,0,255), 2)\n",
    "    cv2.putText(expanded_image, \"true - \"+ true_label, (20, 120) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,255,0), 2)\n",
    "    cv2.imshow(name, expanded_image)\n",
    "\n",
    "\n",
    "def getRandomImage(path, img_width, img_height):\n",
    "    \"\"\"Loads a random image from a random class folder in our test path.\"\"\"\n",
    "    folders = list(filter(lambda x: os.path.isdir(os.path.join(path, x)), os.listdir(path)))\n",
    "    random_directory = np.random.randint(0,len(folders))\n",
    "    path_class = folders[random_directory]\n",
    "    file_path = path + path_class\n",
    "    file_names = [f for f in listdir(file_path) if isfile(join(file_path, f))]\n",
    "    random_file_index = np.random.randint(0,len(file_names))\n",
    "    image_name = file_names[random_file_index]\n",
    "    final_path = file_path + \"/\" + image_name\n",
    "    return image.load_img(final_path, target_size = (img_width, img_height), color_mode='grayscale'), final_path, path_class\n",
    "\n",
    "# dimensions of our images\n",
    "img_width, img_height = 48, 48\n",
    "\n",
    "# We use a very small learning rate \n",
    "classifier.compile(loss = 'categorical_crossentropy',\n",
    "                   optimizer = RMSprop(lr = 0.001),\n",
    "                   metrics = ['accuracy'])\n",
    "\n",
    "files = []\n",
    "predictions = []\n",
    "true_labels = []\n",
    "\n",
    "# predicting images\n",
    "for i in range(0, 10):\n",
    "    path = './fer2013/validation/' \n",
    "    img, final_path, true_label = getRandomImage(path, img_width, img_height)\n",
    "    files.append(final_path)\n",
    "    true_labels.append(true_label)\n",
    "    x = image.img_to_array(img)\n",
    "    x = x * 1./255\n",
    "    x = np.expand_dims(x, axis=0)\n",
    "    images = np.vstack([x])\n",
    "    classes = classifier.predict_classes(images, batch_size = 10)\n",
    "    predictions.append(classes)\n",
    "    \n",
    "for i in range(0, len(files)):\n",
    "    image = cv2.imread(files[i])\n",
    "    image = cv2.resize(image, None, fx=3, fy=3, interpolation = cv2.INTER_CUBIC)\n",
    "    draw_test(\"Prediction\", class_labels[predictions[i][0]], image, true_labels[i])\n",
    "    cv2.waitKey(0)\n",
    "\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Test on a single image"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.models import load_model\n",
    "from keras.preprocessing import image\n",
    "import numpy as np\n",
    "import os\n",
    "import cv2\n",
    "import numpy as np\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "from keras.preprocessing.image import img_to_array\n",
    "\n",
    "face_classifier = cv2.CascadeClassifier('./Haarcascades/haarcascade_frontalface_default.xml')\n",
    "\n",
    "def face_detector(img):\n",
    "    # Convert image to grayscale\n",
    "    gray = cv2.cvtColor(img.copy(),cv2.COLOR_BGR2GRAY)\n",
    "    faces = face_classifier.detectMultiScale(gray, 1.3, 5)\n",
    "    if len(faces) == 0:\n",
    "        return (0,0,0,0), np.zeros((48,48), np.uint8), img\n",
    "    \n",
    "    allfaces = []   \n",
    "    rects = []\n",
    "    for (x,y,w,h) in faces:\n",
    "        cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)\n",
    "        roi_gray = gray[y:y+h, x:x+w]\n",
    "        roi_gray = cv2.resize(roi_gray, (48, 48), interpolation = cv2.INTER_AREA)\n",
    "        allfaces.append(roi_gray)\n",
    "        rects.append((x,w,y,h))\n",
    "    return rects, allfaces, img\n",
    "\n",
    "img = cv2.imread(\"rajeev.jpg\")\n",
    "rects, faces, image = face_detector(img)\n",
    "\n",
    "i = 0\n",
    "for face in faces:\n",
    "    roi = face.astype(\"float\") / 255.0\n",
    "    roi = img_to_array(roi)\n",
    "    roi = np.expand_dims(roi, axis=0)\n",
    "\n",
    "    # make a prediction on the ROI, then lookup the class\n",
    "    preds = classifier.predict(roi)[0]\n",
    "    label = class_labels[preds.argmax()]   \n",
    "\n",
    "    #Overlay our detected emotion on our pic\n",
    "    label_position = (rects[i][0] + int((rects[i][1]/2)), abs(rects[i][2] - 10))\n",
    "    i += 1\n",
    "    cv2.putText(image, label, label_position , cv2.FONT_HERSHEY_SIMPLEX,1, (0,255,0), 2)\n",
    "    \n",
    "cv2.imshow(\"Emotion Detector\", image)\n",
    "cv2.waitKey(0)\n",
    "\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Let's try this on our webcam\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "import cv2\n",
    "import numpy as np\n",
    "from time import sleep\n",
    "from keras.preprocessing.image import img_to_array\n",
    "\n",
    "face_classifier = cv2.CascadeClassifier('./Haarcascades/haarcascade_frontalface_default.xml')\n",
    "\n",
    "def face_detector(img):\n",
    "    # Convert image to grayscale\n",
    "    gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\n",
    "    faces = face_classifier.detectMultiScale(gray, 1.3, 5)\n",
    "    if len(faces) == 0:\n",
    "        return (0,0,0,0), np.zeros((48,48), np.uint8), img\n",
    "    \n",
    "    for (x,y,w,h) in faces:\n",
    "        cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)\n",
    "        roi_gray = gray[y:y+h, x:x+w]\n",
    "\n",
    "    try:\n",
    "        roi_gray = cv2.resize(roi_gray, (48, 48), interpolation = cv2.INTER_AREA)\n",
    "    except:\n",
    "        return (x,w,y,h), np.zeros((48,48), np.uint8), img\n",
    "    return (x,w,y,h), roi_gray, img\n",
    "\n",
    "cap = cv2.VideoCapture(0)\n",
    "\n",
    "while True:\n",
    "\n",
    "    ret, frame = cap.read()\n",
    "    rect, face, image = face_detector(frame)\n",
    "    if np.sum([face]) != 0.0:\n",
    "        roi = face.astype(\"float\") / 255.0\n",
    "        roi = img_to_array(roi)\n",
    "        roi = np.expand_dims(roi, axis=0)\n",
    "\n",
    "        # make a prediction on the ROI, then lookup the class\n",
    "        preds = classifier.predict(roi)[0]\n",
    "        label = class_labels[preds.argmax()]  \n",
    "        label_position = (rect[0] + int((rect[1]/2)), rect[2] + 25)\n",
    "        cv2.putText(image, label, label_position , cv2.FONT_HERSHEY_SIMPLEX,2, (0,255,0), 3)\n",
    "    else:\n",
    "        cv2.putText(image, \"No Face Found\", (20, 60) , cv2.FONT_HERSHEY_SIMPLEX,2, (0,255,0), 3)\n",
    "        \n",
    "    cv2.imshow('All', image)\n",
    "    if cv2.waitKey(1) == 13: #13 is the Enter Key\n",
    "        break\n",
    "        \n",
    "cap.release()\n",
    "cv2.destroyAllWindows()      "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "cap.release()\n",
    "cv2.destroyAllWindows()      "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
