{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Training our Fruit Classifier\n",
    "### Experimenting with Callbacks\n",
    "- Let's create our data generators"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 41322 images belonging to 81 classes.\n",
      "Found 13877 images belonging to 81 classes.\n"
     ]
    }
   ],
   "source": [
    "from __future__ import print_function\n",
    "import keras\n",
    "from keras.preprocessing.image import ImageDataGenerator\n",
    "from keras.models import Sequential\n",
    "from keras.layers import Dense, Dropout, Activation, Flatten\n",
    "from keras.layers import Conv2D, MaxPooling2D\n",
    "import os\n",
    "\n",
    "num_classes = 81\n",
    "img_rows, img_cols = 32, 32\n",
    "batch_size = 16\n",
    "\n",
    "from keras.preprocessing.image import ImageDataGenerator\n",
    "\n",
    "train_data_dir = './fruits-360/train'\n",
    "validation_data_dir = './fruits-360/validation'\n",
    "\n",
    "# Let's use some data augmentation \n",
    "train_datagen = ImageDataGenerator(\n",
    "      rescale=1./255,\n",
    "      rotation_range=30,\n",
    "      width_shift_range=0.3,\n",
    "      height_shift_range=0.3,\n",
    "      horizontal_flip=True,\n",
    "      fill_mode='nearest')\n",
    " \n",
    "validation_datagen = ImageDataGenerator(rescale=1./255)\n",
    " \n",
    "train_generator = train_datagen.flow_from_directory(\n",
    "        train_data_dir,\n",
    "        target_size=(img_rows, img_cols),\n",
    "        batch_size=batch_size,\n",
    "        class_mode='categorical',\n",
    "        shuffle=True)\n",
    " \n",
    "validation_generator = validation_datagen.flow_from_directory(\n",
    "        validation_data_dir,\n",
    "        target_size=(img_rows, img_cols),\n",
    "        batch_size=batch_size,\n",
    "        class_mode='categorical',\n",
    "        shuffle=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Let's define our model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "model = Sequential()\n",
    "\n",
    "# Padding = 'same'  results in padding the input such that\n",
    "# the output has the same length as the original input\n",
    "model.add(Conv2D(32, (3, 3), padding='same',\n",
    "                 input_shape= (img_rows, img_cols, 3)))\n",
    "model.add(Activation('relu'))\n",
    "model.add(Conv2D(32, (3, 3)))\n",
    "model.add(Activation('relu'))\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.25))\n",
    "\n",
    "model.add(Conv2D(64, (3, 3), padding='same'))\n",
    "model.add(Activation('relu'))\n",
    "model.add(Conv2D(64, (3, 3)))\n",
    "model.add(Activation('relu'))\n",
    "model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "model.add(Dropout(0.25))\n",
    "\n",
    "model.add(Flatten())\n",
    "model.add(Dense(512))\n",
    "model.add(Activation('relu'))\n",
    "model.add(Dropout(0.5))\n",
    "model.add(Dense(num_classes))\n",
    "model.add(Activation('softmax'))\n",
    "\n",
    "# initiate RMSprop optimizer and configure some parameters\n",
    "#opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-6)\n",
    "print(model.summary())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/5\n",
      "2582/2582 [==============================] - 269s 104ms/step - loss: 1.7532 - acc: 0.4740 - val_loss: 0.5560 - val_acc: 0.8069\n",
      "\n",
      "Epoch 00001: val_loss improved from inf to 0.55598, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5\n",
      "Epoch 2/5\n",
      "2582/2582 [==============================] - 262s 101ms/step - loss: 0.6594 - acc: 0.7921 - val_loss: 0.4869 - val_acc: 0.8577\n",
      "\n",
      "Epoch 00002: val_loss improved from 0.55598 to 0.48694, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5\n",
      "Epoch 3/5\n",
      "2582/2582 [==============================] - 206s 80ms/step - loss: 0.5250 - acc: 0.8455 - val_loss: 0.2939 - val_acc: 0.9148\n",
      "\n",
      "Epoch 00003: val_loss improved from 0.48694 to 0.29386, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5\n",
      "Epoch 4/5\n",
      "2582/2582 [==============================] - 263s 102ms/step - loss: 0.5116 - acc: 0.8599 - val_loss: 0.3469 - val_acc: 0.9279\n",
      "\n",
      "Epoch 00004: val_loss did not improve from 0.29386\n",
      "Epoch 5/5\n",
      "2582/2582 [==============================] - 275s 106ms/step - loss: 0.5639 - acc: 0.8580 - val_loss: 0.6419 - val_acc: 0.8394\n",
      "\n",
      "Epoch 00005: val_loss did not improve from 0.29386\n"
     ]
    }
   ],
   "source": [
    "from keras.optimizers import RMSprop, SGD\n",
    "from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau\n",
    "\n",
    "                     \n",
    "checkpoint = ModelCheckpoint(\"/home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5\",\n",
    "                             monitor=\"val_loss\",\n",
    "                             mode=\"min\",\n",
    "                             save_best_only = True,\n",
    "                             verbose=1)\n",
    "\n",
    "earlystop = EarlyStopping(monitor = 'val_loss', \n",
    "                          min_delta = 0, \n",
    "                          patience = 3,\n",
    "                          verbose = 1,\n",
    "                          restore_best_weights = True)\n",
    "\n",
    "reduce_lr = ReduceLROnPlateau(monitor = 'val_loss',\n",
    "                              factor = 0.2,\n",
    "                              patience = 3,\n",
    "                              verbose = 1,\n",
    "                              min_delta = 0.0001)\n",
    "\n",
    "# we put our call backs into a callback list\n",
    "callbacks = [earlystop, checkpoint, reduce_lr]\n",
    "\n",
    "# We use a very small learning rate \n",
    "model.compile(loss = 'categorical_crossentropy',\n",
    "              optimizer = RMSprop(lr = 0.001),\n",
    "              metrics = ['accuracy'])\n",
    "\n",
    "nb_train_samples = 41322\n",
    "nb_validation_samples = 13877\n",
    "epochs = 10\n",
    "\n",
    "history = model.fit_generator(\n",
    "    train_generator,\n",
    "    steps_per_epoch = nb_train_samples // batch_size,\n",
    "    epochs = epochs,\n",
    "    callbacks = callbacks,\n",
    "    validation_data = validation_generator,\n",
    "    validation_steps = nb_validation_samples // batch_size)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Displaying our Confusion Matrix"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Confusion Matrix\n",
      "[[136   0   0 ...   0   0  21]\n",
      " [ 21 137   0 ...   0   0   0]\n",
      " [  0  21  68 ...   0   0   0]\n",
      " ...\n",
      " [  0   0   0 ... 143   0   0]\n",
      " [  0   0   0 ...  21 106   0]\n",
      " [  0   0   0 ...   0  21 228]]\n",
      "Classification Report\n",
      "                     precision    recall  f1-score   support\n",
      "\n",
      "     Apple Braeburn       0.49      0.83      0.61       164\n",
      "     Apple Golden 1       0.82      0.84      0.83       164\n",
      "     Apple Golden 2       0.69      0.41      0.52       164\n",
      "     Apple Golden 3       0.56      0.97      0.71       161\n",
      " Apple Granny Smith       0.58      0.87      0.70       164\n",
      "        Apple Red 1       0.71      0.84      0.77       164\n",
      "        Apple Red 2       0.94      0.60      0.73       164\n",
      "        Apple Red 3       0.82      0.85      0.84       144\n",
      "Apple Red Delicious       0.87      0.87      0.87       166\n",
      "   Apple Red Yellow       0.80      0.78      0.79       164\n",
      "            Apricot       0.56      0.87      0.68       164\n",
      "            Avocado       0.88      0.48      0.62       143\n",
      "       Avocado ripe       0.87      0.87      0.87       166\n",
      "             Banana       0.72      0.63      0.67       166\n",
      "         Banana Red       0.44      0.80      0.57       166\n",
      "       Cactus fruit       0.52      0.56      0.54       166\n",
      "       Cantaloupe 1       0.87      0.87      0.87       164\n",
      "       Cantaloupe 2       0.89      0.87      0.88       164\n",
      "          Carambula       0.63      0.68      0.65       166\n",
      "           Cherry 1       0.52      0.87      0.65       164\n",
      "           Cherry 2       1.00      0.28      0.44       246\n",
      "     Cherry Rainier       0.90      0.80      0.85       246\n",
      "   Cherry Wax Black       0.87      0.87      0.87       164\n",
      "     Cherry Wax Red       0.86      0.81      0.84       164\n",
      "  Cherry Wax Yellow       0.87      0.87      0.87       164\n",
      "         Clementine       0.95      0.83      0.88       166\n",
      "              Cocos       0.87      0.87      0.87       166\n",
      "              Dates       0.87      0.87      0.87       166\n",
      "         Granadilla       0.55      0.83      0.66       166\n",
      "         Grape Pink       0.87      0.87      0.87       164\n",
      "        Grape White       0.87      0.87      0.87       166\n",
      "      Grape White 2       0.86      0.77      0.82       166\n",
      "    Grapefruit Pink       0.85      0.74      0.79       166\n",
      "   Grapefruit White       0.96      0.26      0.41       164\n",
      "              Guava       0.96      0.66      0.78       166\n",
      "        Huckleberry       0.87      0.87      0.87       166\n",
      "               Kaki       0.73      0.87      0.79       166\n",
      "               Kiwi       0.76      0.33      0.46       156\n",
      "           Kumquats       1.00      0.54      0.70       166\n",
      "              Lemon       0.51      0.76      0.61       164\n",
      "        Lemon Meyer       0.87      0.87      0.87       166\n",
      "              Limes       0.86      0.80      0.83       166\n",
      "             Lychee       0.87      0.87      0.87       166\n",
      "          Mandarine       1.00      0.22      0.36       166\n",
      "              Mango       0.87      0.87      0.87       166\n",
      "           Maracuja       0.86      0.75      0.80       166\n",
      " Melon Piel de Sapo       0.85      0.91      0.88       246\n",
      "           Mulberry       0.87      0.87      0.87       164\n",
      "          Nectarine       0.80      0.70      0.75       164\n",
      "             Orange       0.73      0.32      0.44       160\n",
      "             Papaya       0.63      0.65      0.64       164\n",
      "      Passion Fruit       0.87      0.87      0.87       166\n",
      "              Peach       0.52      0.77      0.62       164\n",
      "         Peach Flat       0.43      0.87      0.57       164\n",
      "               Pear       0.72      0.87      0.79       164\n",
      "         Pear Abate       0.34      0.82      0.48       166\n",
      "       Pear Monster       0.86      0.77      0.81       166\n",
      "      Pear Williams       0.83      0.41      0.55       166\n",
      "             Pepino       0.76      0.63      0.69       166\n",
      "           Physalis       0.87      0.87      0.87       164\n",
      " Physalis with Husk       0.87      0.29      0.44       164\n",
      "          Pineapple       0.89      0.70      0.79       166\n",
      "     Pineapple Mini       0.97      0.21      0.34       163\n",
      "       Pitahaya Red       0.87      0.86      0.86       166\n",
      "               Plum       0.82      0.85      0.84       151\n",
      "        Pomegranate       0.35      0.37      0.36       164\n",
      "             Quince       0.87      0.87      0.87       166\n",
      "           Rambutan       0.87      0.87      0.87       164\n",
      "          Raspberry       0.87      0.87      0.87       166\n",
      "              Salak       0.46      0.31      0.37       162\n",
      "         Strawberry       0.64      0.87      0.74       164\n",
      "   Strawberry Wedge       0.93      0.85      0.89       246\n",
      "          Tamarillo       0.82      0.87      0.85       166\n",
      "            Tangelo       0.80      0.87      0.84       166\n",
      "           Tomato 1       0.85      0.91      0.88       246\n",
      "           Tomato 2       0.89      0.73      0.80       225\n",
      "           Tomato 3       0.57      0.91      0.70       246\n",
      "           Tomato 4       1.00      0.31      0.47       160\n",
      "  Tomato Cherry Red       0.87      0.87      0.87       164\n",
      "      Tomato Maroon       0.83      0.83      0.83       127\n",
      "             Walnut       0.92      0.92      0.92       249\n",
      "\n",
      "          micro avg       0.74      0.74      0.74     13877\n",
      "          macro avg       0.78      0.74      0.73     13877\n",
      "       weighted avg       0.79      0.74      0.74     13877\n",
      "\n"
     ]
    }
   ],
   "source": [
    "from sklearn.metrics import classification_report, confusion_matrix\n",
    "import numpy as np\n",
    "\n",
    "# Confusion Matrix and Classification Report\n",
    "Y_pred = model.predict_generator(validation_generator, nb_validation_samples // batch_size+1)\n",
    "y_pred = np.argmax(Y_pred, axis=1)\n",
    "print('Confusion Matrix')\n",
    "print(confusion_matrix(validation_generator.classes, y_pred))\n",
    "print('Classification Report')\n",
    "class_labels = {v: k for k, v in validation_generator.class_indices.items()}\n",
    "target_names = list(class_labels.values())\n",
    "print(classification_report(validation_generator.classes, y_pred, target_names=target_names))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "import sklearn\n",
    "from sklearn.metrics import classification_report, confusion_matrix\n",
    "import numpy as np\n",
    "from keras.models import load_model\n",
    "\n",
    "img_row, img_height, img_depth = 32,32,3\n",
    "model = load_model('/home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5')\n",
    "\n",
    "class_labels = validation_generator.class_indices\n",
    "class_labels = {v: k for k, v in class_labels.items()}\n",
    "classes = list(class_labels.values())\n",
    "\n",
    "nb_train_samples = 41322\n",
    "nb_validation_samples = 13877\n",
    "\n",
    "# Confusion Matrix and Classification Report\n",
    "Y_pred = model.predict_generator(validation_generator, nb_validation_samples // batch_size+1)\n",
    "y_pred = np.argmax(Y_pred, axis=1)\n",
    "\n",
    "target_names = list(class_labels.values())\n",
    "\n",
    "plt.figure(figsize=(20,20))\n",
    "cnf_matrix = confusion_matrix(validation_generator.classes, y_pred)\n",
    "\n",
    "plt.imshow(cnf_matrix, interpolation='nearest')\n",
    "plt.colorbar()\n",
    "tick_marks = np.arange(len(classes))\n",
    "_ = plt.xticks(tick_marks, classes, rotation=90)\n",
    "_ = plt.yticks(tick_marks, classes)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Testing our fruit classifier"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.models import load_model\n",
    "from keras.preprocessing import image\n",
    "import numpy as np\n",
    "import os\n",
    "import cv2\n",
    "import numpy as np\n",
    "from os import listdir\n",
    "from os.path import isfile, join\n",
    "import re\n",
    "\n",
    "def draw_test(name, pred, im, true_label):\n",
    "    BLACK = [0,0,0]\n",
    "    expanded_image = cv2.copyMakeBorder(im, 160, 0, 0, 500 ,cv2.BORDER_CONSTANT,value=BLACK)\n",
    "    cv2.putText(expanded_image, \"predicted - \"+ pred, (20, 60) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,0,255), 2)\n",
    "    cv2.putText(expanded_image, \"true - \"+ true_label, (20, 120) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,255,0), 2)\n",
    "    cv2.imshow(name, expanded_image)\n",
    "\n",
    "\n",
    "def getRandomImage(path, img_width, img_height):\n",
    "    \"\"\"Loads a random image from a random folder in our test path.\"\"\"\n",
    "    folders = list(filter(lambda x: os.path.isdir(os.path.join(path, x)), os.listdir(path)))\n",
    "    random_directory = np.random.randint(0,len(folders))\n",
    "    path_class = folders[random_directory]\n",
    "    file_path = path + path_class\n",
    "    file_names = [f for f in listdir(file_path) if isfile(join(file_path, f))]\n",
    "    random_file_index = np.random.randint(0,len(file_names))\n",
    "    image_name = file_names[random_file_index]\n",
    "    final_path = file_path + \"/\" + image_name\n",
    "    return image.load_img(final_path, target_size = (img_width, img_height)), final_path, path_class\n",
    "\n",
    "# dimensions of our images\n",
    "img_width, img_height = 32, 32\n",
    "\n",
    "\n",
    "files = []\n",
    "predictions = []\n",
    "true_labels = []\n",
    "# predicting images\n",
    "for i in range(0, 10):\n",
    "    path = './fruits-360/validation/' \n",
    "    img, final_path, true_label = getRandomImage(path, img_width, img_height)\n",
    "    files.append(final_path)\n",
    "    true_labels.append(true_label)\n",
    "    x = image.img_to_array(img)\n",
    "    x = x * 1./255\n",
    "    x = np.expand_dims(x, axis=0)\n",
    "    images = np.vstack([x])\n",
    "    classes = model.predict_classes(images, batch_size = 10)\n",
    "    predictions.append(classes)\n",
    "    \n",
    "for i in range(0, len(files)):\n",
    "    frame = cv2.imread(files[i])\n",
    "    draw_test(\"Prediction\", class_labels[predictions[i][0]], frame, true_labels[i])\n",
    "    cv2.waitKey(0)\n",
    "\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
