{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "Exercise 7 - Question.ipynb",
      "version": "0.3.2",
      "provenance": [],
      "collapsed_sections": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "code",
      "metadata": {
        "id": "lbFmQdsZs5eW",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Imports: os for filesystem paths, TensorFlow itself, and the\n",
        "# Keras layers/Model APIs used to build on top of the pre-trained net.\n",
        "import os\n",
        "\n",
        "import tensorflow as tf\n",
        "from tensorflow.keras import layers, Model"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab_type": "code",
        "id": "1xJZ5glPPCRz",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 11789
        },
        "outputId": "1183ad0b-7a79-417f-e2cb-d9a895bab476"
      },
      "source": [
        "# Local path where the pre-trained weights are stored. Defined once and\n",
        "# reused below (both by wget and by load_weights) so the two cannot drift.\n",
        "local_weights_file = '/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5'\n",
        "\n",
        "# Download the InceptionV3 weights (notop = no classification head).\n",
        "# IPython interpolates {local_weights_file} into the shell command.\n",
        "!wget --no-check-certificate \\\n",
        "    https://storage.googleapis.com/mledu-datasets/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5 \\\n",
        "    -O {local_weights_file}\n",
        "\n",
        "# Import the Inception model\n",
        "from tensorflow.keras.applications.inception_v3 import InceptionV3\n",
        "\n",
        "# Build the architecture only (weights=None), then load the locally\n",
        "# downloaded pre-trained weights into it.\n",
        "pre_trained_model = InceptionV3(input_shape=(150, 150, 3),\n",
        "                                include_top=False,\n",
        "                                weights=None)\n",
        "\n",
        "pre_trained_model.load_weights(local_weights_file)\n",
        "\n",
        "# Freeze every layer of the base model: only layers added on top of it\n",
        "# later should be trainable.\n",
        "for layer in pre_trained_model.layers:\n",
        "  layer.trainable = False\n",
        "\n",
        "# Print the model summary\n",
        "pre_trained_model.summary()\n",
        "\n",
        "# Expected Output is extremely large, but should end with:\n",
        "\n",
        "#batch_normalization_v1_281 (Bat (None, 3, 3, 192)    576         conv2d_281[0][0]                 \n",
        "#__________________________________________________________________________________________________\n",
        "#activation_273 (Activation)     (None, 3, 3, 320)    0           batch_normalization_v1_273[0][0] \n",
        "#__________________________________________________________________________________________________\n",
        "#mixed9_1 (Concatenate)          (None, 3, 3, 768)    0           activation_275[0][0]             \n",
        "#                                                                 activation_276[0][0]             \n",
        "#__________________________________________________________________________________________________\n",
        "#concatenate_5 (Concatenate)     (None, 3, 3, 768)    0           activation_279[0][0]             \n",
        "#                                                                 activation_280[0][0]             \n",
        "#__________________________________________________________________________________________________\n",
        "#activation_281 (Activation)     (None, 3, 3, 192)    0           batch_normalization_v1_281[0][0] \n",
        "#__________________________________________________________________________________________________\n",
        "#mixed10 (Concatenate)           (None, 3, 3, 2048)   0           activation_273[0][0]             \n",
        "#                                                                 mixed9_1[0][0]                   \n",
        "#                                                                 concatenate_5[0][0]              \n",
        "#                                                                 activation_281[0][0]             \n",
        "#==================================================================================================\n",
        "#Total params: 21,802,784\n",
        "#Trainable params: 0\n",
        "#Non-trainable params: 21,802,784"
      ],
      "execution_count": 4,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "--2019-05-10 04:21:53--  https://storage.googleapis.com/mledu-datasets/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5\n",
            "Resolving storage.googleapis.com (storage.googleapis.com)... 173.194.79.128, 2a00:1450:4013:c01::80\n",
            "Connecting to storage.googleapis.com (storage.googleapis.com)|173.194.79.128|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: 87910968 (84M) [application/x-hdf]\n",
            "Saving to: ‘/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5’\n",
            "\n",
            "/tmp/inception_v3_w 100%[===================>]  83.84M   120MB/s    in 0.7s    \n",
            "\n",
            "2019-05-10 04:21:54 (120 MB/s) - ‘/tmp/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5’ saved [87910968/87910968]\n",
            "\n",
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
            "Instructions for updating:\n",
            "Colocations handled automatically by placer.\n",
            "__________________________________________________________________________________________________\n",
            "Layer (type)                    Output Shape         Param #     Connected to                     \n",
            "==================================================================================================\n",
            "input_1 (InputLayer)            (None, 150, 150, 3)  0                                            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d (Conv2D)                 (None, 74, 74, 32)   864         input_1[0][0]                    \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1 (BatchNo (None, 74, 74, 32)   96          conv2d[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "activation (Activation)         (None, 74, 74, 32)   0           batch_normalization_v1[0][0]     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_1 (Conv2D)               (None, 72, 72, 32)   9216        activation[0][0]                 \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_1 (Batch (None, 72, 72, 32)   96          conv2d_1[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_1 (Activation)       (None, 72, 72, 32)   0           batch_normalization_v1_1[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_2 (Conv2D)               (None, 72, 72, 64)   18432       activation_1[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_2 (Batch (None, 72, 72, 64)   192         conv2d_2[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_2 (Activation)       (None, 72, 72, 64)   0           batch_normalization_v1_2[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d (MaxPooling2D)    (None, 35, 35, 64)   0           activation_2[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_3 (Conv2D)               (None, 35, 35, 80)   5120        max_pooling2d[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_3 (Batch (None, 35, 35, 80)   240         conv2d_3[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_3 (Activation)       (None, 35, 35, 80)   0           batch_normalization_v1_3[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_4 (Conv2D)               (None, 33, 33, 192)  138240      activation_3[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_4 (Batch (None, 33, 33, 192)  576         conv2d_4[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_4 (Activation)       (None, 33, 33, 192)  0           batch_normalization_v1_4[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d_1 (MaxPooling2D)  (None, 16, 16, 192)  0           activation_4[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_8 (Conv2D)               (None, 16, 16, 64)   12288       max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_8 (Batch (None, 16, 16, 64)   192         conv2d_8[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_8 (Activation)       (None, 16, 16, 64)   0           batch_normalization_v1_8[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_6 (Conv2D)               (None, 16, 16, 48)   9216        max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_9 (Conv2D)               (None, 16, 16, 96)   55296       activation_8[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_6 (Batch (None, 16, 16, 48)   144         conv2d_6[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_9 (Batch (None, 16, 16, 96)   288         conv2d_9[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_6 (Activation)       (None, 16, 16, 48)   0           batch_normalization_v1_6[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "activation_9 (Activation)       (None, 16, 16, 96)   0           batch_normalization_v1_9[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d (AveragePooli (None, 16, 16, 192)  0           max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_5 (Conv2D)               (None, 16, 16, 64)   12288       max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_7 (Conv2D)               (None, 16, 16, 64)   76800       activation_6[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_10 (Conv2D)              (None, 16, 16, 96)   82944       activation_9[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_11 (Conv2D)              (None, 16, 16, 32)   6144        average_pooling2d[0][0]          \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_5 (Batch (None, 16, 16, 64)   192         conv2d_5[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_7 (Batch (None, 16, 16, 64)   192         conv2d_7[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_10 (Batc (None, 16, 16, 96)   288         conv2d_10[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_11 (Batc (None, 16, 16, 32)   96          conv2d_11[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_5 (Activation)       (None, 16, 16, 64)   0           batch_normalization_v1_5[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "activation_7 (Activation)       (None, 16, 16, 64)   0           batch_normalization_v1_7[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "activation_10 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_10[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_11 (Activation)      (None, 16, 16, 32)   0           batch_normalization_v1_11[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed0 (Concatenate)            (None, 16, 16, 256)  0           activation_5[0][0]               \n",
            "                                                                 activation_7[0][0]               \n",
            "                                                                 activation_10[0][0]              \n",
            "                                                                 activation_11[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_15 (Conv2D)              (None, 16, 16, 64)   16384       mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_15 (Batc (None, 16, 16, 64)   192         conv2d_15[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_15 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_15[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_13 (Conv2D)              (None, 16, 16, 48)   12288       mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_16 (Conv2D)              (None, 16, 16, 96)   55296       activation_15[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_13 (Batc (None, 16, 16, 48)   144         conv2d_13[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_16 (Batc (None, 16, 16, 96)   288         conv2d_16[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_13 (Activation)      (None, 16, 16, 48)   0           batch_normalization_v1_13[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_16 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_16[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_1 (AveragePoo (None, 16, 16, 256)  0           mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_12 (Conv2D)              (None, 16, 16, 64)   16384       mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_14 (Conv2D)              (None, 16, 16, 64)   76800       activation_13[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_17 (Conv2D)              (None, 16, 16, 96)   82944       activation_16[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_18 (Conv2D)              (None, 16, 16, 64)   16384       average_pooling2d_1[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_12 (Batc (None, 16, 16, 64)   192         conv2d_12[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_14 (Batc (None, 16, 16, 64)   192         conv2d_14[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_17 (Batc (None, 16, 16, 96)   288         conv2d_17[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_18 (Batc (None, 16, 16, 64)   192         conv2d_18[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_12 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_12[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_14 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_14[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_17 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_17[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_18 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_18[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed1 (Concatenate)            (None, 16, 16, 288)  0           activation_12[0][0]              \n",
            "                                                                 activation_14[0][0]              \n",
            "                                                                 activation_17[0][0]              \n",
            "                                                                 activation_18[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_22 (Conv2D)              (None, 16, 16, 64)   18432       mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_22 (Batc (None, 16, 16, 64)   192         conv2d_22[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_22 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_22[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_20 (Conv2D)              (None, 16, 16, 48)   13824       mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_23 (Conv2D)              (None, 16, 16, 96)   55296       activation_22[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_20 (Batc (None, 16, 16, 48)   144         conv2d_20[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_23 (Batc (None, 16, 16, 96)   288         conv2d_23[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_20 (Activation)      (None, 16, 16, 48)   0           batch_normalization_v1_20[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_23 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_23[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_2 (AveragePoo (None, 16, 16, 288)  0           mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_19 (Conv2D)              (None, 16, 16, 64)   18432       mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_21 (Conv2D)              (None, 16, 16, 64)   76800       activation_20[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_24 (Conv2D)              (None, 16, 16, 96)   82944       activation_23[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_25 (Conv2D)              (None, 16, 16, 64)   18432       average_pooling2d_2[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_19 (Batc (None, 16, 16, 64)   192         conv2d_19[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_21 (Batc (None, 16, 16, 64)   192         conv2d_21[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_24 (Batc (None, 16, 16, 96)   288         conv2d_24[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_25 (Batc (None, 16, 16, 64)   192         conv2d_25[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_19 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_19[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_21 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_21[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_24 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_24[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_25 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_25[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed2 (Concatenate)            (None, 16, 16, 288)  0           activation_19[0][0]              \n",
            "                                                                 activation_21[0][0]              \n",
            "                                                                 activation_24[0][0]              \n",
            "                                                                 activation_25[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_27 (Conv2D)              (None, 16, 16, 64)   18432       mixed2[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_27 (Batc (None, 16, 16, 64)   192         conv2d_27[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_27 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_27[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_28 (Conv2D)              (None, 16, 16, 96)   55296       activation_27[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_28 (Batc (None, 16, 16, 96)   288         conv2d_28[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_28 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_28[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_26 (Conv2D)              (None, 7, 7, 384)    995328      mixed2[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_29 (Conv2D)              (None, 7, 7, 96)     82944       activation_28[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_26 (Batc (None, 7, 7, 384)    1152        conv2d_26[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_29 (Batc (None, 7, 7, 96)     288         conv2d_29[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_26 (Activation)      (None, 7, 7, 384)    0           batch_normalization_v1_26[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_29 (Activation)      (None, 7, 7, 96)     0           batch_normalization_v1_29[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d_2 (MaxPooling2D)  (None, 7, 7, 288)    0           mixed2[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "mixed3 (Concatenate)            (None, 7, 7, 768)    0           activation_26[0][0]              \n",
            "                                                                 activation_29[0][0]              \n",
            "                                                                 max_pooling2d_2[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_34 (Conv2D)              (None, 7, 7, 128)    98304       mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_34 (Batc (None, 7, 7, 128)    384         conv2d_34[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_34 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_34[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_35 (Conv2D)              (None, 7, 7, 128)    114688      activation_34[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_35 (Batc (None, 7, 7, 128)    384         conv2d_35[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_35 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_35[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_31 (Conv2D)              (None, 7, 7, 128)    98304       mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_36 (Conv2D)              (None, 7, 7, 128)    114688      activation_35[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_31 (Batc (None, 7, 7, 128)    384         conv2d_31[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_36 (Batc (None, 7, 7, 128)    384         conv2d_36[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_31 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_31[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_36 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_36[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_32 (Conv2D)              (None, 7, 7, 128)    114688      activation_31[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_37 (Conv2D)              (None, 7, 7, 128)    114688      activation_36[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_32 (Batc (None, 7, 7, 128)    384         conv2d_32[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_37 (Batc (None, 7, 7, 128)    384         conv2d_37[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_32 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_32[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_37 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_37[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_3 (AveragePoo (None, 7, 7, 768)    0           mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_30 (Conv2D)              (None, 7, 7, 192)    147456      mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_33 (Conv2D)              (None, 7, 7, 192)    172032      activation_32[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_38 (Conv2D)              (None, 7, 7, 192)    172032      activation_37[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_39 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_3[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_30 (Batc (None, 7, 7, 192)    576         conv2d_30[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_33 (Batc (None, 7, 7, 192)    576         conv2d_33[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_38 (Batc (None, 7, 7, 192)    576         conv2d_38[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_39 (Batc (None, 7, 7, 192)    576         conv2d_39[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_30 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_30[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_33 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_33[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_38 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_38[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_39 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_39[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed4 (Concatenate)            (None, 7, 7, 768)    0           activation_30[0][0]              \n",
            "                                                                 activation_33[0][0]              \n",
            "                                                                 activation_38[0][0]              \n",
            "                                                                 activation_39[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_44 (Conv2D)              (None, 7, 7, 160)    122880      mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_44 (Batc (None, 7, 7, 160)    480         conv2d_44[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_44 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_44[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_45 (Conv2D)              (None, 7, 7, 160)    179200      activation_44[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_45 (Batc (None, 7, 7, 160)    480         conv2d_45[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_45 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_45[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_41 (Conv2D)              (None, 7, 7, 160)    122880      mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_46 (Conv2D)              (None, 7, 7, 160)    179200      activation_45[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_41 (Batc (None, 7, 7, 160)    480         conv2d_41[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_46 (Batc (None, 7, 7, 160)    480         conv2d_46[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_41 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_41[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_46 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_46[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_42 (Conv2D)              (None, 7, 7, 160)    179200      activation_41[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_47 (Conv2D)              (None, 7, 7, 160)    179200      activation_46[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_42 (Batc (None, 7, 7, 160)    480         conv2d_42[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_47 (Batc (None, 7, 7, 160)    480         conv2d_47[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_42 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_42[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_47 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_47[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_4 (AveragePoo (None, 7, 7, 768)    0           mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_40 (Conv2D)              (None, 7, 7, 192)    147456      mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_43 (Conv2D)              (None, 7, 7, 192)    215040      activation_42[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_48 (Conv2D)              (None, 7, 7, 192)    215040      activation_47[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_49 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_4[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_40 (Batc (None, 7, 7, 192)    576         conv2d_40[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_43 (Batc (None, 7, 7, 192)    576         conv2d_43[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_48 (Batc (None, 7, 7, 192)    576         conv2d_48[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_49 (Batc (None, 7, 7, 192)    576         conv2d_49[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_40 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_40[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_43 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_43[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_48 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_48[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_49 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_49[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed5 (Concatenate)            (None, 7, 7, 768)    0           activation_40[0][0]              \n",
            "                                                                 activation_43[0][0]              \n",
            "                                                                 activation_48[0][0]              \n",
            "                                                                 activation_49[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_54 (Conv2D)              (None, 7, 7, 160)    122880      mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_54 (Batc (None, 7, 7, 160)    480         conv2d_54[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_54 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_54[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_55 (Conv2D)              (None, 7, 7, 160)    179200      activation_54[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_55 (Batc (None, 7, 7, 160)    480         conv2d_55[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_55 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_55[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_51 (Conv2D)              (None, 7, 7, 160)    122880      mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_56 (Conv2D)              (None, 7, 7, 160)    179200      activation_55[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_51 (Batc (None, 7, 7, 160)    480         conv2d_51[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_56 (Batc (None, 7, 7, 160)    480         conv2d_56[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_51 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_51[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_56 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_56[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_52 (Conv2D)              (None, 7, 7, 160)    179200      activation_51[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_57 (Conv2D)              (None, 7, 7, 160)    179200      activation_56[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_52 (Batc (None, 7, 7, 160)    480         conv2d_52[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_57 (Batc (None, 7, 7, 160)    480         conv2d_57[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_52 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_52[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_57 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_57[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_5 (AveragePoo (None, 7, 7, 768)    0           mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_50 (Conv2D)              (None, 7, 7, 192)    147456      mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_53 (Conv2D)              (None, 7, 7, 192)    215040      activation_52[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_58 (Conv2D)              (None, 7, 7, 192)    215040      activation_57[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_59 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_5[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_50 (Batc (None, 7, 7, 192)    576         conv2d_50[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_53 (Batc (None, 7, 7, 192)    576         conv2d_53[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_58 (Batc (None, 7, 7, 192)    576         conv2d_58[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_59 (Batc (None, 7, 7, 192)    576         conv2d_59[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_50 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_50[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_53 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_53[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_58 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_58[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_59 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_59[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed6 (Concatenate)            (None, 7, 7, 768)    0           activation_50[0][0]              \n",
            "                                                                 activation_53[0][0]              \n",
            "                                                                 activation_58[0][0]              \n",
            "                                                                 activation_59[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_64 (Conv2D)              (None, 7, 7, 192)    147456      mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_64 (Batc (None, 7, 7, 192)    576         conv2d_64[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_64 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_64[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_65 (Conv2D)              (None, 7, 7, 192)    258048      activation_64[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_65 (Batc (None, 7, 7, 192)    576         conv2d_65[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_65 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_65[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_61 (Conv2D)              (None, 7, 7, 192)    147456      mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_66 (Conv2D)              (None, 7, 7, 192)    258048      activation_65[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_61 (Batc (None, 7, 7, 192)    576         conv2d_61[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_66 (Batc (None, 7, 7, 192)    576         conv2d_66[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_61 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_61[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_66 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_66[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_62 (Conv2D)              (None, 7, 7, 192)    258048      activation_61[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_67 (Conv2D)              (None, 7, 7, 192)    258048      activation_66[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_62 (Batc (None, 7, 7, 192)    576         conv2d_62[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_67 (Batc (None, 7, 7, 192)    576         conv2d_67[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_62 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_62[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_67 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_67[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_6 (AveragePoo (None, 7, 7, 768)    0           mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_60 (Conv2D)              (None, 7, 7, 192)    147456      mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_63 (Conv2D)              (None, 7, 7, 192)    258048      activation_62[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_68 (Conv2D)              (None, 7, 7, 192)    258048      activation_67[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_69 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_6[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_60 (Batc (None, 7, 7, 192)    576         conv2d_60[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_63 (Batc (None, 7, 7, 192)    576         conv2d_63[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_68 (Batc (None, 7, 7, 192)    576         conv2d_68[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_69 (Batc (None, 7, 7, 192)    576         conv2d_69[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_60 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_60[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_63 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_63[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_68 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_68[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_69 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_69[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed7 (Concatenate)            (None, 7, 7, 768)    0           activation_60[0][0]              \n",
            "                                                                 activation_63[0][0]              \n",
            "                                                                 activation_68[0][0]              \n",
            "                                                                 activation_69[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_72 (Conv2D)              (None, 7, 7, 192)    147456      mixed7[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_72 (Batc (None, 7, 7, 192)    576         conv2d_72[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_72 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_72[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_73 (Conv2D)              (None, 7, 7, 192)    258048      activation_72[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_73 (Batc (None, 7, 7, 192)    576         conv2d_73[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_73 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_73[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_70 (Conv2D)              (None, 7, 7, 192)    147456      mixed7[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_74 (Conv2D)              (None, 7, 7, 192)    258048      activation_73[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_70 (Batc (None, 7, 7, 192)    576         conv2d_70[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_74 (Batc (None, 7, 7, 192)    576         conv2d_74[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_70 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_70[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_74 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_74[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_71 (Conv2D)              (None, 3, 3, 320)    552960      activation_70[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_75 (Conv2D)              (None, 3, 3, 192)    331776      activation_74[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_71 (Batc (None, 3, 3, 320)    960         conv2d_71[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_75 (Batc (None, 3, 3, 192)    576         conv2d_75[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_71 (Activation)      (None, 3, 3, 320)    0           batch_normalization_v1_71[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_75 (Activation)      (None, 3, 3, 192)    0           batch_normalization_v1_75[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d_3 (MaxPooling2D)  (None, 3, 3, 768)    0           mixed7[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "mixed8 (Concatenate)            (None, 3, 3, 1280)   0           activation_71[0][0]              \n",
            "                                                                 activation_75[0][0]              \n",
            "                                                                 max_pooling2d_3[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_80 (Conv2D)              (None, 3, 3, 448)    573440      mixed8[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_80 (Batc (None, 3, 3, 448)    1344        conv2d_80[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_80 (Activation)      (None, 3, 3, 448)    0           batch_normalization_v1_80[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_77 (Conv2D)              (None, 3, 3, 384)    491520      mixed8[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_81 (Conv2D)              (None, 3, 3, 384)    1548288     activation_80[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_77 (Batc (None, 3, 3, 384)    1152        conv2d_77[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_81 (Batc (None, 3, 3, 384)    1152        conv2d_81[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_77 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_77[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_81 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_81[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_78 (Conv2D)              (None, 3, 3, 384)    442368      activation_77[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_79 (Conv2D)              (None, 3, 3, 384)    442368      activation_77[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_82 (Conv2D)              (None, 3, 3, 384)    442368      activation_81[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_83 (Conv2D)              (None, 3, 3, 384)    442368      activation_81[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_7 (AveragePoo (None, 3, 3, 1280)   0           mixed8[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_76 (Conv2D)              (None, 3, 3, 320)    409600      mixed8[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_78 (Batc (None, 3, 3, 384)    1152        conv2d_78[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_79 (Batc (None, 3, 3, 384)    1152        conv2d_79[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_82 (Batc (None, 3, 3, 384)    1152        conv2d_82[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_83 (Batc (None, 3, 3, 384)    1152        conv2d_83[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_84 (Conv2D)              (None, 3, 3, 192)    245760      average_pooling2d_7[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_76 (Batc (None, 3, 3, 320)    960         conv2d_76[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_78 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_78[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_79 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_79[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_82 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_82[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_83 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_83[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_84 (Batc (None, 3, 3, 192)    576         conv2d_84[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_76 (Activation)      (None, 3, 3, 320)    0           batch_normalization_v1_76[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed9_0 (Concatenate)          (None, 3, 3, 768)    0           activation_78[0][0]              \n",
            "                                                                 activation_79[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "concatenate (Concatenate)       (None, 3, 3, 768)    0           activation_82[0][0]              \n",
            "                                                                 activation_83[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "activation_84 (Activation)      (None, 3, 3, 192)    0           batch_normalization_v1_84[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed9 (Concatenate)            (None, 3, 3, 2048)   0           activation_76[0][0]              \n",
            "                                                                 mixed9_0[0][0]                   \n",
            "                                                                 concatenate[0][0]                \n",
            "                                                                 activation_84[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_89 (Conv2D)              (None, 3, 3, 448)    917504      mixed9[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_89 (Batc (None, 3, 3, 448)    1344        conv2d_89[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_89 (Activation)      (None, 3, 3, 448)    0           batch_normalization_v1_89[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_86 (Conv2D)              (None, 3, 3, 384)    786432      mixed9[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_90 (Conv2D)              (None, 3, 3, 384)    1548288     activation_89[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_86 (Batc (None, 3, 3, 384)    1152        conv2d_86[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_90 (Batc (None, 3, 3, 384)    1152        conv2d_90[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_86 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_86[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_90 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_90[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_87 (Conv2D)              (None, 3, 3, 384)    442368      activation_86[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_88 (Conv2D)              (None, 3, 3, 384)    442368      activation_86[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_91 (Conv2D)              (None, 3, 3, 384)    442368      activation_90[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_92 (Conv2D)              (None, 3, 3, 384)    442368      activation_90[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_8 (AveragePoo (None, 3, 3, 2048)   0           mixed9[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_85 (Conv2D)              (None, 3, 3, 320)    655360      mixed9[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_87 (Batc (None, 3, 3, 384)    1152        conv2d_87[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_88 (Batc (None, 3, 3, 384)    1152        conv2d_88[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_91 (Batc (None, 3, 3, 384)    1152        conv2d_91[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_92 (Batc (None, 3, 3, 384)    1152        conv2d_92[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_93 (Conv2D)              (None, 3, 3, 192)    393216      average_pooling2d_8[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_85 (Batc (None, 3, 3, 320)    960         conv2d_85[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_87 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_87[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_88 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_88[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_91 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_91[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_92 (Activation)      (None, 3, 3, 384)    0           batch_normalization_v1_92[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_93 (Batc (None, 3, 3, 192)    576         conv2d_93[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_85 (Activation)      (None, 3, 3, 320)    0           batch_normalization_v1_85[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed9_1 (Concatenate)          (None, 3, 3, 768)    0           activation_87[0][0]              \n",
            "                                                                 activation_88[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "concatenate_1 (Concatenate)     (None, 3, 3, 768)    0           activation_91[0][0]              \n",
            "                                                                 activation_92[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "activation_93 (Activation)      (None, 3, 3, 192)    0           batch_normalization_v1_93[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed10 (Concatenate)           (None, 3, 3, 2048)   0           activation_85[0][0]              \n",
            "                                                                 mixed9_1[0][0]                   \n",
            "                                                                 concatenate_1[0][0]              \n",
            "                                                                 activation_93[0][0]              \n",
            "==================================================================================================\n",
            "Total params: 21,802,784\n",
            "Trainable params: 0\n",
            "Non-trainable params: 21,802,784\n",
            "__________________________________________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "CFsUlwdfs_wg",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        },
        "outputId": "9285d4ae-c3ca-4667-dd09-ab244948db16"
      },
      "source": [
        "# Use 'mixed7' as the last layer of the pre-trained Inception base;\n",
        "# its output feeds the custom classification head built in a later cell.\n",
        "last_layer = pre_trained_model.get_layer('mixed7')\n",
        "print('last layer output shape: ', last_layer.output_shape)\n",
        "# Keep a handle on the layer's output tensor for the new head\n",
        "last_output = last_layer.output\n",
        "\n",
        "# Expected Output:\n",
        "# ('last layer output shape: ', (None, 7, 7, 768))"
      ],
      "execution_count": 7,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "last layer output shape:  (None, 7, 7, 768)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "-bsWZWp5oMq9",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Define a Callback class that stops training once accuracy reaches 99.9%\n",
        "class myCallback(tf.keras.callbacks.Callback):\n",
        "  \"\"\"Stops training once training accuracy ('acc') exceeds 99.9%.\"\"\"\n",
        "  # logs=None instead of a mutable default dict; Keras always passes logs.\n",
        "  def on_epoch_end(self, epoch, logs=None):\n",
        "    # Guard against a missing/None 'acc' entry, which would otherwise\n",
        "    # raise TypeError on the comparison (None > float).\n",
        "    acc = (logs or {}).get('acc')\n",
        "    if acc is not None and acc > 0.999:\n",
        "      print(\"\\nReached 99.9% accuracy so cancelling training!\")\n",
        "      self.model.stop_training = True\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab_type": "code",
        "id": "BMXb913pbvFg",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 8687
        },
        "outputId": "145f59a3-9b5f-4963-bedb-1ba5520d947c"
      },
      "source": [
        "from tensorflow.keras.optimizers import RMSprop\n",
        "\n",
        "# Flatten the output layer to 1 dimension\n",
        "x = layers.Flatten()(last_output)\n",
        "# Add a fully connected layer with 1,024 hidden units and ReLU activation\n",
        "x = layers.Dense(1024, activation='relu')(x)\n",
        "# Add a dropout rate of 0.2\n",
        "x = layers.Dropout(0.2)(x)                  \n",
        "# Add a final sigmoid layer for classification\n",
        "x = layers.Dense(1, activation='sigmoid')(x)           \n",
        "\n",
        "# Build the full model: the pre-trained Inception base feeding the\n",
        "# newly added dense classification head defined above.\n",
        "model = Model(pre_trained_model.input, x) \n",
        "\n",
        "# Binary classification -> sigmoid output with binary cross-entropy loss.\n",
        "# NOTE(review): 'lr' is the legacy Keras argument name; newer versions of\n",
        "# tf.keras use 'learning_rate' instead.\n",
        "model.compile(optimizer = RMSprop(lr=0.0001), \n",
        "              loss = 'binary_crossentropy', \n",
        "              metrics = ['acc'])\n",
        "\n",
        "model.summary()\n",
        "\n",
        "# Expected output will be large. Last few lines should be:\n",
        "\n",
        "# mixed7 (Concatenate)            (None, 7, 7, 768)    0           activation_248[0][0]             \n",
        "#                                                                  activation_251[0][0]             \n",
        "#                                                                  activation_256[0][0]             \n",
        "#                                                                  activation_257[0][0]             \n",
        "# __________________________________________________________________________________________________\n",
        "# flatten_4 (Flatten)             (None, 37632)        0           mixed7[0][0]                     \n",
        "# __________________________________________________________________________________________________\n",
        "# dense_8 (Dense)                 (None, 1024)         38536192    flatten_4[0][0]                  \n",
        "# __________________________________________________________________________________________________\n",
        "# dropout_4 (Dropout)             (None, 1024)         0           dense_8[0][0]                    \n",
        "# __________________________________________________________________________________________________\n",
        "# dense_9 (Dense)                 (None, 1)            1025        dropout_4[0][0]                  \n",
        "# ==================================================================================================\n",
        "# Total params: 47,512,481\n",
        "# Trainable params: 38,537,217\n",
        "# Non-trainable params: 8,975,264\n"
      ],
      "execution_count": 9,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/layers/core.py:143: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\n",
            "Instructions for updating:\n",
            "Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\n",
            "__________________________________________________________________________________________________\n",
            "Layer (type)                    Output Shape         Param #     Connected to                     \n",
            "==================================================================================================\n",
            "input_1 (InputLayer)            (None, 150, 150, 3)  0                                            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d (Conv2D)                 (None, 74, 74, 32)   864         input_1[0][0]                    \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1 (BatchNo (None, 74, 74, 32)   96          conv2d[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "activation (Activation)         (None, 74, 74, 32)   0           batch_normalization_v1[0][0]     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_1 (Conv2D)               (None, 72, 72, 32)   9216        activation[0][0]                 \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_1 (Batch (None, 72, 72, 32)   96          conv2d_1[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_1 (Activation)       (None, 72, 72, 32)   0           batch_normalization_v1_1[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_2 (Conv2D)               (None, 72, 72, 64)   18432       activation_1[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_2 (Batch (None, 72, 72, 64)   192         conv2d_2[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_2 (Activation)       (None, 72, 72, 64)   0           batch_normalization_v1_2[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d (MaxPooling2D)    (None, 35, 35, 64)   0           activation_2[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_3 (Conv2D)               (None, 35, 35, 80)   5120        max_pooling2d[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_3 (Batch (None, 35, 35, 80)   240         conv2d_3[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_3 (Activation)       (None, 35, 35, 80)   0           batch_normalization_v1_3[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_4 (Conv2D)               (None, 33, 33, 192)  138240      activation_3[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_4 (Batch (None, 33, 33, 192)  576         conv2d_4[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_4 (Activation)       (None, 33, 33, 192)  0           batch_normalization_v1_4[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d_1 (MaxPooling2D)  (None, 16, 16, 192)  0           activation_4[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_8 (Conv2D)               (None, 16, 16, 64)   12288       max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_8 (Batch (None, 16, 16, 64)   192         conv2d_8[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_8 (Activation)       (None, 16, 16, 64)   0           batch_normalization_v1_8[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_6 (Conv2D)               (None, 16, 16, 48)   9216        max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_9 (Conv2D)               (None, 16, 16, 96)   55296       activation_8[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_6 (Batch (None, 16, 16, 48)   144         conv2d_6[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_9 (Batch (None, 16, 16, 96)   288         conv2d_9[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "activation_6 (Activation)       (None, 16, 16, 48)   0           batch_normalization_v1_6[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "activation_9 (Activation)       (None, 16, 16, 96)   0           batch_normalization_v1_9[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d (AveragePooli (None, 16, 16, 192)  0           max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_5 (Conv2D)               (None, 16, 16, 64)   12288       max_pooling2d_1[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_7 (Conv2D)               (None, 16, 16, 64)   76800       activation_6[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_10 (Conv2D)              (None, 16, 16, 96)   82944       activation_9[0][0]               \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_11 (Conv2D)              (None, 16, 16, 32)   6144        average_pooling2d[0][0]          \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_5 (Batch (None, 16, 16, 64)   192         conv2d_5[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_7 (Batch (None, 16, 16, 64)   192         conv2d_7[0][0]                   \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_10 (Batc (None, 16, 16, 96)   288         conv2d_10[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_11 (Batc (None, 16, 16, 32)   96          conv2d_11[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_5 (Activation)       (None, 16, 16, 64)   0           batch_normalization_v1_5[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "activation_7 (Activation)       (None, 16, 16, 64)   0           batch_normalization_v1_7[0][0]   \n",
            "__________________________________________________________________________________________________\n",
            "activation_10 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_10[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_11 (Activation)      (None, 16, 16, 32)   0           batch_normalization_v1_11[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed0 (Concatenate)            (None, 16, 16, 256)  0           activation_5[0][0]               \n",
            "                                                                 activation_7[0][0]               \n",
            "                                                                 activation_10[0][0]              \n",
            "                                                                 activation_11[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_15 (Conv2D)              (None, 16, 16, 64)   16384       mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_15 (Batc (None, 16, 16, 64)   192         conv2d_15[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_15 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_15[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_13 (Conv2D)              (None, 16, 16, 48)   12288       mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_16 (Conv2D)              (None, 16, 16, 96)   55296       activation_15[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_13 (Batc (None, 16, 16, 48)   144         conv2d_13[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_16 (Batc (None, 16, 16, 96)   288         conv2d_16[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_13 (Activation)      (None, 16, 16, 48)   0           batch_normalization_v1_13[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_16 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_16[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_1 (AveragePoo (None, 16, 16, 256)  0           mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_12 (Conv2D)              (None, 16, 16, 64)   16384       mixed0[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_14 (Conv2D)              (None, 16, 16, 64)   76800       activation_13[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_17 (Conv2D)              (None, 16, 16, 96)   82944       activation_16[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_18 (Conv2D)              (None, 16, 16, 64)   16384       average_pooling2d_1[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_12 (Batc (None, 16, 16, 64)   192         conv2d_12[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_14 (Batc (None, 16, 16, 64)   192         conv2d_14[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_17 (Batc (None, 16, 16, 96)   288         conv2d_17[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_18 (Batc (None, 16, 16, 64)   192         conv2d_18[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_12 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_12[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_14 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_14[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_17 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_17[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_18 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_18[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed1 (Concatenate)            (None, 16, 16, 288)  0           activation_12[0][0]              \n",
            "                                                                 activation_14[0][0]              \n",
            "                                                                 activation_17[0][0]              \n",
            "                                                                 activation_18[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_22 (Conv2D)              (None, 16, 16, 64)   18432       mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_22 (Batc (None, 16, 16, 64)   192         conv2d_22[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_22 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_22[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_20 (Conv2D)              (None, 16, 16, 48)   13824       mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_23 (Conv2D)              (None, 16, 16, 96)   55296       activation_22[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_20 (Batc (None, 16, 16, 48)   144         conv2d_20[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_23 (Batc (None, 16, 16, 96)   288         conv2d_23[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_20 (Activation)      (None, 16, 16, 48)   0           batch_normalization_v1_20[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_23 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_23[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_2 (AveragePoo (None, 16, 16, 288)  0           mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_19 (Conv2D)              (None, 16, 16, 64)   18432       mixed1[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_21 (Conv2D)              (None, 16, 16, 64)   76800       activation_20[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_24 (Conv2D)              (None, 16, 16, 96)   82944       activation_23[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_25 (Conv2D)              (None, 16, 16, 64)   18432       average_pooling2d_2[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_19 (Batc (None, 16, 16, 64)   192         conv2d_19[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_21 (Batc (None, 16, 16, 64)   192         conv2d_21[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_24 (Batc (None, 16, 16, 96)   288         conv2d_24[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_25 (Batc (None, 16, 16, 64)   192         conv2d_25[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_19 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_19[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_21 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_21[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_24 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_24[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_25 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_25[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed2 (Concatenate)            (None, 16, 16, 288)  0           activation_19[0][0]              \n",
            "                                                                 activation_21[0][0]              \n",
            "                                                                 activation_24[0][0]              \n",
            "                                                                 activation_25[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_27 (Conv2D)              (None, 16, 16, 64)   18432       mixed2[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_27 (Batc (None, 16, 16, 64)   192         conv2d_27[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_27 (Activation)      (None, 16, 16, 64)   0           batch_normalization_v1_27[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_28 (Conv2D)              (None, 16, 16, 96)   55296       activation_27[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_28 (Batc (None, 16, 16, 96)   288         conv2d_28[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_28 (Activation)      (None, 16, 16, 96)   0           batch_normalization_v1_28[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_26 (Conv2D)              (None, 7, 7, 384)    995328      mixed2[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_29 (Conv2D)              (None, 7, 7, 96)     82944       activation_28[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_26 (Batc (None, 7, 7, 384)    1152        conv2d_26[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_29 (Batc (None, 7, 7, 96)     288         conv2d_29[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_26 (Activation)      (None, 7, 7, 384)    0           batch_normalization_v1_26[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_29 (Activation)      (None, 7, 7, 96)     0           batch_normalization_v1_29[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "max_pooling2d_2 (MaxPooling2D)  (None, 7, 7, 288)    0           mixed2[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "mixed3 (Concatenate)            (None, 7, 7, 768)    0           activation_26[0][0]              \n",
            "                                                                 activation_29[0][0]              \n",
            "                                                                 max_pooling2d_2[0][0]            \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_34 (Conv2D)              (None, 7, 7, 128)    98304       mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_34 (Batc (None, 7, 7, 128)    384         conv2d_34[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_34 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_34[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_35 (Conv2D)              (None, 7, 7, 128)    114688      activation_34[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_35 (Batc (None, 7, 7, 128)    384         conv2d_35[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_35 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_35[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_31 (Conv2D)              (None, 7, 7, 128)    98304       mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_36 (Conv2D)              (None, 7, 7, 128)    114688      activation_35[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_31 (Batc (None, 7, 7, 128)    384         conv2d_31[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_36 (Batc (None, 7, 7, 128)    384         conv2d_36[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_31 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_31[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_36 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_36[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_32 (Conv2D)              (None, 7, 7, 128)    114688      activation_31[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_37 (Conv2D)              (None, 7, 7, 128)    114688      activation_36[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_32 (Batc (None, 7, 7, 128)    384         conv2d_32[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_37 (Batc (None, 7, 7, 128)    384         conv2d_37[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_32 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_32[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_37 (Activation)      (None, 7, 7, 128)    0           batch_normalization_v1_37[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_3 (AveragePoo (None, 7, 7, 768)    0           mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_30 (Conv2D)              (None, 7, 7, 192)    147456      mixed3[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_33 (Conv2D)              (None, 7, 7, 192)    172032      activation_32[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_38 (Conv2D)              (None, 7, 7, 192)    172032      activation_37[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_39 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_3[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_30 (Batc (None, 7, 7, 192)    576         conv2d_30[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_33 (Batc (None, 7, 7, 192)    576         conv2d_33[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_38 (Batc (None, 7, 7, 192)    576         conv2d_38[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_39 (Batc (None, 7, 7, 192)    576         conv2d_39[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_30 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_30[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_33 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_33[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_38 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_38[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_39 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_39[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed4 (Concatenate)            (None, 7, 7, 768)    0           activation_30[0][0]              \n",
            "                                                                 activation_33[0][0]              \n",
            "                                                                 activation_38[0][0]              \n",
            "                                                                 activation_39[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_44 (Conv2D)              (None, 7, 7, 160)    122880      mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_44 (Batc (None, 7, 7, 160)    480         conv2d_44[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_44 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_44[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_45 (Conv2D)              (None, 7, 7, 160)    179200      activation_44[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_45 (Batc (None, 7, 7, 160)    480         conv2d_45[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_45 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_45[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_41 (Conv2D)              (None, 7, 7, 160)    122880      mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_46 (Conv2D)              (None, 7, 7, 160)    179200      activation_45[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_41 (Batc (None, 7, 7, 160)    480         conv2d_41[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_46 (Batc (None, 7, 7, 160)    480         conv2d_46[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_41 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_41[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_46 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_46[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_42 (Conv2D)              (None, 7, 7, 160)    179200      activation_41[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_47 (Conv2D)              (None, 7, 7, 160)    179200      activation_46[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_42 (Batc (None, 7, 7, 160)    480         conv2d_42[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_47 (Batc (None, 7, 7, 160)    480         conv2d_47[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_42 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_42[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_47 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_47[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_4 (AveragePoo (None, 7, 7, 768)    0           mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_40 (Conv2D)              (None, 7, 7, 192)    147456      mixed4[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_43 (Conv2D)              (None, 7, 7, 192)    215040      activation_42[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_48 (Conv2D)              (None, 7, 7, 192)    215040      activation_47[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_49 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_4[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_40 (Batc (None, 7, 7, 192)    576         conv2d_40[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_43 (Batc (None, 7, 7, 192)    576         conv2d_43[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_48 (Batc (None, 7, 7, 192)    576         conv2d_48[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_49 (Batc (None, 7, 7, 192)    576         conv2d_49[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_40 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_40[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_43 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_43[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_48 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_48[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_49 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_49[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed5 (Concatenate)            (None, 7, 7, 768)    0           activation_40[0][0]              \n",
            "                                                                 activation_43[0][0]              \n",
            "                                                                 activation_48[0][0]              \n",
            "                                                                 activation_49[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_54 (Conv2D)              (None, 7, 7, 160)    122880      mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_54 (Batc (None, 7, 7, 160)    480         conv2d_54[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_54 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_54[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_55 (Conv2D)              (None, 7, 7, 160)    179200      activation_54[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_55 (Batc (None, 7, 7, 160)    480         conv2d_55[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_55 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_55[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_51 (Conv2D)              (None, 7, 7, 160)    122880      mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_56 (Conv2D)              (None, 7, 7, 160)    179200      activation_55[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_51 (Batc (None, 7, 7, 160)    480         conv2d_51[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_56 (Batc (None, 7, 7, 160)    480         conv2d_56[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_51 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_51[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_56 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_56[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_52 (Conv2D)              (None, 7, 7, 160)    179200      activation_51[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_57 (Conv2D)              (None, 7, 7, 160)    179200      activation_56[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_52 (Batc (None, 7, 7, 160)    480         conv2d_52[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_57 (Batc (None, 7, 7, 160)    480         conv2d_57[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_52 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_52[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_57 (Activation)      (None, 7, 7, 160)    0           batch_normalization_v1_57[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_5 (AveragePoo (None, 7, 7, 768)    0           mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_50 (Conv2D)              (None, 7, 7, 192)    147456      mixed5[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_53 (Conv2D)              (None, 7, 7, 192)    215040      activation_52[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_58 (Conv2D)              (None, 7, 7, 192)    215040      activation_57[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_59 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_5[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_50 (Batc (None, 7, 7, 192)    576         conv2d_50[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_53 (Batc (None, 7, 7, 192)    576         conv2d_53[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_58 (Batc (None, 7, 7, 192)    576         conv2d_58[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_59 (Batc (None, 7, 7, 192)    576         conv2d_59[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_50 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_50[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_53 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_53[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_58 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_58[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_59 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_59[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed6 (Concatenate)            (None, 7, 7, 768)    0           activation_50[0][0]              \n",
            "                                                                 activation_53[0][0]              \n",
            "                                                                 activation_58[0][0]              \n",
            "                                                                 activation_59[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_64 (Conv2D)              (None, 7, 7, 192)    147456      mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_64 (Batc (None, 7, 7, 192)    576         conv2d_64[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_64 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_64[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_65 (Conv2D)              (None, 7, 7, 192)    258048      activation_64[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_65 (Batc (None, 7, 7, 192)    576         conv2d_65[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_65 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_65[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_61 (Conv2D)              (None, 7, 7, 192)    147456      mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_66 (Conv2D)              (None, 7, 7, 192)    258048      activation_65[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_61 (Batc (None, 7, 7, 192)    576         conv2d_61[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_66 (Batc (None, 7, 7, 192)    576         conv2d_66[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_61 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_61[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_66 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_66[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_62 (Conv2D)              (None, 7, 7, 192)    258048      activation_61[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_67 (Conv2D)              (None, 7, 7, 192)    258048      activation_66[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_62 (Batc (None, 7, 7, 192)    576         conv2d_62[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_67 (Batc (None, 7, 7, 192)    576         conv2d_67[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_62 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_62[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_67 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_67[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "average_pooling2d_6 (AveragePoo (None, 7, 7, 768)    0           mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_60 (Conv2D)              (None, 7, 7, 192)    147456      mixed6[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_63 (Conv2D)              (None, 7, 7, 192)    258048      activation_62[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_68 (Conv2D)              (None, 7, 7, 192)    258048      activation_67[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "conv2d_69 (Conv2D)              (None, 7, 7, 192)    147456      average_pooling2d_6[0][0]        \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_60 (Batc (None, 7, 7, 192)    576         conv2d_60[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_63 (Batc (None, 7, 7, 192)    576         conv2d_63[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_68 (Batc (None, 7, 7, 192)    576         conv2d_68[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "batch_normalization_v1_69 (Batc (None, 7, 7, 192)    576         conv2d_69[0][0]                  \n",
            "__________________________________________________________________________________________________\n",
            "activation_60 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_60[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_63 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_63[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_68 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_68[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "activation_69 (Activation)      (None, 7, 7, 192)    0           batch_normalization_v1_69[0][0]  \n",
            "__________________________________________________________________________________________________\n",
            "mixed7 (Concatenate)            (None, 7, 7, 768)    0           activation_60[0][0]              \n",
            "                                                                 activation_63[0][0]              \n",
            "                                                                 activation_68[0][0]              \n",
            "                                                                 activation_69[0][0]              \n",
            "__________________________________________________________________________________________________\n",
            "flatten (Flatten)               (None, 37632)        0           mixed7[0][0]                     \n",
            "__________________________________________________________________________________________________\n",
            "dense (Dense)                   (None, 1024)         38536192    flatten[0][0]                    \n",
            "__________________________________________________________________________________________________\n",
            "dropout (Dropout)               (None, 1024)         0           dense[0][0]                      \n",
            "__________________________________________________________________________________________________\n",
            "dense_1 (Dense)                 (None, 1)            1025        dropout[0][0]                    \n",
            "==================================================================================================\n",
            "Total params: 47,512,481\n",
            "Trainable params: 38,537,217\n",
            "Non-trainable params: 8,975,264\n",
            "__________________________________________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "HrnL_IQ8knWA",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 419
        },
        "outputId": "92b2a1ff-b142-4196-a77c-b1ec0f0462ee"
      },
      "source": [
        "# Get the Horse or Human dataset\n",
        "!wget --no-check-certificate https://storage.googleapis.com/laurencemoroney-blog.appspot.com/horse-or-human.zip -O /tmp/horse-or-human.zip\n",
        "\n",
        "# Get the Horse or Human Validation dataset\n",
        "!wget --no-check-certificate https://storage.googleapis.com/laurencemoroney-blog.appspot.com/validation-horse-or-human.zip -O /tmp/validation-horse-or-human.zip \n",
        "  \n",
        "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
        "\n",
        "import os\n",
        "import zipfile\n",
        "\n",
        "local_zip = '/tmp/horse-or-human.zip'\n",
        "zip_ref = zipfile.ZipFile(local_zip, 'r')\n",
        "zip_ref.extractall('/tmp/training')\n",
        "zip_ref.close()\n",
        "\n",
        "local_zip = '/tmp/validation-horse-or-human.zip'\n",
        "zip_ref = zipfile.ZipFile(local_zip, 'r')\n",
        "zip_ref.extractall('/tmp/validation')\n",
        "zip_ref.close()"
      ],
      "execution_count": 10,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "--2019-05-10 04:28:27--  https://storage.googleapis.com/laurencemoroney-blog.appspot.com/horse-or-human.zip\n",
            "Resolving storage.googleapis.com (storage.googleapis.com)... 173.194.79.128, 2a00:1450:4013:c05::80\n",
            "Connecting to storage.googleapis.com (storage.googleapis.com)|173.194.79.128|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: 149574867 (143M) [application/zip]\n",
            "Saving to: ‘/tmp/horse-or-human.zip’\n",
            "\n",
            "/tmp/horse-or-human 100%[===================>] 142.65M  46.8MB/s    in 3.0s    \n",
            "\n",
            "2019-05-10 04:28:31 (46.8 MB/s) - ‘/tmp/horse-or-human.zip’ saved [149574867/149574867]\n",
            "\n",
            "--2019-05-10 04:28:33--  https://storage.googleapis.com/laurencemoroney-blog.appspot.com/validation-horse-or-human.zip\n",
            "Resolving storage.googleapis.com (storage.googleapis.com)... 108.177.127.128, 2a00:1450:4013:c05::80\n",
            "Connecting to storage.googleapis.com (storage.googleapis.com)|108.177.127.128|:443... connected.\n",
            "HTTP request sent, awaiting response... 200 OK\n",
            "Length: 11480187 (11M) [application/zip]\n",
            "Saving to: ‘/tmp/validation-horse-or-human.zip’\n",
            "\n",
            "/tmp/validation-hor 100%[===================>]  10.95M  24.0MB/s    in 0.5s    \n",
            "\n",
            "2019-05-10 04:28:33 (24.0 MB/s) - ‘/tmp/validation-horse-or-human.zip’ saved [11480187/11480187]\n",
            "\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "y9okX7_ovskI",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 87
        },
        "outputId": "6364dceb-d869-4363-bb15-f96f7712171a"
      },
      "source": [
        "train_horses_dir = '/tmp/training/horses'\n",
        "train_humans_dir = '/tmp/training/humans'\n",
        "validation_horses_dir = '/tmp/validation/horses'\n",
        "validation_humans_dir = '/tmp/validation/humans'\n",
        "\n",
        "train_horses_fnames = os.listdir(train_horses_dir)\n",
        "train_humans_fnames = os.listdir(train_humans_dir)\n",
        "validation_horses_fnames = os.listdir(validation_horses_dir)\n",
        "validation_humans_fnames = os.listdir(validation_humans_dir)\n",
        "\n",
        "print(len(train_horses_fnames))\n",
        "print(len(train_humans_fnames))\n",
        "print(len(validation_horses_fnames))\n",
        "print(len(validation_humans_fnames))\n",
        "\n",
        "# Expected Output:\n",
        "# 500\n",
        "# 527\n",
        "# 128\n",
        "# 128"
      ],
      "execution_count": 11,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "500\n",
            "527\n",
            "128\n",
            "128\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab_type": "code",
        "id": "O4s8HckqGlnb",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 52
        },
        "outputId": "fb948ef0-2cc2-4fc4-91e3-ab82df6a6949"
      },
      "source": [
        "# Define our example directories and files\n",
        "train_dir = '/tmp/training'\n",
        "validation_dir = '/tmp/validation'\n",
        "\n",
        "# Add our data-augmentation parameters to ImageDataGenerator\n",
        "train_datagen = ImageDataGenerator(rescale=1./255.,\n",
        "                                   rotation_range=40,\n",
        "                                   width_shift_range=0.2,\n",
        "                                   height_shift_range=0.2,\n",
        "                                   shear_range=0.2,\n",
        "                                   zoom_range=0.2,\n",
        "                                   horizontal_flip=True)\n",
        "\n",
        "# Note that the validation data should not be augmented!\n",
        "test_datagen = ImageDataGenerator(rescale = 1.0/255.)\n",
        "\n",
        "# Flow training images in batches of 20 using train_datagen generator\n",
        "train_generator = train_datagen.flow_from_directory(train_dir,\n",
        "                                                    batch_size=20,\n",
        "                                                    class_mode='binary',\n",
        "                                                    target_size=(150,150))\n",
        "\n",
        "# Flow validation images in batches of 20 using test_datagen generator\n",
        "validation_generator =  test_datagen.flow_from_directory(validation_dir,\n",
        "                                                         batch_size=20,\n",
        "                                                         class_mode='binary',\n",
        "                                                         target_size=(150,150))\n",
        "\n",
        "# Expected Output:\n",
        "# Found 1027 images belonging to 2 classes.\n",
        "# Found 256 images belonging to 2 classes."
      ],
      "execution_count": 14,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Found 1027 images belonging to 2 classes.\n",
            "Found 256 images belonging to 2 classes.\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab_type": "code",
        "id": "Blhq2MAUeyGA",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 2516
        },
        "outputId": "bd0adffe-53ee-439e-c813-c1a18b366a8f"
      },
      "source": [
        "# Run this and see how many epochs it should take before the callback\n",
        "# fires, and stops training at 99.9% accuracy\n",
        "# (It should take less than 100 epochs)\n",
        "\n",
        "callbacks = myCallback()\n",
        "history = model.fit_generator(train_generator,\n",
        "                             validation_data=validation_generator,\n",
        "                             steps_per_epoch=100,\n",
        "                             epochs=100,\n",
        "                             validation_steps=50,\n",
        "                             verbose=2,\n",
        "                             callbacks=[callbacks])"
      ],
      "execution_count": 15,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.\n",
            "Instructions for updating:\n",
            "Use tf.cast instead.\n",
            "Epoch 1/100\n",
            "13/13 [==============================] - 2s 158ms/step - loss: 0.0098 - acc: 1.0000\n",
            " - 16s - loss: 0.2247 - acc: 0.9104 - val_loss: 0.0098 - val_acc: 1.0000\n",
            "Epoch 2/100\n",
            "13/13 [==============================] - 1s 104ms/step - loss: 0.0023 - acc: 1.0000\n",
            " - 11s - loss: 0.1152 - acc: 0.9601 - val_loss: 0.0023 - val_acc: 1.0000\n",
            "Epoch 3/100\n",
            "13/13 [==============================] - 1s 102ms/step - loss: 1.0643e-04 - acc: 1.0000\n",
            " - 11s - loss: 0.0692 - acc: 0.9766 - val_loss: 1.0643e-04 - val_acc: 1.0000\n",
            "Epoch 4/100\n",
            "13/13 [==============================] - 2s 120ms/step - loss: 8.8854e-05 - acc: 1.0000\n",
            " - 13s - loss: 0.0621 - acc: 0.9757 - val_loss: 8.8854e-05 - val_acc: 1.0000\n",
            "Epoch 5/100\n",
            "13/13 [==============================] - 1s 108ms/step - loss: 0.0201 - acc: 0.9922\n",
            " - 12s - loss: 0.0516 - acc: 0.9834 - val_loss: 0.0201 - val_acc: 0.9922\n",
            "Epoch 6/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 4.1058e-04 - acc: 1.0000\n",
            " - 11s - loss: 0.0609 - acc: 0.9834 - val_loss: 4.1058e-04 - val_acc: 1.0000\n",
            "Epoch 7/100\n",
            "13/13 [==============================] - 1s 105ms/step - loss: 2.1470e-04 - acc: 1.0000\n",
            " - 11s - loss: 0.0377 - acc: 0.9844 - val_loss: 2.1470e-04 - val_acc: 1.0000\n",
            "Epoch 8/100\n",
            "13/13 [==============================] - 1s 102ms/step - loss: 0.0012 - acc: 1.0000\n",
            " - 11s - loss: 0.0457 - acc: 0.9873 - val_loss: 0.0012 - val_acc: 1.0000\n",
            "Epoch 9/100\n",
            "13/13 [==============================] - 1s 104ms/step - loss: 0.0819 - acc: 0.9805\n",
            " - 11s - loss: 0.0254 - acc: 0.9903 - val_loss: 0.0819 - val_acc: 0.9805\n",
            "Epoch 10/100\n",
            "13/13 [==============================] - 1s 101ms/step - loss: 0.2573 - acc: 0.9609\n",
            " - 11s - loss: 0.0350 - acc: 0.9854 - val_loss: 0.2573 - val_acc: 0.9609\n",
            "Epoch 11/100\n",
            "13/13 [==============================] - 2s 116ms/step - loss: 0.0710 - acc: 0.9844\n",
            " - 13s - loss: 0.0237 - acc: 0.9883 - val_loss: 0.0710 - val_acc: 0.9844\n",
            "Epoch 12/100\n",
            "13/13 [==============================] - 1s 107ms/step - loss: 0.0570 - acc: 0.9922\n",
            " - 12s - loss: 0.0366 - acc: 0.9903 - val_loss: 0.0570 - val_acc: 0.9922\n",
            "Epoch 13/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.1066 - acc: 0.9844\n",
            " - 11s - loss: 0.0325 - acc: 0.9883 - val_loss: 0.1066 - val_acc: 0.9844\n",
            "Epoch 14/100\n",
            "13/13 [==============================] - 1s 105ms/step - loss: 0.1561 - acc: 0.9688\n",
            " - 12s - loss: 0.0480 - acc: 0.9903 - val_loss: 0.1561 - val_acc: 0.9688\n",
            "Epoch 15/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 0.2501 - acc: 0.9609\n",
            " - 12s - loss: 0.0445 - acc: 0.9873 - val_loss: 0.2501 - val_acc: 0.9609\n",
            "Epoch 16/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 0.2466 - acc: 0.9648\n",
            " - 11s - loss: 0.0355 - acc: 0.9951 - val_loss: 0.2466 - val_acc: 0.9648\n",
            "Epoch 17/100\n",
            "13/13 [==============================] - 1s 107ms/step - loss: 0.0908 - acc: 0.9922\n",
            " - 11s - loss: 0.0142 - acc: 0.9932 - val_loss: 0.0908 - val_acc: 0.9922\n",
            "Epoch 18/100\n",
            "13/13 [==============================] - 1s 111ms/step - loss: 0.0918 - acc: 0.9844\n",
            " - 12s - loss: 0.0392 - acc: 0.9912 - val_loss: 0.0918 - val_acc: 0.9844\n",
            "Epoch 19/100\n",
            "13/13 [==============================] - 1s 102ms/step - loss: 0.0640 - acc: 0.9922\n",
            " - 11s - loss: 0.0122 - acc: 0.9951 - val_loss: 0.0640 - val_acc: 0.9922\n",
            "Epoch 20/100\n",
            "13/13 [==============================] - 1s 107ms/step - loss: 0.2128 - acc: 0.9609\n",
            " - 12s - loss: 0.0175 - acc: 0.9922 - val_loss: 0.2128 - val_acc: 0.9609\n",
            "Epoch 21/100\n",
            "13/13 [==============================] - 1s 102ms/step - loss: 0.1350 - acc: 0.9805\n",
            " - 11s - loss: 0.0259 - acc: 0.9942 - val_loss: 0.1350 - val_acc: 0.9805\n",
            "Epoch 22/100\n",
            "13/13 [==============================] - 1s 101ms/step - loss: 0.5632 - acc: 0.9453\n",
            " - 11s - loss: 0.0170 - acc: 0.9951 - val_loss: 0.5632 - val_acc: 0.9453\n",
            "Epoch 23/100\n",
            "13/13 [==============================] - 1s 104ms/step - loss: 0.3106 - acc: 0.9648\n",
            " - 11s - loss: 0.0349 - acc: 0.9903 - val_loss: 0.3106 - val_acc: 0.9648\n",
            "Epoch 24/100\n",
            "13/13 [==============================] - 1s 102ms/step - loss: 0.3169 - acc: 0.9609\n",
            " - 11s - loss: 0.0129 - acc: 0.9961 - val_loss: 0.3169 - val_acc: 0.9609\n",
            "Epoch 25/100\n",
            "13/13 [==============================] - 1s 115ms/step - loss: 0.6584 - acc: 0.9375\n",
            " - 13s - loss: 0.0390 - acc: 0.9922 - val_loss: 0.6584 - val_acc: 0.9375\n",
            "Epoch 26/100\n",
            "13/13 [==============================] - 1s 104ms/step - loss: 0.4328 - acc: 0.9570\n",
            " - 13s - loss: 0.0328 - acc: 0.9883 - val_loss: 0.4328 - val_acc: 0.9570\n",
            "Epoch 27/100\n",
            "13/13 [==============================] - 1s 105ms/step - loss: 0.2842 - acc: 0.9648\n",
            " - 11s - loss: 0.0158 - acc: 0.9942 - val_loss: 0.2842 - val_acc: 0.9648\n",
            "Epoch 28/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.7104 - acc: 0.9297\n",
            " - 11s - loss: 0.0162 - acc: 0.9961 - val_loss: 0.7104 - val_acc: 0.9297\n",
            "Epoch 29/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.4531 - acc: 0.9609\n",
            " - 11s - loss: 0.0309 - acc: 0.9922 - val_loss: 0.4531 - val_acc: 0.9609\n",
            "Epoch 30/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.4246 - acc: 0.9570\n",
            " - 11s - loss: 0.0330 - acc: 0.9912 - val_loss: 0.4246 - val_acc: 0.9570\n",
            "Epoch 31/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 0.1672 - acc: 0.9766\n",
            " - 11s - loss: 0.0230 - acc: 0.9912 - val_loss: 0.1672 - val_acc: 0.9766\n",
            "Epoch 32/100\n",
            "13/13 [==============================] - 1s 105ms/step - loss: 0.0596 - acc: 0.9883\n",
            " - 12s - loss: 0.0217 - acc: 0.9951 - val_loss: 0.0596 - val_acc: 0.9883\n",
            "Epoch 33/100\n",
            "13/13 [==============================] - 1s 105ms/step - loss: 0.4312 - acc: 0.9531\n",
            " - 11s - loss: 0.0101 - acc: 0.9961 - val_loss: 0.4312 - val_acc: 0.9531\n",
            "Epoch 34/100\n",
            "13/13 [==============================] - 1s 105ms/step - loss: 0.4192 - acc: 0.9531\n",
            " - 11s - loss: 0.0313 - acc: 0.9942 - val_loss: 0.4192 - val_acc: 0.9531\n",
            "Epoch 35/100\n",
            "13/13 [==============================] - 1s 104ms/step - loss: 0.4657 - acc: 0.9531\n",
            " - 11s - loss: 0.0108 - acc: 0.9961 - val_loss: 0.4657 - val_acc: 0.9531\n",
            "Epoch 36/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 0.4556 - acc: 0.9570\n",
            " - 11s - loss: 0.0112 - acc: 0.9942 - val_loss: 0.4556 - val_acc: 0.9570\n",
            "Epoch 37/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.7119 - acc: 0.9414\n",
            " - 11s - loss: 0.0371 - acc: 0.9932 - val_loss: 0.7119 - val_acc: 0.9414\n",
            "Epoch 38/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 0.3419 - acc: 0.9648\n",
            " - 11s - loss: 0.0258 - acc: 0.9922 - val_loss: 0.3419 - val_acc: 0.9648\n",
            "Epoch 39/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.6260 - acc: 0.9492\n",
            " - 12s - loss: 0.0047 - acc: 0.9971 - val_loss: 0.6260 - val_acc: 0.9492\n",
            "Epoch 40/100\n",
            "13/13 [==============================] - 1s 103ms/step - loss: 0.5787 - acc: 0.9531\n",
            " - 11s - loss: 0.0382 - acc: 0.9893 - val_loss: 0.5787 - val_acc: 0.9531\n",
            "Epoch 41/100\n",
            "13/13 [==============================] - 1s 100ms/step - loss: 0.5009 - acc: 0.9570\n",
            " - 11s - loss: 0.0094 - acc: 0.9971 - val_loss: 0.5009 - val_acc: 0.9570\n",
            "Epoch 42/100\n",
            "13/13 [==============================] - 1s 106ms/step - loss: 0.4088 - acc: 0.9570\n",
            " - 11s - loss: 0.0144 - acc: 0.9971 - val_loss: 0.4088 - val_acc: 0.9570\n",
            "Epoch 43/100\n",
            "13/13 [==============================] - 1s 104ms/step - loss: 0.3137 - acc: 0.9648\n",
            " - 11s - loss: 0.0201 - acc: 0.9951 - val_loss: 0.3137 - val_acc: 0.9648\n",
            "Epoch 44/100\n",
            "13/13 [==============================] - 1s 101ms/step - loss: 0.2870 - acc: 0.9727\n",
            " - 11s - loss: 0.0195 - acc: 0.9961 - val_loss: 0.2870 - val_acc: 0.9727\n",
            "Epoch 45/100\n",
            "13/13 [==============================] - 1s 102ms/step - loss: 0.4647 - acc: 0.9531\n",
            " - 11s - loss: 0.0212 - acc: 0.9951 - val_loss: 0.4647 - val_acc: 0.9531\n",
            "Epoch 46/100\n",
            "13/13 [==============================] - 2s 120ms/step - loss: 0.3864 - acc: 0.9570\n",
            "\n",
            "Reached 99.9% accuracy so cancelling training!\n",
            " - 13s - loss: 0.0041 - acc: 0.9990 - val_loss: 0.3864 - val_acc: 0.9570\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "C2Fp6Se9rKuL",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 299
        },
        "outputId": "faa82b4d-9484-42a5-af1f-e97442b2936d"
      },
      "source": [
        "import matplotlib.pyplot as plt\n",
        "acc = history.history['acc']\n",
        "val_acc = history.history['val_acc']\n",
        "loss = history.history['loss']\n",
        "val_loss = history.history['val_loss']\n",
        "\n",
        "epochs = range(len(acc))\n",
        "\n",
        "plt.plot(epochs, acc, 'r', label='Training accuracy')\n",
        "plt.plot(epochs, val_acc, 'b', label='Validation accuracy')\n",
        "plt.title('Training and validation accuracy')\n",
        "plt.legend(loc=0)\n",
        "plt.figure()\n",
        "\n",
        "\n",
        "plt.show()"
      ],
      "execution_count": 16,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAEICAYAAACzliQjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJztnXd8VOXSx39DCaFIT0KTpmgIJZiE\nIoqAiGJBroggoggK2Hu53msH67W8qK/6CgiIIuWiKNgx4lWuotQAklAkKCEhUkMJEJLM+8fsSU42\nW87unt1Nduf7+eSzu6c+58nu78yZmWceYmYoiqIo0UGNcDdAURRFCR0q+oqiKFGEir6iKEoUoaKv\nKIoSRajoK4qiRBEq+oqiKFGEin4UQkQ1iegoEbW1c9twQkRnEpHt+cdEdBER7TR93kJE/axs68e5\nZhDRP/3dX1GsUCvcDVC8Q0RHTR/rATgJoMTx+RZmnuvL8Zi5BEADu7eNBpj5bDuOQ0QTAFzPzANM\nx55gx7EVxRMq+tUAZi4TXYclOYGZv3W3PRHVYubiULRNUbyh38eqhbp3IgAieoaIFhDRPCI6AuB6\nIjqXiFYS0SEiyiOi14motmP7WkTERNTe8fkDx/oviegIEf1MRB183dax/lIi2kpEBUT0BhH9l4jG\nuWm3lTbeQkTbieggEb1u2rcmEf0PEe0noh0Ahnjon0eJaL7TsjeJ6FXH+wlElOm4nt8dVri7Y+UQ\n0QDH+3pE9L6jbb8BSHXa9jEi2uE47m9EdKVjeTcA/wugn8N1ts/Ut0+Z9r/Vce37iegTImpppW98\n6WejPUT0LREdIKI9RPSw6TyPO/rkMBGtJqJWrlxpRLTC+D87+vMHx3kOAHiMiDoR0XLHOfY5+q2R\naf92jmvc61j/GhHFOtrc2bRdSyIqJKJm7q5X8QIz6181+gOwE8BFTsueAVAEYCjkRl4XQE8AvSFP\ncx0BbAVwp2P7WgAYQHvH5w8A7AOQBqA2gAUAPvBj23gARwAMc6y7H8ApAOPcXIuVNn4KoBGA9gAO\nGNcO4E4AvwFoA6AZgB/k6+zyPB0BHAVQ33TsvwCkOT4PdWxDAC4EcBxAd8e6iwDsNB0rB8AAx/uX\nAXwPoAmAdgA2O207EkBLx//kOkcbEhzrJgD43qmdHwB4yvH+YkcbewCIBfAWgO+s9I2P/dwIQD6A\newDUAdAQQC/Hun8AyADQyXENPQA0BXCmc18DWGH8nx3XVgzgNgA1Id/HswAMAhDj+J78F8DLpuvZ\n5OjP+o7tz3OsmwbgWdN5HgCwONy/w+r8F/YG6J+P/zD3ov+dl/0eBPBvx3tXQv5/pm2vBLDJj21v\nAvCjaR0ByIMb0bfYxj6m9R8DeNDx/geIm8tYd5mzEDkdeyWA6xzvLwWwxcO2nwG4w/Hek+j/af5f\nALjdvK2L424CcLnjvTfRfw/Ac6Z1DSFxnDbe+sbHfr4BwCo32/1utNdpuRXR3+GlDSOM8wLoB2AP\ngJoutjsPQDYAcnxeD2C43b+raPpT907ksMv8gYgSiehzx+P6YQCTATT3sP8e0/tCeA7eutu2lbkd\nLL/SHHcHsdhGS+cC8IeH9gLAhwBGO95f5/hstOMKIvrF4Xo4BLGyPfWVQUtPbSCicUSU4XBRHAKQ\naPG4gFxf2fGY+TCAgwBam7ax9D/z0s+nQ8TdFZ7WecP5+9iCiBYS0W5HG2Y7tWEnS9JABZj5v5Cn\nhvOJqCuAtgA+97NNCtSnH0k4pyu+A7Esz2TmhgCegFjewSQPYokCAIiIUFGknAmkjXkQsTDwllK6\nEMBFRNQa4n760NHGugAWAXge4nppDOAbi+3Y464NRNQRwNsQF0czx3GzTMf1ll6aC3EZGcc7DeJG\n2m2hXc546uddAM5ws5+7dccc
bapnWtbCaRvn63sRknXWzdGGcU5taEdENd20Yw6A6yFPJQuZ+aSb\n7RQLqOhHLqcBKABwzBEIuyUE5/wMQAoRDSWiWhA/cVyQ2rgQwL1E1NoR1Pu7p42ZeQ/EBTEb4trZ\n5lhVB+Jn3gughIiugPierbbhn0TUmGQcw52mdQ0gwrcXcv+bCLH0DfIBtDEHVJ2YB+BmIupORHUg\nN6Ufmdntk5MHPPXzEgBtiehOIqpDRA2JqJdj3QwAzxDRGST0IKKmkJvdHkjCQE0imgTTDcpDG44B\nKCCi0yEuJoOfAewH8BxJcLwuEZ1nWv8+xB10HeQGoASAin7k8gCAGyGB1XcgAdegwsz5AEYBeBXy\nIz4DwDqIhWd3G98GkA5gI4BVEGvdGx9CfPRlrh1mPgTgPgCLIcHQEZCblxWehDxx7ATwJUyCxMwb\nALwB4FfHNmcD+MW07zIA2wDkE5HZTWPs/xXEDbPYsX9bAGMstssZt/3MzAUABgO4GnIj2gqgv2P1\nSwA+gfTzYUhQNdbhtpsI4J+QoP6ZTtfmiicB9ILcfJYA+MjUhmIAVwDoDLH6/4T8H4z1OyH/55PM\n/JOP1644YQRHFMV2HI/ruQBGMPOP4W6PUn0hojmQ4PBT4W5LdUcHZym2QkRDIJkyxyEpf6cg1q6i\n+IUjPjIMQLdwtyUSUPeOYjfnA9gB8WVfAuAqDbwp/kJEz0PGCjzHzH+Guz2RgLp3FEVRogi19BVF\nUaKIKufTb968Obdv3z7czVAURalWrFmzZh8ze0qRBlAFRb99+/ZYvXp1uJuhKIpSrSAib6PSAah7\nR1EUJapQ0VcURYkiVPQVRVGiCBV9RVGUKEJFX1EUJYrwKvpENJOI/iKiTW7Wk2NatO1EtIGIUkzr\nbiSibY6/G+1suKIoiuI7Viz92fAw/yhkFqJOjr9JkOqHcJRgfRIyTVsvAE8SUZNAGqsoiqIEhtc8\nfWb+gRyTYrthGIA5jnKrKx21xVsCGABgGTMfAAAiWga5ecwLtNGuOHYMePFF+4976aXAueda3/74\ncWDqVHn1lYsuAi64wPf9FEVRrGLH4KzWqDg1Wo5jmbvllXBMwjAJANq29TYBkmsKC4FnnvFrV7cw\nAx9+CGzdCtSwGP14913gn/8EyMc5qpiBJUuA9et9b6eiKIpVqkQgl5mnMXMaM6fFxXkdReySuDig\ntNTevw8+AH7/Hfj+e6vXAUyfDqSm+n6u++8HtmyR94qiRCErVgA//xz009gh+rtRcZ7QNo5l7pZX\nG66+GmjSBJgxw9r2q1cDGzYAEyf6fq7ERODECeBPLR6rKNHH7t0iOJMmBd3ys0P0lwAY68ji6QOg\ngJnzAHwN4GIiauII4F7sWFZtiI0Frr8e+OgjYP9+79vPmAHUqweMHu37uRIds6dmZfm+r6Io1ZiT\nJ4ERIyQwuWCBdV+yn1hJ2ZwHmbj4bCLKIaKbiehWIrrVsckXkEkztgOYDuB2AHAEcKdA5i9dBWCy\nEdStTkyYABQViavHE0ePiv9/5EigYUPfz6OiryhRyn33AStXArNmAUlJQT+dlewdj3arI2vnDjfr\nZgKY6V/TqgbduwO9eokVf/fd7gO0CxeK8Pvj2gEkJtGsmYq+ogREaal/lnJBgQTlXNGwYfCs79mz\ngbffBh56CLjmmuCcw4kqEcit6kyYAGzaBPzyi/ttpk8HOnf2Lb3TmcREIDPT//0VJarJzhbrafZs\n3/Z79lmgcWMJ4Ln669IF+PFH+9u7di1w663AwIHAc8/Zf3w3VLnpEtPS0riq1dM/cgRo2RK49lrX\nQd1Nm4Bu3YBXX5UnNX+ZMAFYuhTIz/f/GIoSlTADl10GfPUVkJwMrFtnLW+6uBg4/XSgbVvXwbhT\np4C33gJ27pQg64svyg0iUPbvL0/zW7NGblYBQkRrmDnN64bMXKX+UlNTuSpy883M9eszHz5ced
09\n9zDXrs28d29g53jpJWaAef/+wI6jhIAPPmC+8ELmgweDe54jR5jvv5+5Tx/m/Pzgnssd2dnMr77K\nfMEFzIMGMa9fb22/0lLmhQuZu3ZlfvJJ5hMngtfGefPkx9O7t7yuWmVtv08+ke0//dT9NkePMj/w\nAHONGswtWjD/+99ybc4cO8b88cfMN9zAfOaZ8vrxx7LcTHEx8+DBzDExzL/+av0avQBgNVvQ2LCL\nvPNfVRX9n3+W3po2reLy48eZmzZlHjky8HMsXSrn+OmnwI+lBJHDh5mbN5d/1hVXMJeUBOc8X3zB\n3K6dnKd2beYBA5hPnQrOucyUljJv3Mg8eTLzOefI+QHm7t2Z4+KYa9ZkfuQR5sJC98f480/pG4D5\n9NPlNTGR+ccf7W/vgQPM8fHMaWliMdWtyzxpkrV9L7+cuVUra/26Zk15f1x5pVzjgQPMc+YwX3WV\nnBdgbtKE+bLLRBgAWf63vzG/955s/89/yvLp0wO7bidU9G2mtFQMlp49Ky7/8EPpxWXLAj/Htm1y\nrJkzAz9WlWHhQuZbbmHOy7PneOvXM197rVifgbJrF/Mbb8gP+D//sb7f00/LP2rSJHl9+unA22Im\nP5959Gg5dufOIpRz5sjnBx6w91zOrFjBfNZZci4i5r595RF0+3ZZv38/8/jxsv6MM5i//bbi/sXF\nzK+9xtygAXO9esyvvCKC+uWX5TewW26x9wlp4kS5Ea1dK59vvFHOf+SI5/127RLr/dFHrZ/r1Cnp\nj7p15a9WLbmmVq2Y77hD+qOoqHzb9HRZ3rq1bGdsP2GCX5fqCRX9IDB1qvSY+en2wguZO3Swx9gr\nLpYnvoceCvxYVYLiYua2baXTGjcWy8bVY7FV9u0rF442bZizsnw/xpYtzM8/z9yrV7kFGxMjxz16\n1FobTjtNLLfSUuaxY0UcP//c97Y4U1oqd/wmTaRNTz1V0SVy553S3gULAj+XK3btEku+Y0fmt99m\nzs11v216urgwABHZffuYMzLK+3XIkMo35qNHxVVVowZzy5bMixYF9n1gZv7hBznfgw+WL1uxQpa9\n+67nfSdPlu1+/9338+7YwXzTTcwPP8y8cqV3ASgpYf7lF+a//118xceP+35OL6joB4H9+5nr1GG+\n6y75vH279OAzz9h3jq5dmYcOte94YeXLL6WDnn9eXBMAc//+/om12Q86a5Y8zsfHi9B4o6iI+eWX\nmZOSyoU+LY35ueeYMzNdC4c7HnpIRH7TJvlcWMjco4fc1Axr2B8KCsRfDjCffz7z5s2Vtzl5Uizv\n+vXLz28XJ06IYDdo4PrcrigsZP7HP8R6bdpUXuPimOfO9Szmq1eXu0kuv1z86Z5cRZ7anJhY+YZd\nWipPSH36uN+3pET2u+gi389bRVHRDxKjR8vv2/i+16jBnJNj3/FHjGDu1Mm+44WV4cPF933ypPwQ\nZ8yQzqtTh3nKFFluFcMPOmOGfM7KkkfmJk3EgnLHypVyJwUkEPn66+KLdWbChIouAlfk5DDHxkqA\nzsyOHdKO7t0rB+2sYlzfW295thp375ZgYqdOzIcO+XcuV9xyi5x/0SLf983IkEfem24Si98Kp04x\n/+tfzI0ayXnr15cv/9y51q/LcLN98UXlda++Kus2bnS971dfBfepKQyo6AeJ9HTptdmz5bd3xRX2\nHv+xx0R7gpnoEBL27BHLz9kHnZfHPGqUdGKXLtai1osXy/YTJ1ZcvmOHuCIaNKjskz98WB7JiMQV\n5Ck7g1kCbAkJ8gRQXOx6m1tvlWty5Q746is513XX+e6y2LNH/N/XXmtt+x9/lHZceaU9fsV335X+\n/fvfAz+Wr5w8yfz113LTadGCy4LWQ4Ywv/OO9I0rsrLkqW/UKNfr9+6V9ffc43r9iBHMzZpFwA+t\nHBX9IFFSIvGrJk3Ya6aXP3zwgRz3t9/sPW7IefFFuZDMTN
frP/tMsjqImG+/XdwbrsjKEh96z56u\nf6A5OfKIX7euCC8z85IlIvRE4gd3d2xnjLS/qVMrr9u+XYT29tvd7//MM7L/a69ZO5/B3XfLnX7r\nVuv7vP462+JbXLVKnrwGDQpNZpAnSkqY//tfcbN17MhlweTzz5eA8I4dsl1pqbgLGzf2nCAwapS4\nnZz95/n5cmO5//7gXUsYUNEPIs89Jz3XsqX9v5PVq9nvp2yr7N/vOUYXMKWl4n7o18/jZjlZR7jg\nFoePvHVryZk2c/iw+Gbj4spcMqWlLu4jf/0lfvWYGOaLL5YO7NpV8mx9bfeQIfLk4OwCuv56ubF4\n6riSErG+a9WSOIEVdu6Udjs/xVhp65gx0ncLF/pn8e/dK4H2tm2lD6sSpaXiNnrqKXGbGbGYHj3k\nugF5EvDEt9/KdnPnVlz+r3/Jcquxi2qCin4Qyc0V4+jxx+0/9pEj9hhwnhg0SJ5WgpVezsuXy0W8\n957HzZKSmIcNY/HJd+sm+1x9tfitS0vlEbxGDfGpOZgxg10nXBw4IIG7OnWYn33Wt3iBmR07RNyH\nDi1302zYIOL68MPe9z90SG54CQlyHd4YP17a7CrO4I1jx0QEAblpOqcMeqK4WIKYdepYH8gUTrZv\nl2B8377yv7jgAmsZMx07ylOBQWmppKSed15w2xsGVPSDzB9/WPtt+cPpp4thGQy2bi03mr75Jjjn\n4DFjJEDnIahZUiIGcVkgvKhIHqHq1JF9R46URr70UoX90tLY/biIoqLAh0Uzl1uCxuPWsGHMDRta\nHyq9aZMEJvv29XzzycyUDrjvPv/beuRI5cFBTZtKGuUnn0hbXP3dfz9bSmusivz1l/WAufFYbrjO\n/vMfLgvKRRgq+tWYwYNF3ILB3/8u7uPGjZmvuSYIJzDyWu+4w+NmubnlN58pU0wrtm6VTBBAGmgK\niq5bV77PnDlBaLtBURFzcrL4777+2kUjLbBggeznqR9GjBBXkl2uFXMZgMaNyzvL3Z/VUavVmdxc\n+cIbQerrr5cbuL9ZVlUYFf1qzF13iRaU6V2gA1gcFBWJ12HYsFK+955Srl3bR72x0o7XXuNKI9hc\n8Msvslm9eszt2zs9qZeWSoaKUwDujjvE/Q0wv/CCD+32h19/FTdCTIzEFFwVXfLGAw+wWzeXEbx5\n4onA2+qKoiLm774Tf7+rv88/D3/gNlQMGyZf/Px8Sbm97bZwtygoqOhXY958U/4zOTksecYdO8oA\npwD5+GM57mdn3ceb2l/OgLhJvVJUJCrbuLHkP7vDqFVh4TFl0SJpy8MPs6UyFseOidfnuuvkhugu\nE89W7r6b3WbzWOHUKfEnx8ZWzv8fMkTcMHbm2iuu+ewz+T9edJG8ehqLUY1R0a/GGGMBlr25pbxo\nU61agY3CLCzkyzpmcmvs4lNxLZk7duRz8V9ObLSbS/d58FX/+qu4OoDyNLqnn3Zt9a9cyZayKri8\npEVOjrWCdUbpmeXLJU5qR4E7rxQWipsmkOBNfr4EWdu3L48JGH5lp3iFEiSKi8tr30SwvqjoV2MM\nf/cbsQ/IUPFffpGBJH37+pdys2wZ/9nufK6BYn4seYmIT2Ehz7zs3www/9j4CslRNwv5kSNiTteo\nIcWkFi+WH8+4cdK4hx6qLPye6k878cADYgCXllorTd2vn5R6KS2VxA0v2aBVi5UrxU10ySXSh+ed\nJ33qT+kBxT8ef1y+t2+/He6WBA0V/WpM6TfLuCEO8R2N3i9P5XvvPd+/tPv2SRYHwE83ncpEpRVq\nYB09ynxa/WK+sZmjpvOll0re+OefS+62MXDK7IIoKZFlgPhGjZvQ4cMi+DfdZKlpI0eWl5vYuFEO\n98orrrfNyuIKfvxRo+QGUK34v/+TizDGEUSw+FRJ9u2TuikRGMA1UNGvrixZwhwTw73qbuBB/Uwj\nUEtLJcG+USNrI6u++U
YCkLVqcfEjj3Lb00t48ODKm91yC3PduqV88Lm3RLSNSGlSkoyOdEVpabkz\nfuxY8V2/8458tjggqm9f5oEDyz/36SPjsFx5jR58ULxbxuDLe++VplYrSkvLSxJ37Bi8fN8A2blT\n5juxKXdACSEq+tWR+fNF3Xr25LGjTnDr1k7rt26VdEhvuZaLF4t4d+vGnJFRVltq4cLKm65axWV1\nvnjnTsmxt1IMrbS0vDTt1VeLr7RrV8tqcfrpcr8wMAZdOd9nTp6Ue9dVV5UvM9Lo/UmoCSvHj0th\nN9Ngs6rGQw9J3xoVD5Tqg4p+dWPmTPGf9+vHXFBQNqakUtkYo77L0qWujzN3ruQl9+4to1RZNLl5\nc9ela0pLJU57zjl+tvuVV7gs79tizZniYmmiee6KI0ckK2fcuIrbGlk+5nL1778vy7Zs8bPNiltS\nUqRvV64Md0sUX7Eq+jUCno1XCZw33gBuugm46CKZ2LlhQ3TuLKu2bHHa9qGHgC5dgDvuAI4erbhu\nxgzg+uuBfv2AZcuAJk3w11/Ap58CN94I1KlT+dREwMSJMo/02rV+tP3++4Hp04Fzz5VzW2DPHqCk\nBGjTpnxZgwYyL/XChUBBQfny6dNlu0suKV/WsqW85uX50V7FLQcOyPcAAPLzw9sWJXio6IebF14A\n7r4b+NvfgCVLgHr1AACJibI6K8tp+5gYYNo04M8/gSeeKF8+daqo95AhwBdfAKedBgCYMwcoLgZu\nvtl9E667DoiNlXuGX0yYAPz0E9C0qaXNd+2S19NPr7h84kSgsBCYN08+//EH8M03cj+sWbN8OxX9\n4LB8uTyyAcBff4W3LUrwUNEPF8zAY48B//iHqO7ChRVM8TPOAGrVciH6ANC3L3DrrcBrrwFr1gDP\nPgvcdx8wfDiweDFQt27ZKWbMAM47D2VPDq5o0gS45hpg7lzg2DGbr9MF7kQ/LQ3o3r385jNzprze\ndFPF7QzRz80NXhujkfT0MptDRT+CUdEPB8wi0s8+K+btnDlA7doVNqldW4TfpegDwPPPA/HxwODB\ncvO44QZgwYIKN44VK8Q9NHGi9yZNmAAcPgwsWhTAdVkkJ0denUXfcDWtWSN/M2cCF18MtGtXcbvG\njeUy1dK3l/R0YOBAeUhU907koqIfakpKgEmTxEq/917gnXcq+i5MJCYCmZlujtO4scQCDh4Uq3/2\nbHk0MDF9OtCwITBihPdm9esHnHWW7BNsdu0Si7Jx48rrxowRV9O4cXJzmDCh8jZEYu2r6NtHTg6w\ndSswaJDYEmrpRy4q+qHk1Clg7FjxXzz+OPDqq6JgbkhMBLZvl91cMmKEKOhbbwE1Kv4rDx0C/v1v\nEdH69b03jUgE9r//9XCjsYldu8TKd3XpTZrIZW3aBMTFAVde6foYKvr2kp4ur4MGAQkJKvqRjIp+\nsDlxAvj8c4mktmoFfPihBG8nT/Yo+ID44U+dArKzPWzUpo3L43z0kZzaUwDXmRtvlIeFOXOs7+MP\nOTmVXTtmDOv+xhslbu0KFX17SU+Xm2zXrmrpRzq1vG+i+Mzhw5JBs3ixvB49Kn6WK66QtMZLL7V0\nGHMGz1ln+daE334TF0pKivV94uOB1FRg5UrfzuUru3ZJKMIdF1wg90ZP3dSyJfDdd/a3LRphFtG/\n8EJ5YIyPlyc+JTJR0feHY8eA5GTJKXRFSYn8kuLjJTPnqqvkF+XObHXD2WfLa1aWezeHO3bsADp0\n8PowUYmUFMniKS2t5DGyheJisdA9WfpEkrPviVatxIV1/HhZspLiJ1u2SCbUoEHyOSEB2LdPvsZu\nwk1KNUZF3x/+/W/g998lgOoqN71OHRH5c88N6FfTuDHQooWHDB4PZGcDHTv6vl9KCvD227L/GWf4\nvr83cnPlhuJJ9K1gpG3u2SM3N8V/zP58QGwVZmD/fnmvRBYq+v4wfbqY4W+95bsp7SMe
M3jcwCyW\n/oABvp/PcAetWRMc0TfSNc2jcf3BPEBLRT8w0tOB9u3LjQRD6PPzVfQjEQ3k+srmzTL6dMKEoAs+\nIKKflVU+UtIK+/dLGMEfS79rVxkj4FdJBgu4G5jlKzoq1x5KSmQkrmHlA+LeATSYG6mo6PvKjBmi\nimPHhuR0nTuL79qXH+COHfLqjwUcEwN061Z9RF9H5QbGunXy/TKLvmHdq+hHJpZEn4iGENEWItpO\nRI+4WN+OiNKJaAMRfU9EbUzr/kVEvxFRJhG9ThQC8zhYnDwp+YzDhoXsuddtDR4PGCme/lj6gLh4\n1q717enCKjk5MuKzUaPAjtO8uaSXqqUfGIY/f+DA8mVm944SeXgVfSKqCeBNAJcCSAIwmoiSnDZ7\nGcAcZu4OYDKA5x379gVwHoDuALoC6Amgv22tDzWffCK+Eyt1DWzCH9E3LP327f07Z0qKXOaff/q3\nvyd27Qrcnw9IZlFCgnXR/+03L+MdopT0dCna2qJF+bImTST/QC39yMSKpd8LwHZm3sHMRQDmAxjm\ntE0SACNrerlpPQOIBRADoA6A2gCqr/0wfboUgrnoopCdsk0bybf3JZibnS3WWoMG/p0zNVVeg+Hi\nMUbj2oEvA7RGjpRyR0o5J09KfSazawcoz9VX0Y9MrIh+awC7TJ9zHMvMZAAY7nh/FYDTiKgZM/8M\nuQnkOf6+ZuZK8kVEk4hoNRGt3rt3r6/XEBp+/13MoptvDk4Cuxtq1JBEIV/dO4FktHTrJpZeMETf\n22hcX7Aq+kVFkou+e7c9540Ufv5Zxjk4iz4goq/uncjELvV6EEB/IloHcd/sBlBCRGcC6AygDeRG\ncSER9XPemZmnMXMaM6fFxcXZ1CSbmTlTFHj8+JCfunNn3907/vrzARnslJRkv+gXFUlevR3uHUAG\naFkR/d9/lywVtVwrkp4uX+n+LhyuaulHLlZEfzcAs23WxrGsDGbOZebhzHwOgEcdyw5BrP6VzHyU\nmY8C+BLAuba0PJQUFwOzZgGXXWafYvlAYqIM/i0s9L5tcbH44gPNXU9JkVx9O4O5ublyPDst/b17\nPRSkc2DcMPPzgxOcrq6kpwM9e7oOqmvRtcjFiuivAtCJiDoQUQyAawEsMW9ARM2JyDjWPwA4pr/A\nn5AngFpEVBvyFBDkGo5B4PPPxaQMYQDXjBHM3brV+7Y5OSL8gVj6gIh+fr692THu6uj7i5G26c0N\nYYj+yZPAkSP2nLu6c/gw8Ouvrl07QLl7R2+SkYdX0WfmYgB3AvgaItgLmfk3IppMREZFmAEAthDR\nVgAJAJ51LF8E4HcAGyF+/wxmXmrvJYSA6dNFYS67LCyn9yWDx8hQCdTSD0Yw18jRt+thyeoALXO/\nqfUq/PCDuLw8if7x46GZSU0L5pLrAAAgAElEQVQJLZbKMDDzFwC+cFr2hOn9IojAO+9XAuCWANsY\nXnJygC+/BB55pNIkJaGiUycZ/Gslg8dI1wzU0k9OlnOuXSvFQe3AroFZBlYHaGVmyni6U6fEej3z\nTHvOX51JT5fJavr2db3ePCrX3ywwpWqiI3K9MWuWVAjzpTC9zcTGiuVu1dKvWTNwa7pBA8kaWrMm\nsOOY2bVL/MeOOdsDxoqlzyz91quXfFZLX0hPl7mTY2Ndr9cBWpGLiv6sWeI/uece4PvvxSFuUFoK\nvPuu5OUHajoHiNUMnh07gLZt7XkoMUbm2kVOjr1x8IQEeRrxJPp5eeLHNzJUVPQl+L1xo3vXDqCl\nGCIZFf2PPxYT9J13ZCx6y5Zi1X/2mfz98YfriVpDTGKiBHJLSjxv529JZVekpopQ2/XDt3NgFiA3\ntrg4z6Jv3Cj7ORKF1XIF1q+X1z593G+jRdciFxX9jAzgb3+TWSP+/W+Z0mnRImDoUKmx06yZrA8z\niYky/aG30gjG5Cl2YJRZXrfOnuPZLfqA9wFahuh3
7y7lBVTEyvukc2f32xjDZfQmGXlEdz39AwdE\niXr0ECf2iBHyd/KkzMW3ZIk4PuvUCXdLK2TwuBP1Y8dE1Oyy9Hv0kNc1a4BLLgnsWCdPSttCLfqZ\nmRJDaNlSBxwZZGVJbMWw5l0RGyszfGp/RR7RLfoZGfKanFxxeZ06MkGrxblsQ4Eh+pmZ7ptlV7qm\nQePGMpGKHX59owSC3WPbWrUCNmxwvz4rS/qOSEsLGJj7xBM6QCsyiW73juHcdBb9Kkjz5vLnKZgb\naEllV9gVzLU7XdOgZUsRcnexDkPgABUxg8xMz64dA30yikyiW/QzMqSmrKfn3CqEMYuWOwKZPMUd\nqalyMzl4MLDj2D0a16BlSxH8ffsqrztyRM5riL6KGFBQIO4wo088oU9GkYmKfjWw8g28iX52toQm\nmje375xGMDdQa9/u0bgGnnL1jbIVhlUbHy9hHG+1eiKZLVvk1Yro65NRZBK9ol9UJDNrGNHKakBi\nouRY79/ver2RuWPn3GTnnCOvdoh+kyZA/fqBt8mMp1G5xg3S7N4BpA+jFec+8UR8vHzXzENXFGsc\nOBDuFrgnekU/K0tMvmpm6QPl1pozduboGzRvLoO9AhV9O+vom/Fk6WdmyujkM86QzzrgSL72tWpZ\n+57Ex8uIZleuM8U9338vvxs7R7PbSfSKvhHErUaWvuGmcOXiYbY3R9+MHcFcu6ZJdMaY5s+V6Gdl\nieDHxMhnFX3pk06dpBaRN3SAln+89Zb8HletCndLXBO9op+RIcnInTqFuyWWaddOskldFV7bu1fq\n7QejWkRqqvjHDx/2/xjBGJgFyL+wSRP3om92YxgiFs3BycxMa64dQG+S/rB3r0ylDfg28VEoiW7R\n79o1bJUz/aFmTeCss1x/mYKRuWNgBHONhyNfOX5cXATBEH3A9QCt4mJg27aKAhftInbqFLB9u++i\nH803SV+ZM0f6OS7Ot3mtQ0l0ij6zKFg1cu0YuMvgCUaOvkGgGTzGwKxQiv7OnRKrN+ejN2worp5o\nFf0dO+RmqJZ+cGCWqTf69pVqLmrpVyVycyUtoRoFcQ0SE+XHe/JkxeWGpd++vf3nbNFChNVf0Q9W\nuqaBq7lyDSvLLHBE4uKJVsvVl8wdQNxmtWqp6Fvlv/+VJIsJE8TY+PPPqjkJTXSKvlF+oZpa+qWl\n8phuJjtbxLleveCcNzXV/2yEYI3GNTAsffPUfobAnX12xW2jeYCWuz5xh5au8I0ZM6TO08iRvk1x\nGmqiU/QN53T37uFthx+4y+AJVuaOQUqKnNMfy8UYjRssS79lS3HlmEcNZ2WJVd+kScVto1n0MzPl\nqcjVROjuiOb+8oVDh4CFC4HrrpOxKL5McRpqolP0MzJEIRs2DHdLfOass+TVOUiUnR180S8tLX9I\n8oVdu6RCdd269rcLcD1AyzlzxyDa3TtWXTsGOirXGvPmScKCMfXGmWcCNWqo6FcdqmkQFxArom3b\nil+mU6fEfxjMyb369JEv8eef+75vsNI1DZwHaDG7T000LFezKygaMKaN9FX01b1jjRkzRFJSU+Wz\nMcVpVczgiT7RP3ZMcvmqYRDXwDmDZ9cuscKDaeknJEhJ51mzfB+WH6zRuAbOor9vn7h6XFWSjI8X\nV1AgYw6qI/n5UmzNH9GPxpukL6xdK38TJlQsgWJ1itNQE32iv2mTfIOrqaUPlIu+8UM0MneCPY3v\nhAkirF984dt+wRqNa+As+q4ydwyidYCWr5k7BgkJMmPb0aP2tylSmDFDLPvrrqu43OoUp6Em+kS/\nGtXQd0fnzvLAYuS/2z15ijsuv1wyhGbMsL5PYaEUnwqmpd+ggfwZou9J4KI199xf0dcBWp45dgyY\nOxe45prKSQOJiZJa/ccf4WmbO6JP9DMyJH2hXbtwt8RvnDMDduyQfOpgWtOA1GsZN078+sYNxxvB\nqqPvjHmAVlaW
BI1dnTNaRT8zU+JBvn5HorW/rLJokbgKjQCumaqawRN9or9+vVj5dtYfDjHmqRMB\nsfTbtZMyDcHm5pslfjB7trXtg52jb2AeoJWVJbnoNVx8u6PZvWNlikRntOiaZ2bMkIy6fv0qr1PR\nrwqUlsqEqtXYtQPID7FRo4qWfrD9+QZnngkMHAi8+650pzeCPRrXwNnSd+fGMCaYiTYR8ydzB1BL\n3xOZmcCKFZUDuAbNmsn3rapl8ESX6O/YIU64ahzEBeQLZs7gCXaOvjMTJ8o5v/vO+7bBHphlYIj+\n8eNSd8fdHLC1awNNm0aXiB07Jim9/oh+XJy8RtuTkRXefVfcqmPHut+mKmbwRJfoR0AQ18D4Mh05\nIimKobL0AeCqqyRoZSWgu369WIt16gS3TS1biritWSNZTZ4EzuoArS1bpBBrKCbD+P13+VoamVh2\nYpQC8Ef069QBGjeuWjfJ224DXnwxvG04eRJ47z1g2DDPU2x7m+I0HESX6GdkiOO7S5dwtyRgEhNl\nBKoxQjaUln5sLHDDDcDixZ5nVZozB/joI+Cmm4LfJiNt03j68CRwVksLfP+9zKg5fHjwZ4966y3x\nPAbjBuNv5o5BVSvFsHChZMyEkyVL5DvhKoBrJjFRtqtKs49Fl+ivXy//hdjYcLckYIwf8Jdfymso\nLX1AvuxFRcD777tev24dcMst4v+fMiX47TFEf/lycX95mhvHqohlZUkp5vx8YPTo4OVbG1YjEBw3\nSmamBLX9nS+oKo3KLSiQFODNm8WVFy6mT5eR8YMHe97O2xSn4SC6RD8jIyJcO0D5l8kYKBVKSx8A\nunUDevcWF4/zaM0DB4Crr5Yg1vz5oZmnxhD9n3+W8tKe6vxYde9kZclD4ZtvAt9+Czz+uC1NrcSn\nn5ZPdh8MizorS4wCf11sVan+jjEmpaQE2LgxfG1YtkyeYL1lzFXFDJ7oEf0DBySVpJoHcQ06dhQx\nXb9e6sY1bRr6NkycKBbXzz+XLyspAcaMkTz+RYvKsz+CjSH6J096d2PEx0tVxKIiz9sZGS833yzX\n+vzz5VPh2YlhNTZvHjzR99e1A1Qt944h+kD4Jh6fOVOeJq24LT1NcRouokf0Ded3hFj6tWtL+iQg\nVn44hh2MGiUjYc0B3aefBr76Cnj9dXkSCBWNG5dbsu4ydwyMG9Heve63KSyUkZSGWL7xBtCzp2Rq\n2Pmonp0tTxE33yyjne12o5SUSCA3UNHfv18K+4UbI9Bdr57/k/oEQnGx1J8aMsTa2JOaNWXMiFr6\n4SDCRB8oF7dQ+/MNGjQArr0WWLBARiUuXSr++/HjgUmTQtsWonJr35vAWRmgtW1bxSygOnXkyaVO\nHQns2lWLZuZM8bePHx8cN8off1h7+vGE0V9VIRiZnS03+L59wyP6X30lT7ETJ1rfp6pl8ESP6K9f\nL6aUp/yqaobxQw61P9/MxIliFU+ZIhk9KSniAw/Hk0erVvJqxb0DeBZY43Hc/NTQtq3EKLKyxDIP\ntPJkcbGIvmE1BsONYoiNt6cfT1SlAVrGZEEpKeLT9+ais5sZM6Q/rrjC+j6JiXKzOnEieO3yBUui\nT0RDiGgLEW0nokdcrG9HROlEtIGIvieiNqZ1bYnoGyLKJKLNRNTevub7QAQFcQ2qguj37ClB3Zdf\nlkfZjz4K3mQp3rBq6VsRsaws11lAgwYBzz0naYP/8z/+txUQqzE3tzztz58smT17ZDCaO4ybl9Up\nEl1RlYquZWfLk21KiribfvstdOfOywM++0zqT9WubX0/d1Ochguvok9ENQG8CeBSAEkARhNRktNm\nLwOYw8zdAUwG8Lxp3RwALzFzZwC9AITeXigqkohjhARxDdLSRJjCeVlEwN13i+B/+GFwJma3SlKS\nnN8oteAOK+6drCy5mbrK7n34YWDoUODRR32fW8DM9OnSFsNqTEiQwXa+pCLefr
tM3OFO+LOyZFRt\ns2b+t7OqWPqlpeWjz43JSkIZzJ09W2Ik3nLznXGulRVurFj6vQBsZ+YdzFwEYD6AYU7bJAEwBuUv\nN9Y7bg61mHkZADDzUWYutKXlvpCVJcIfYZZ+UpII1/nnh7cdEyZIUPSSS8LbjsceEy+eN9dSgwYi\n5t4sfXdPDEQyEvPEifIyE76SmyvVSs1Wo5UAszPZ2ZKYNny465tFoJk7QNUpurZnj8QnOnaUv4YN\nQ+fXLy0V107//r6PdzCesqqKX9+K6LcGsMv0OcexzEwGgOGO91cBOI2ImgE4C8AhIvqYiNYR0UuO\nJ4fQYoxDT3J+QKn+GLVRwo1zLfFwEBNjbdJvIs/+89JSydDxJJaGS83fsgmG1XjzzeXL/HGj5OWJ\nv37dOrH6neMMdoh+o0ZyYwq3e8fo6w4dJPh9zjmhE/3vv5fz+xLANahXT1I3q5PoW+FBAP2JaB2A\n/gB2AygBUAtAP8f6ngA6AhjnvDMRTSKi1US0eq8vZo5VDh6U10CecZWIwpP//M8/xYr3FPw0MqbM\neeNWKS2VYl0DBlS0Gn11oxQXy7bXXCMDx2bPBqZNK19vDP8PVPS93SRDhdHXRt+npEioLhAXm1Wm\nT5esoeHDvW/riqqUwWNF9HcDMGektnEsK4OZc5l5ODOfA+BRx7JDkKeC9Q7XUDGATwCkOJ+Amacx\ncxozp8UFw3Q9dEheGze2/9hKtcRTeqSn6RYN2rSROIY/lr5hNTr7hn11o+Tni2XfsiXw5JOSBXTX\nXcAvv8h6YzxBIJk75raFW/R37JAbkDH/UUqK3JyDLab79wMffyzZaf4mKRiib6UcebCxIvqrAHQi\nog5EFAPgWgBLzBsQUXMiMo71DwAzTfs2JiJDyS8EsDnwZvtIQYH8QuvXD/mplaqJJ8vVSoGyWrUk\nhdMfS3/6dHGHXX11xeW+ljE25g9o1Uq+3nPnys3o6qvl2qzcvKxSFervZGfLtRqD8EIVzH3/fQkJ\n+hrANZOYKKnNVmecCyZeRd9hod8J4GsAmQAWMvNvRDSZiK50bDYAwBYi2gogAcCzjn1LIK6ddCLa\nCIAATLf9Krxx6JBY+dV4tizFXgzRd5Vrn5VVPgGGJzp29N3SN6zG66+vnBlUv778WbWoDdE3UlWb\nNpWU2f37ZbT0pk1yjrZtfWujK6qKe8c8EPGss4I/MpdZAri9egHdu/t/nKqUwWOpFBYzfwHgC6dl\nT5jeLwKwyM2+ywAE0F02cOiQtQifEjUkJEie96FDlYPQVoOfHTpIiV1f8GY1+uJGcRZ9QIKb77wD\n3Hgj8NNPch12TKNptIs5fLbTjh3AhReWf65ZU9KVgyn6v/wiYwHMsRJ/MFxsWVnAxRcH3q5ACEH9\nwyqAYekrigNz0NSV6A8d6v0YHTvK/kePShqoFd5917PV6IsbJS9PBNh5kPnYscDKlcDbb9vj2jHa\ndfKklNuwy37KypKaRq++6r0C6MmT4hpxLjmSkiK1cEpLXc+JbIXvvwdee821v33rVnn6uvZa/45t\nEB8vEuQp/vDsszJO47nn/L8WK0RHGQYVfcUJd5kyBw7IMivBTyNt09OIWDP794vLxdmX79wuXyz9\n5s1djw6dOlUCj9ddZ+1Y3rB7gFZBAXDllTJ5zMqV3rf/4w95ynAefZ6aKjOmbdvmXzu2bpUxFz//\nLFlbzn+xsRIkP+00/45v4DzFqTNffSUZWDk5wX+Sig5Lv6BAHICK4sDdqFxfZpkyrM4dO2RaRW8Y\n2TSeJm5LSAB+/dX7sQARfbNrx0xMjMxcZhfmzCJ/J2MxKC0V95MRD1m7VgY9ecI5XdMgxZELuGaN\n76Umjh6VFMyYGOlzO2IfnkhMBL7+uvLy7Gy5OXfrJm6kYIu+WvpKVOLOcvVF9A2r02oGj5Vjx8fL\niFwrqX2eRN9u7LT0X3hBJo55+WWgdWtrPn
nzwCwznTuLa8hXvz6zxFUyM6WIXrAFH5D/e16e2KAG\nx4/LjYdZAvz16gW/HdEj+hrIVUw0by4WlSvRj4mxVkOoeXPx5VvN4MnKEoHydOz4eBmpe+CA9+Pl\n5oZe9ANN2/zmGymXce21wD33iKVuJeUyO1v6zvl6a9eW+Iivoj91qpQEf+45KaIXCszBXECE/rbb\npHTIBx8AZ5wRmnZEvugXF8tznFr6iolatSQt05V756yzrGW8EInladXSz8z0fmyrA7RKS6XtoRJ9\nYwxBIJb+zp0y13CXLpIGSSSin5UlfnlP7NghN0tXAc6UFBF9q6Wu//Mf4KGHxMJ++GFfr8J/nKdO\n/L//k7mRn3wSuPzy0LUj8kXfeJZS0VeccBU09bVWjS+5+laObdWi3rdP7BljDoFgExMjWU7+iv7x\n4xLALi4WN4YxTjI1VcTamOPIHc45+mZSU+VnbuXmu3s3MHKkxCVmzQpt+mmHDvJkkpUlgeN77gEu\nuwx44gnv+9qJir4StTiL/smTIuC+lC0wLH1vVqZxbDtq/QOuc/SDjb+jcpmBO+4Qa/yDDyoGgo1A\nrDf3jDF5iiusHqOoCBgxQkbGfvyxVOkMJcYUpz/+KO1o00bGbQQzPdMVkZ+9o3V3FDckJFQUiu3b\nxZ/uq6VfWCgi7WlStu3bxSVjdSrHqij6CQkSR7ASbzAzb55Y1Y89Vnn8Q6tWcjPx5Nc/eFB+xu4s\n/a5dxV23Zo2IqTvuu0/SQxcutKcekT8kJgKLF0sq6M8/yyjqUBM9oq+BXMUJZ0vfl8wdA3MGjyfR\ntzptYdOmYvl5s6jDIfotW0rw059itZdcAjz1VOXlhl/fk5VuuG3cWfp16ojwezrGnDkyJuDBB6Uq\nabjo0kVEf9q08E1+FD2ir5a+4kR8vHj/Tp4U4TCE2ZchHeYSy336uN/OqLni7dg1akjQtCpa+s88\nIxOS+0psrARw3QWwU1KAZcukYqarmcrc5eg7H2PJEtdlItatA265BRg4EHj+edf7h4q775Y+vPTS\n8LVBRV+JWsyulNNPF9E//XTrJRWA8vRLb8HcrCzJBbdS6NXKqNy8PPlKuxLJYHHmmSJadpOSIm61\njRtlzmVn3OXom0lNlUnmc3Lkf2hw4IAEkJs3l3z8WmFWvLi48Ao+oIFcJYpxDpr6M8tUvXpy8/CW\nOeLLsa0UXQvlwKxgY5RIdueeyc4Wt5cnD62rYG5JCTBmjGTsLFpU/v+OdiJf9A8dkue9QItnKBGH\nWfSZRZj9CfB5S9s0jm1V9K1kyYRyYFawaddO0kHdBXM9Ze4YdO8urjHzMZ5+WmravP460Lu3fe2t\n7kSH6DdsGPq8KKXKY66/k5srY/j8qUrpbYDW7t0y+MgX0Y8mS99bMNdTjr5BvXpywzaOsXQpMGWK\nTDw/aZKtza32RL4Sat0dxQ1mSz+QWaY6dpSKjKdOuV5vHNvqU0RCgtyACgtdr2cW0Q/VwKxQkJIi\nPv2ioorLS0tlJK83Sx8QN9HatZIee8MNcsy33tK5k5yJfNEvKFDRV1xSv77MefrXX/6laxp06CDi\ntGuX6/W+HtvbAK1DhyTjKFIsfUAEuqgI2Ow0mWpuriz3Zukbx8jLk0BpzZoyi5i/c9pGMpEv+mrp\nK24wJiDJzxdhbtgQaNHC9+OYSyy7IitLgpCe8vjNeBP9cKRrBht3wVwrmTsGRjD3999lQJiVonnR\niIq+EtUY/nMjiOuPK8BbiWUjiGv12N5G5Uai6J9xhuRaOAdzreToG5xzjvTdCy+Ef0rCqkx05Onr\naFzFDfHxktu9dy9w0UX+HaNNG8n/9mTpDx7sW5sA9xk8kSj6NWqIaLuy9Ims1btv0EDcQZqz4ZnI\n7x619BUPJCSIsOze7f98sjVrStqhK0v/8GERIl+OHY3uHUDcMxkZUonTIDtbbqoxMdaOoYLvncju\notJS+d
Wp6CtuiI+XrwgQ2CTi7nL1rdbcMVO3rrg6PIl+/fqRN/QkJUVKMBvTSgLSp1ZcO4p1Ilv0\njxyR/DYVfcUN5lGagYi+u1x9f7OCPA3QiqSBWWZcBXOzs60FcRXrRLboa90dxQtG0LRWrcCmq+vY\nUSY2OXKk4vKsLDm2r9aqpwFakZajb3D22fKUYwRzT5yQG5xa+vYSHaKvgVzFDYalf+aZMsmFv7jL\n4MnK8u/YnurvRNJoXDM1a0q5YcPS37lTXtXSt5foEH219BU3GKIfiGsHcJ+r708RN6NdnrJ3IlH0\nAfHrr1sn4Thf0jUV60S26GuFTcULhnsnUNF3ZemfOgVs2+a/6O/bJ5UizRw9Kn+RLPpHj0opBV8G\nZinWiWzRV0tf8UJcnFRjHDcusOM0bSojes2W/o4dkn7oT+XOhASxdp2nJozUdE0DI5i7Zo3cQGNj\n/RslrbgnsgdnqegrXiACnnjCnuM4Z/AEUs/HPEArLq58eaSLflKS5OSvXVteUlkLptlLdIh+qKe9\nV6KSDh2ArVvLPxuif/bZvh/L3QCtSBf92rWlNv7atfKUo/58+4l8906DBuGfI02JCjp2FEufWT5n\nZYk4+5M8Fq2iD5TX1rcyeYriO5Et+lpWWQkhHTrIiFIj6yYz0/8AsXmCFzN5eTKJe5Mm/rezqpOS\nIvba4cNq6QeDyBZ9rbujhBBz2qavUyQ606SJ5K07W/rGaNxI9nMbwVxALf1goKKvKDZhTtvMz5cH\nTX8ydwApHBYX59q9E8muHQDo2rXcI6uWvv1EvujraFwlRBiTduzYEVjmjoExwYuZaBD92FigSxd5\nr5a+/VgSfSIaQkRbiGg7ET3iYn07Ikonog1E9D0RtXFa35CIcojof+1quCXU0ldCSN26IsjZ2faI\nvqv6O9Eg+gDQt6+UVI60SqJVAa+iT0Q1AbwJ4FIASQBGE1GS02YvA5jDzN0BTAbwvNP6KQB+CLy5\nPqKBXCXEGCWWMzOl/HHr1v4fy1n0T5wADh6MDtF/4QVgxYpwtyIysWLp9wKwnZl3MHMRgPkAhjlt\nkwTgO8f75eb1RJQKIAHAN4E31weY1dJXQo4xQCsrS/LzA5nUw9m9s2ePvEaD6DdsKBPTKPZj5SvZ\nGsAu0+ccxzIzGQCGO95fBeA0ImpGRDUAvALgwUAb6jPHjknhEhV9JYR07Ajs2gVs3Bh4PZ/4eKCw\nUL7KQHTk6CvBx65A7oMA+hPROgD9AewGUALgdgBfMHOOp52JaBIRrSai1Xv37rWnRVpWWQkDHTrI\nQ2Zenv+ZOwbOA7RU9BU7sDJUdTeA002f2ziWlcHMuXBY+kTUAMDVzHyIiM4F0I+IbgfQAEAMER1l\n5kec9p8GYBoApKWlsb8XUwGtu6OEAXOKYaCWvnmAVocOKvqKPVgR/VUAOhFRB4jYXwvgOvMGRNQc\nwAFmLgXwDwAzAYCZx5i2GQcgzVnwg4aWVVbCgDnF0A73DlBu6efmyoAtcwE2RfEVr+4dZi4GcCeA\nrwFkAljIzL8R0WQiutKx2QAAW4hoKyRo+2yQ2msdtfSVMNCqlVSJrFFDZswKBFfunRYtAgsOK4ql\nSmTM/AWAL5yWPWF6vwjAIi/HmA1gts8t9BcVfSUM1KwpWSelpTLIKBDM5ZWB6MnRV4JL5Jaf1ECu\nEiZGjbLHGo+NldRFs6Xftm3gx1WiGxV9RbGZKVPsO5Z5gFZeHtC7t33HVqKTyPUOFhTIuPg6dcLd\nEkXxG2OC9OJiYO9ede8ogRO5oq+jcZUIICFBLP38fMn/V9FXAiWyRV9dO0o1x3DvaI6+YheRLfpq\n6SvVnPh4YN8+Ke0AqOgrgaOiryhVmIQEcets3CifW7UKb3uU6k/kir6WVVYiACNXPyNDpkg0SjMo\nir9Eruirpa9EAGbRj4srn0ZQUfwlMkXfqKWvgVylmmNY9r//rv58xR4i
U/RPnACKitTSV6o9hqUP\nqOgr9hCZoq91d5QIoXHjcpeOir5iB5Ep+lpWWYkQatQot/ZV9BU7iEzRV0tfiSBU9BU7iWzR10Cu\nEgGo6Ct2Etmir5a+EgEYGTw6MEuxAxV9RaniqKWv2Elkir4GcpUIoksXoFkzFX3FHiJT9A8dkolK\nA52vTlGqADfeCPz5p04NodhD5Ip+o0ZSrERRqjk1agD16oW7FUqkELmir64dRVGUSqjoK4qiRBGR\nKfpaVllRFMUlkSn6aukriqK4JHJFX0fjKoqiVCJyRV8tfUVRlEpEnugXFQHHj6voK4qiuCDyRF9H\n4yqKorgl8kRf6+4oiqK4JXJFXwO5iqIolYhc0VdLX1EUpRIq+oqiKFFErXA3wHY0kKtEEKdOnUJO\nTg5OnDgR7qYoVYTY2Fi0adMGtWvX9mv/yBN9tfSVCCInJwennXYa2rdvD9KqsVEPM2P//v3IyclB\nhw4d/DpGZLp3atYE6tcPd0sUJWBOnDiBZs2aqeArAAAiQrNmzQJ68otM0dda+koEoYKvmAn0+2BJ\n9IloCBFtIaLtRPSIi/XtiCidiDYQ0fdE1MaxvAcR/UxEvznWjQqotVbQEgyKoihu8Sr6RFQTwJsA\nLgWQBGA0ESU5bfYygPK7G2EAABD7SURBVDnM3B3AZADPO5YXAhjLzF0ADAEwlYiCq8haVllRbGP/\n/v3o0aMHevTogRYtWqB169Zln4uKiiwdY/z48diyZYvHbd58803MnTvXjiYrXrASyO0FYDsz7wAA\nIpoPYBiAzaZtkgDc73i/HMAnAMDMW40NmDmXiP4CEAfgUOBNd4Na+opiG82aNcP69esBAE899RQa\nNGiABx98sMI2zAxmRo0arm3IWbNmeT3PHXfcEXhjQ0xxcTFq1ap+uTBW3DutAewyfc5xLDOTAWC4\n4/1VAE4jombmDYioF4AYAL87n4CIJhHRaiJavXfvXqttd42WVVYilXvvBQYMsPfv3nv9asr27duR\nlJSEMWPGoEuXLsjLy8OkSZOQlpaGLl26YPLkyWXbnn/++Vi/fj2Ki4vRuHFjPPLII0hOTsa5556L\nv/76CwDw2GOPYerUqWXbP/LII+jVqxfOPvts/PTTTwCAY8eO4eqrr0ZSUhJGjBiBtLS0shuSmSef\nfBI9e/ZE165dceutt4KZAQBbt27FhRdeiOTkZKSkpGDnzp0AgOeeew7dunVDcnIyHn300QptBoA9\ne/bgzDPPBADMmDEDf/vb3zBw4EBccsklOHz4MC688EKkpKSge/fu+Oyzz8raMWvWLHTv3h3JyckY\nP348CgoK0LFjRxQXFwMADh48WOFzqLArkPsggP5EtA5AfwC7AZQYK4moJYD3AYxn5lLnnZl5GjOn\nMXNaXFxcYC1RS19RQkJWVhbuu+8+bN68Ga1bt8YLL7yA1atXIyMjA8uWLcPmzZsr7VNQUID+/fsj\nIyMD5557LmbOnOny2MyMX3/9FS+99FLZDeSNN95AixYtsHnzZjz++ONYt26dy33vuecerFq1Chs3\nbkRBQQG++uorAMDo0aNx3333ISMjAz/99BPi4+OxdOlSfPnll/j111+RkZGBBx54wOt1r1u3Dh9/\n/DHS09NRt25dfPLJJ1i7di2+/fZb3HfffQCAjIwMvPjii/j++++RkZGBV155BY0aNcJ5551X1p55\n8+bhmmuuCfnTgpWz7QZwuulzG8eyMpg5Fw5Ln4gaALiamQ85PjcE8DmAR5l5pR2N9oiKvhKpOCzh\nqsIZZ5yBtLS0ss/z5s3Du+++i+LiYuTm5mLz5s1ISqoY/qtbty4uvfRSAEBqaip+/PFHl8cePnx4\n2TaGRb5ixQr8/e9/BwAkJyejS5cuLvdNT0/HSy+9hBMnTmDfvn1ITU1Fnz59sG/fPgwdOhSADHAC\ngG+//RY33XQT6tatCwBo2rSp1+u+
+OKL0aRJEwByc3rkkUewYsUK1KhRA7t27cK+ffvw3XffYdSo\nUWXHM14nTJiA119/HVdccQVmzZqF999/3+v57MaKpb8KQCci6kBEMQCuBbDEvAERNSci41j/ADDT\nsTwGwGJIkHeRfc12Q3ExcPSoir6ihID6prEw27Ztw2uvvYbvvvsOGzZswJAhQ1zmksfExJS9r1mz\nplvXRp06dbxu44rCwkLceeedWLx4MTZs2ICbbrrJr5z2WrVqobRUnBLO+5uve86cOSgoKMDatWux\nfv16NG/e3OP5+vfvj61bt2L58uWoXbs2EhMTfW5boHgVfWYuBnAngK8BZAJYyMy/EdFkIrrSsdkA\nAFuIaCuABADPOpaPBHABgHFEtN7x18Puiyjj8GF5VdFXlJBy+PBhnHbaaWjYsCHy8vLw9ddf236O\n8847DwsXLgQAbNy40aX76Pjx46hRowaaN2+OI0eO4KOPPgIANGnSBHFxcVi6dCkAEfLCwkIMHjwY\nM2fOxPHjxwEABw4cAAC0b98ea9asAQAsWuTeXi0oKEB8fDxq1aqFZcuWYfducYJceOGFWLBgQdnx\njFcAuP766zFmzBiMHz8+oP7wF0vOJGb+AsAXTsueML1fBKBSzzDzBwA+CLCN1tGyyooSFlJSUpCU\nlITExES0a9cO5513nu3nuOuuuzB27FgkJSWV/TVy+q03a9YMN954I5KSktCyZUv07t27bN3cuXNx\nyy234NFHH0VMTAw++ugjXHHFFcjIyEBaWhpq166NoUOHYsqUKXjooYcwatQovP3222XuKFfccMMN\nGDp0KLp164ZevXqhU6dOAMT99PDDD+OCCy5ArVq1kJqainfffRcAMGbMGEyePBmjRgV/2JIryIhs\nVxXS0tJ49erV/u28di2Qmgp88gkwbJi9DVOUMJCZmYnOnTuHuxlVguLiYhQXFyM2Nhbbtm3DxRdf\njG3btlW7tMn58+fj66+/tpTK6g5X3wsiWsPMaW52KaN69ZY3tNiaokQsR48exaBBg1BcXAxmxjvv\nvFPtBP+2227Dt99+W5bBEw6qV495Q8sqK0rE0rhx4zI/e3Xl7bffDncTIqzgmlr6iqIoHolM0ddA\nrqIoiksiT/SJgIYNw90SRVGUKknkiX7DhoCbwk+KoijRTmSpo5ZVVhRbGThwYKWBVlOnTsVtt93m\ncb8GDRoAAHJzczFixAiX2wwYMADe0rOnTp2KwsLCss+XXXYZDh0KXpHeaCCyRF8rbCqKrYwePRrz\n58+vsGz+/PkYPXq0pf1btWrlcUSrN5xF/4svvkDjamTYMXNZOYeqQuSJfjX6QiiKL4SjsvKIESPw\n+eefl02YsnPnTuTm5qJfv35lefMpKSno1q0bPv3000r779y5E127dgUgJRKuvfZadO7cGVdddVVZ\n6QNA8teNssxPPvkkAOD1119Hbm4uBg4ciIEDBwKQ8gj79u0DALz66qvo2rUrunbtWlaWeefOnejc\nuTMmTpyILl264OKLL65wHoOlS5eid+/eOOecc3DRRRchPz8fgIwFGD9+PLp164bu3buXlXH46quv\nkJKSguTkZAwaNAiAzC/w8ssvlx2za9eu2LlzJ3bu3Imzzz4bY8eORdeuXbFr1y6X1wcAq1atQt++\nfZGcnIxevXrhyJEjuOCCCyqUjD7//PORkZHh+R/lA5GVp3/oENCuXbhboSgRQ9OmTdGrVy98+eWX\nGDZsGObPn4+RI0eCiBAbG4vFixejYcOG2LdvH/r06YMrr7zS7Ryub7/9NurVq4fMzExs2LABKSkp\nZeueffZZNG3aFCUlJRg0aBA2bNiAu+++G6+++iqWL1+O5s2bVzjWmjVrMGvWLPzyyy9gZvTu3Rv9\n+/dHkyZNsG3bNsybNw/Tp0/HyJEj8dFHH+H666+vsP/555+PlStXgogwY8YM/Otf/8Irr7yCKVOm\n
oFGjRti4cSMAqXm/d+9eTJw4ET/88AM6dOhQoY6OO7Zt24b33nsPffr0cXt9iYmJGDVqFBYsWICe\nPXvi8OHDqFu3Lm6++WbMnj0bU6dOxdatW3HixAkkJyf79H/zROSJvo2doyhViXBVVjZcPIboGzVk\nmBn//Oc/8cMPP6BGjRrYvXs38vPz0aJFC5fH+eGHH3D33XcDALp3747u3buXrVu4cCGmTZuG4uJi\n5OXlYfPmzRXWO7NixQpcddVVZRUvhw8fjh9//BFXXnklOnTogB49pK6juTSzmZycHIwaNQp5eXko\nKipChw4dAEipZbM7q0mTJli6dCkuuOCCsm2slF9u165dmeC7uz4iQsuWLdGzZ08AQENH1uE111yD\nKVOm4KWXXsLMmTMxbtw4r+fzhchy72ggV1FsZ9iwYUhPT8fatWtRWFiI1NRUAFLAbO/evVizZg3W\nr1+PhIQEv8oYZ2dn4+WXX0Z6ejo2bNiAyy+/3K/jGBhlmQH3pZnvuusu3Hnnndi4cSPeeeedgMsv\nAxVLMJvLL/t6ffXq1cPgwYPx6aefYuHChRgzZozPbfNE5Ih+aamIvgZyFcVWGjRogIEDB+Kmm26q\nEMA1ygrXrl0by5cvxx9//OHxOBdccAE+/PBDAMCmTZuwYcMGAFKWuX79+mjUqBHy8/Px5Zdflu1z\n2mmn4ciRI5WO1a9fP3zyyScoLCzEsWPHsHjxYvTr18/yNRUUFKB1a5n19b333itbPnjwYLz55ptl\nnw8ePIg+ffrghx9+QHZ2NoCK5ZfXrl0LAFi7dm3ZemfcXd/ZZ5+NvLw8rFq1CgBw5MiRshvUhAkT\ncPfdd6Nnz55lE7bYReSI/pEjALNa+ooSBEaPHo2MjIwKoj9mzBisXr0a3bp1w5w5c7xOCHLbbbfh\n6NGj6Ny5M5544omyJ4bk5GScc845SExMxHXXXVehLPOkSZMwZMiQskCuQUpKCsaNG4devXqhd+/e\nmDBhAs455xzL1/PUU0/hmmuuQWpqaoV4wWOPPYaDBw+ia9euSE5OxvLlyxEXF4dp06Zh+PDhSE5O\nLiuJfPXVV+PAgQPo0qUL/vd//xdnnXWWy3O5u76YmBgsWLAAd911F5KTkzF48OCyJ4DU1FQ0bNgw\nKDX3I6e08oEDwO23A+PHA5dcYn/DFCUMaGnl6CQ3NxcDBgxAVlYWargYbBpIaeXIsfSbNgXmz1fB\nVxSlWjNnzhz07t0bzz77rEvBD5TIyt5RFEWp5owdOxZjx44N2vEjx9JXlAilqrlglfAS6PdBRV9R\nqjCxsbHYv3+/Cr8CQAR///79iI2N9fsY6t5RlCpMmzZtkJOTg71794a7KUoVITY2Fm3atPF7fxV9\nRanC1K5du2wkqKLYgbp3FEVRoggVfUVRlChCRV9RFCWKqHIjcoloLwDPRTw80xzAPpuaU93RvqiI\n9kdFtD/KiYS+aMfMcd42qnKiHyhEtNrKUORoQPuiItofFdH+KCea+kLdO4qiKFGEir6iKEoUEYmi\nPy3cDahCaF9URPujItof5URNX0ScT19RFEVxTyRa+oqiKIobVPQVRVGiiIgRfSIaQkRbiGg7ET0S\n7vaEGiKaSUR/EdEm07KmRLSMiLY5Xu2dbLOKQkSnE9FyItpMRL8R0T2O5dHaH7FE9CsRZTj642nH\n8g5E9IvjN7OAiGLC3dZQQUQ1iWgdEX3m+Bw1fRERok9ENQG8CeBSAEkARhNRUnhbFXJmAxjitOwR\nAOnM3AlAuuNzNFAM4AFmTgLQB8Adju9DtPbHSQAXMnMygB4AhhBRHwAvAvgfZj4TwEEAN4exjaHm\nHgCZps9R0xcRIfoAegHYzsw7mLkIwHwAw8LcppDCzD8AOOC0eBiA9xzv3wPwt5A2Kkwwcx4zr3W8\nPwL5cbdG9PYHM/NRx8fajj8GcCGARY7lUdMfRNQGwOUAZjg+E6
KoLyJF9FsD2GX6nONYFu0kMHOe\n4/0eAAnhbEw4IKL2AM4B8AuiuD8c7oz1AP4CsAzA7wAOMXOxY5No+s1MBfAwgFLH52aIor6IFNFX\nvMCSmxtV+blE1ADARwDuZebD5nXR1h/MXMLMPQC0gTwZJ4a5SWGBiK4A8Bczrwl3W8JFpEyishvA\n6abPbRzLop18ImrJzHlE1BJi5UUFRFQbIvhzmfljx+Ko7Q8DZj5ERMsBnAugMRHVcli40fKbOQ/A\nlUR0GYBYAA0BvIYo6otIsfRXAejkiMDHALgWwJIwt6kqsATAjY73NwL4NIxtCRkOH+27ADKZ+VXT\nqmjtjzgiaux4XxfAYEicYzmAEY7NoqI/mPkfzNyGmdtDdOI7Zh6DKOqLiBmR67hzTwVQE8BMZn42\nzE0KKUQ0D8AASInYfABPAvgEwEIAbSHlqkcys3OwN+IgovMB/AhgI8r9tv+E+PWjsT+6Q4KTNSGG\n3kJmnkxEHSFJD00BrANwPTOfDF9LQwsRDQDwIDNfEU19ETGiryiKongnUtw7iqIoigVU9BVFUaII\nFX1FUZQoQkVfURQlilDRVxRFiSJU9BVFUaIIFX1FUZQo4v8BkxrzJmrL9XkAAAAASUVORK5CYII=\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<Figure size 432x288 with 0 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "jkSn6CrmCJvl",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        ""
      ],
      "execution_count": 0,
      "outputs": []
    }
  ]
}