{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 11956,
     "status": "ok",
     "timestamp": 1663734664762,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "ZQ6M_mFTW_G9",
    "outputId": "089652ea-4167-4ebb-b2a7-2d1cf581548a"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"
     ]
    }
   ],
   "source": [
    "from google.colab import drive\n",
    "drive.mount('/content/drive')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 403,
     "status": "ok",
     "timestamp": 1663734667417,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "hR-PtdChpRcK",
    "outputId": "eb030df7-07c9-4e5c-cc48-80f208fbdd96"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "/content/drive/MyDrive/cNN-Based-Hyperspectral-Image-Classification-master\n"
     ]
    }
   ],
   "source": [
    "%cd /content/drive/MyDrive/cNN-Based-Hyperspectral-Image-Classification-master"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 543,
     "status": "ok",
     "timestamp": 1663490447389,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "4mSzs1nZeax8",
    "outputId": "0070de17-8320-4467-9241-252f4bdeaf43"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Colab only includes TensorFlow 2.x; %tensorflow_version has no effect.\n"
     ]
    }
   ],
   "source": [
    "%tensorflow_version 2.x"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 3592,
     "status": "ok",
     "timestamp": 1663734674059,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "q99oZ_fMNRJk",
    "outputId": "a73764c5-68f0-4a4e-ea66-b16c72bd7dbb"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
      "Collecting spectral\n",
      "  Downloading spectral-0.23-py3-none-any.whl (212 kB)\n",
      "\u001b[K     |████████████████████████████████| 212 kB 8.2 MB/s \n",
      "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from spectral) (1.21.6)\n",
      "Installing collected packages: spectral\n",
      "Successfully installed spectral-0.23\n"
     ]
    }
   ],
   "source": [
    "!pip install spectral"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "executionInfo": {
     "elapsed": 7873,
     "status": "ok",
     "timestamp": 1663734681927,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "-UhNFlMolVCL"
   },
   "outputs": [],
   "source": [
    "import keras\n",
    "from keras.layers import Conv2D, Conv3D, Flatten, Dense, Reshape, BatchNormalization, Activation, GlobalAveragePooling2D, Multiply\n",
    "from keras.layers import Dropout, Input\n",
    "from keras.models import Model\n",
    "from keras.optimizers import adam_v2\n",
    "from keras.callbacks import ModelCheckpoint\n",
    "from keras.utils import np_utils\n",
    "from keras.activations import sigmoid\n",
    "from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.metrics import confusion_matrix, accuracy_score, classification_report, cohen_kappa_score\n",
    "import time\n",
    "\n",
    "from operator import truediv\n",
    "#import tensorflow as tf\n",
    "#from plotly.offline import init_notebook_mode\n",
    "from cbam import spatial_attention3D, spatial_attention2D, channel_attention2D, se_block2D, cc\n",
    "from helper import *\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import scipy.io as sio\n",
    "import os\n",
    "import spectral\n",
    "\n",
    "#init_notebook_mode(connected=True)\n",
    "#%matplotlib inline\n",
    "dataset = 'IP' #备选项：WHU_Hi_HanChuan, WHU_Hi_LongKou, WHU_Hi_HongHu,Indian_pines 修改项：数据集、路径、output_units\n",
    "def loadTestData(name):\n",
    "    data_path = os.path.join(os.getcwd(),'data')\n",
    "    data = sio.loadmat(os.path.join(data_path, 'Indian_pines_Test_patch_17.mat'))[\"test_patch\"]\n",
    "    labels = sio.loadmat(os.path.join(data_path, 'Indian_pines_Test_patch_17.mat'))[\"test_labels\"]\n",
    "    \n",
    "    return data, labels\n",
    "X, y = loadTestData(dataset)\n",
    "\n",
    "X.shape, y.shape\n",
    "\n",
    "\n",
    "def loadTrainData(name):\n",
    "    data_path = os.path.join(os.getcwd(),'data')\n",
    "    data = sio.loadmat(os.path.join(data_path, 'Indian_pines_Train_patch_17.mat'))[\"train_patch\"]\n",
    "    labels = sio.loadmat(os.path.join(data_path, 'Indian_pines_Train_patch_17.mat'))[\"train_labels\"]\n",
    "    \n",
    "    return data, labels\n",
    "\n",
    "def loadValData(name):\n",
    "    data_path = os.path.join(os.getcwd(),'data')\n",
    "    data = sio.loadmat(os.path.join(data_path, 'Indian_pines_Val_patch_17.mat'))[\"val_patch\"]\n",
    "    labels = sio.loadmat(os.path.join(data_path, 'Indian_pines_Val_patch_17.mat'))[\"val_labels\"]\n",
    "    \n",
    "    return data, labels\n",
    "Xtest=X\n",
    "ytest=y\n",
    "Xtrain, ytrain = loadTrainData(dataset)\n",
    "Xval, yval = loadValData(dataset)\n",
    "\n",
    "Xtrain.shape, Xtest.shape, Xval.shape, ytrain.shape, ytest.shape, yval.shape\n",
    "\n",
    "\n",
    "#variance_epsilon = 1e-5\n",
    "K = Xtrain.shape[3]\n",
    "S = Xtrain.shape[2]\n",
    "Xtrain = Xtrain.reshape(-1, S, S, K, 1)\n",
    "\n",
    "L = K\n",
    "output_units = 16 #分类数：LK-9 HC-16 HH-22 IP-16\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 165030,
     "status": "ok",
     "timestamp": 1663735017013,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "QdutsiZblxXz",
    "outputId": "c07a6135-03c3-4130-b858-b4f21344897b"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_1\"\n",
      "__________________________________________________________________________________________________\n",
      " Layer (type)                   Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      " input_2 (InputLayer)           [(None, 17, 17, 16,  0           []                               \n",
      "                                 1)]                                                              \n",
      "                                                                                                  \n",
      " conv3d_9 (Conv3D)              (None, 17, 17, 16,   2           ['input_2[0][0]']                \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " conv3d_10 (Conv3D)             (None, 17, 17, 16,   28          ['input_2[0][0]']                \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " conv3d_11 (Conv3D)             (None, 17, 17, 16,   126         ['input_2[0][0]']                \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " activation_10 (Activation)     (None, 17, 17, 16,   0           ['conv3d_9[0][0]']               \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " activation_11 (Activation)     (None, 17, 17, 16,   0           ['conv3d_10[0][0]']              \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " activation_12 (Activation)     (None, 17, 17, 16,   0           ['conv3d_11[0][0]']              \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " tf.concat_1 (TFOpLambda)       (None, 17, 17, 48,   0           ['activation_10[0][0]',          \n",
      "                                1)                                'activation_11[0][0]',          \n",
      "                                                                  'activation_12[0][0]']          \n",
      "                                                                                                  \n",
      " conv3d_12 (Conv3D)             (None, 15, 15, 46,   224         ['tf.concat_1[0][0]']            \n",
      "                                8)                                                                \n",
      "                                                                                                  \n",
      " activation_13 (Activation)     (None, 15, 15, 46,   0           ['conv3d_12[0][0]']              \n",
      "                                8)                                                                \n",
      "                                                                                                  \n",
      " lambda_6 (Lambda)              (None, 15, 15, 46,   0           ['activation_13[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " lambda_7 (Lambda)              (None, 15, 15, 46,   0           ['activation_13[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " concatenate_3 (Concatenate)    (None, 15, 15, 46,   0           ['lambda_6[0][0]',               \n",
      "                                2)                                'lambda_7[0][0]']               \n",
      "                                                                                                  \n",
      " conv3d_13 (Conv3D)             (None, 15, 15, 46,   54          ['concatenate_3[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " multiply_5 (Multiply)          (None, 15, 15, 46,   0           ['activation_13[0][0]',          \n",
      "                                8)                                'conv3d_13[0][0]']              \n",
      "                                                                                                  \n",
      " conv3d_14 (Conv3D)             (None, 13, 13, 44,   3472        ['multiply_5[0][0]']             \n",
      "                                16)                                                               \n",
      "                                                                                                  \n",
      " activation_14 (Activation)     (None, 13, 13, 44,   0           ['conv3d_14[0][0]']              \n",
      "                                16)                                                               \n",
      "                                                                                                  \n",
      " lambda_8 (Lambda)              (None, 13, 13, 44,   0           ['activation_14[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " lambda_9 (Lambda)              (None, 13, 13, 44,   0           ['activation_14[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " concatenate_4 (Concatenate)    (None, 13, 13, 44,   0           ['lambda_8[0][0]',               \n",
      "                                2)                                'lambda_9[0][0]']               \n",
      "                                                                                                  \n",
      " conv3d_15 (Conv3D)             (None, 13, 13, 44,   54          ['concatenate_4[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " multiply_6 (Multiply)          (None, 13, 13, 44,   0           ['activation_14[0][0]',          \n",
      "                                16)                               'conv3d_15[0][0]']              \n",
      "                                                                                                  \n",
      " conv3d_16 (Conv3D)             (None, 11, 11, 42,   13856       ['multiply_6[0][0]']             \n",
      "                                32)                                                               \n",
      "                                                                                                  \n",
      " activation_15 (Activation)     (None, 11, 11, 42,   0           ['conv3d_16[0][0]']              \n",
      "                                32)                                                               \n",
      "                                                                                                  \n",
      " lambda_10 (Lambda)             (None, 11, 11, 42,   0           ['activation_15[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " lambda_11 (Lambda)             (None, 11, 11, 42,   0           ['activation_15[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " concatenate_5 (Concatenate)    (None, 11, 11, 42,   0           ['lambda_10[0][0]',              \n",
      "                                2)                                'lambda_11[0][0]']              \n",
      "                                                                                                  \n",
      " conv3d_17 (Conv3D)             (None, 11, 11, 42,   54          ['concatenate_5[0][0]']          \n",
      "                                1)                                                                \n",
      "                                                                                                  \n",
      " multiply_7 (Multiply)          (None, 11, 11, 42,   0           ['activation_15[0][0]',          \n",
      "                                32)                               'conv3d_17[0][0]']              \n",
      "                                                                                                  \n",
      " reshape_5 (Reshape)            (None, 11, 11, 1344  0           ['multiply_7[0][0]']             \n",
      "                                )                                                                 \n",
      "                                                                                                  \n",
      " conv2d_2 (Conv2D)              (None, 9, 9, 128)    1548416     ['reshape_5[0][0]']              \n",
      "                                                                                                  \n",
      " batch_normalization_2 (BatchNo  (None, 9, 9, 128)   512         ['conv2d_2[0][0]']               \n",
      " rmalization)                                                                                     \n",
      "                                                                                                  \n",
      " activation_16 (Activation)     (None, 9, 9, 128)    0           ['batch_normalization_2[0][0]']  \n",
      "                                                                                                  \n",
      " global_average_pooling2d_2 (Gl  (None, 128)         0           ['activation_16[0][0]']          \n",
      " obalAveragePooling2D)                                                                            \n",
      "                                                                                                  \n",
      " global_max_pooling2d_2 (Global  (None, 128)         0           ['activation_16[0][0]']          \n",
      " MaxPooling2D)                                                                                    \n",
      "                                                                                                  \n",
      " reshape_6 (Reshape)            (None, 1, 1, 128)    0           ['global_average_pooling2d_2[0][0\n",
      "                                                                 ]']                              \n",
      "                                                                                                  \n",
      " reshape_7 (Reshape)            (None, 1, 1, 128)    0           ['global_max_pooling2d_2[0][0]'] \n",
      "                                                                                                  \n",
      " dense_7 (Dense)                (None, 1, 1, 16)     2064        ['reshape_6[0][0]',              \n",
      "                                                                  'reshape_7[0][0]']              \n",
      "                                                                                                  \n",
      " dense_8 (Dense)                (None, 1, 1, 128)    2176        ['dense_7[0][0]',                \n",
      "                                                                  'dense_7[1][0]']                \n",
      "                                                                                                  \n",
      " add_2 (Add)                    (None, 1, 1, 128)    0           ['dense_8[0][0]',                \n",
      "                                                                  'dense_8[1][0]']                \n",
      "                                                                                                  \n",
      " activation_17 (Activation)     (None, 1, 1, 128)    0           ['add_2[0][0]']                  \n",
      "                                                                                                  \n",
      " multiply_8 (Multiply)          (None, 9, 9, 128)    0           ['activation_16[0][0]',          \n",
      "                                                                  'activation_17[0][0]']          \n",
      "                                                                                                  \n",
      " conv2d_3 (Conv2D)              (None, 7, 7, 256)    295168      ['multiply_8[0][0]']             \n",
      "                                                                                                  \n",
      " batch_normalization_3 (BatchNo  (None, 7, 7, 256)   1024        ['conv2d_3[0][0]']               \n",
      " rmalization)                                                                                     \n",
      "                                                                                                  \n",
      " activation_18 (Activation)     (None, 7, 7, 256)    0           ['batch_normalization_3[0][0]']  \n",
      "                                                                                                  \n",
      " global_average_pooling2d_3 (Gl  (None, 256)         0           ['activation_18[0][0]']          \n",
      " obalAveragePooling2D)                                                                            \n",
      "                                                                                                  \n",
      " global_max_pooling2d_3 (Global  (None, 256)         0           ['activation_18[0][0]']          \n",
      " MaxPooling2D)                                                                                    \n",
      "                                                                                                  \n",
      " reshape_8 (Reshape)            (None, 1, 1, 256)    0           ['global_average_pooling2d_3[0][0\n",
      "                                                                 ]']                              \n",
      "                                                                                                  \n",
      " reshape_9 (Reshape)            (None, 1, 1, 256)    0           ['global_max_pooling2d_3[0][0]'] \n",
      "                                                                                                  \n",
      " dense_9 (Dense)                (None, 1, 1, 32)     8224        ['reshape_8[0][0]',              \n",
      "                                                                  'reshape_9[0][0]']              \n",
      "                                                                                                  \n",
      " dense_10 (Dense)               (None, 1, 1, 256)    8448        ['dense_9[0][0]',                \n",
      "                                                                  'dense_9[1][0]']                \n",
      "                                                                                                  \n",
      " add_3 (Add)                    (None, 1, 1, 256)    0           ['dense_10[0][0]',               \n",
      "                                                                  'dense_10[1][0]']               \n",
      "                                                                                                  \n",
      " activation_19 (Activation)     (None, 1, 1, 256)    0           ['add_3[0][0]']                  \n",
      "                                                                                                  \n",
      " multiply_9 (Multiply)          (None, 7, 7, 256)    0           ['activation_18[0][0]',          \n",
      "                                                                  'activation_19[0][0]']          \n",
      "                                                                                                  \n",
      " flatten_1 (Flatten)            (None, 12544)        0           ['multiply_9[0][0]']             \n",
      "                                                                                                  \n",
      " dense_11 (Dense)               (None, 128)          1605760     ['flatten_1[0][0]']              \n",
      "                                                                                                  \n",
      " dropout_2 (Dropout)            (None, 128)          0           ['dense_11[0][0]']               \n",
      "                                                                                                  \n",
      " dense_12 (Dense)               (None, 64)           8256        ['dropout_2[0][0]']              \n",
      "                                                                                                  \n",
      " dropout_3 (Dropout)            (None, 64)           0           ['dense_12[0][0]']               \n",
      "                                                                                                  \n",
      " dense_13 (Dense)               (None, 16)           1040        ['dropout_3[0][0]']              \n",
      "                                                                                                  \n",
      "==================================================================================================\n",
      "Total params: 3,498,958\n",
      "Trainable params: 3,498,190\n",
      "Non-trainable params: 768\n",
      "__________________________________________________________________________________________________\n",
      "Epoch 1/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 2.6677 - accuracy: 0.1807\n",
      "Epoch 1: accuracy improved from -inf to 0.18344, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 4s 260ms/step - loss: 2.6599 - accuracy: 0.1834 - val_loss: 2.7703 - val_accuracy: 0.1412\n",
      "Epoch 2/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 2.2467 - accuracy: 0.3213\n",
      "Epoch 2: accuracy improved from 0.18344 to 0.32549, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 2.2320 - accuracy: 0.3255 - val_loss: 2.7667 - val_accuracy: 0.2392\n",
      "Epoch 3/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 1.6997 - accuracy: 0.4746\n",
      "Epoch 3: accuracy improved from 0.32549 to 0.47695, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 142ms/step - loss: 1.6878 - accuracy: 0.4770 - val_loss: 2.7625 - val_accuracy: 0.2693\n",
      "Epoch 4/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 1.2463 - accuracy: 0.6270\n",
      "Epoch 4: accuracy improved from 0.47695 to 0.63029, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 1.2390 - accuracy: 0.6303 - val_loss: 2.7594 - val_accuracy: 0.2222\n",
      "Epoch 5/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.9804 - accuracy: 0.7021\n",
      "Epoch 5: accuracy improved from 0.63029 to 0.70273, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.9803 - accuracy: 0.7027 - val_loss: 2.7565 - val_accuracy: 0.1940\n",
      "Epoch 6/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.7595 - accuracy: 0.7637\n",
      "Epoch 6: accuracy improved from 0.70273 to 0.76294, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 142ms/step - loss: 0.7604 - accuracy: 0.7629 - val_loss: 2.7547 - val_accuracy: 0.1450\n",
      "Epoch 7/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.5902 - accuracy: 0.8242\n",
      "Epoch 7: accuracy improved from 0.76294 to 0.82502, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.5856 - accuracy: 0.8250 - val_loss: 2.7534 - val_accuracy: 0.1337\n",
      "Epoch 8/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.4797 - accuracy: 0.8555\n",
      "Epoch 8: accuracy improved from 0.82502 to 0.85701, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.4792 - accuracy: 0.8570 - val_loss: 2.7495 - val_accuracy: 0.0942\n",
      "Epoch 9/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.4134 - accuracy: 0.8770\n",
      "Epoch 9: accuracy improved from 0.85701 to 0.87770, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 142ms/step - loss: 0.4090 - accuracy: 0.8777 - val_loss: 2.7434 - val_accuracy: 0.0942\n",
      "Epoch 10/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.3147 - accuracy: 0.9072\n",
      "Epoch 10: accuracy improved from 0.87770 to 0.90593, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 156ms/step - loss: 0.3174 - accuracy: 0.9059 - val_loss: 2.7376 - val_accuracy: 0.0979\n",
      "Epoch 11/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.2824 - accuracy: 0.9219\n",
      "Epoch 11: accuracy improved from 0.90593 to 0.92098, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.2860 - accuracy: 0.9210 - val_loss: 2.7330 - val_accuracy: 0.0942\n",
      "Epoch 12/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.2715 - accuracy: 0.9141\n",
      "Epoch 12: accuracy did not improve from 0.92098\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.2719 - accuracy: 0.9135 - val_loss: 2.7286 - val_accuracy: 0.0942\n",
      "Epoch 13/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.2352 - accuracy: 0.9277\n",
      "Epoch 13: accuracy improved from 0.92098 to 0.92850, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.2366 - accuracy: 0.9285 - val_loss: 2.7258 - val_accuracy: 0.0942\n",
      "Epoch 14/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.2377 - accuracy: 0.9238\n",
      "Epoch 14: accuracy did not improve from 0.92850\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.2320 - accuracy: 0.9266 - val_loss: 2.7211 - val_accuracy: 0.0942\n",
      "Epoch 15/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1851 - accuracy: 0.9453\n",
      "Epoch 15: accuracy improved from 0.92850 to 0.94450, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.1850 - accuracy: 0.9445 - val_loss: 2.7140 - val_accuracy: 0.0942\n",
      "Epoch 16/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1964 - accuracy: 0.9443\n",
      "Epoch 16: accuracy improved from 0.94450 to 0.94544, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.1929 - accuracy: 0.9454 - val_loss: 2.7054 - val_accuracy: 0.0942\n",
      "Epoch 17/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1620 - accuracy: 0.9492\n",
      "Epoch 17: accuracy improved from 0.94544 to 0.95108, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 139ms/step - loss: 0.1575 - accuracy: 0.9511 - val_loss: 2.6923 - val_accuracy: 0.0942\n",
      "Epoch 18/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1343 - accuracy: 0.9561\n",
      "Epoch 18: accuracy improved from 0.95108 to 0.95484, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.1367 - accuracy: 0.9548 - val_loss: 2.6784 - val_accuracy: 0.0942\n",
      "Epoch 19/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1353 - accuracy: 0.9580\n",
      "Epoch 19: accuracy improved from 0.95484 to 0.95767, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 141ms/step - loss: 0.1375 - accuracy: 0.9577 - val_loss: 2.6711 - val_accuracy: 0.0942\n",
      "Epoch 20/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1464 - accuracy: 0.9570\n",
      "Epoch 20: accuracy did not improve from 0.95767\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.1494 - accuracy: 0.9558 - val_loss: 2.6580 - val_accuracy: 0.0942\n",
      "Epoch 21/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1294 - accuracy: 0.9580\n",
      "Epoch 21: accuracy did not improve from 0.95767\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.1290 - accuracy: 0.9577 - val_loss: 2.6422 - val_accuracy: 0.0942\n",
      "Epoch 22/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1177 - accuracy: 0.9619\n",
      "Epoch 22: accuracy improved from 0.95767 to 0.96143, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 142ms/step - loss: 0.1197 - accuracy: 0.9614 - val_loss: 2.6207 - val_accuracy: 0.0942\n",
      "Epoch 23/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1050 - accuracy: 0.9717\n",
      "Epoch 23: accuracy improved from 0.96143 to 0.97084, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.1044 - accuracy: 0.9708 - val_loss: 2.6026 - val_accuracy: 0.0942\n",
      "Epoch 24/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0978 - accuracy: 0.9707\n",
      "Epoch 24: accuracy did not improve from 0.97084\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.1011 - accuracy: 0.9699 - val_loss: 2.5824 - val_accuracy: 0.0942\n",
      "Epoch 25/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1215 - accuracy: 0.9639\n",
      "Epoch 25: accuracy did not improve from 0.97084\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.1304 - accuracy: 0.9605 - val_loss: 2.5615 - val_accuracy: 0.1431\n",
      "Epoch 26/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0900 - accuracy: 0.9746\n",
      "Epoch 26: accuracy improved from 0.97084 to 0.97366, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.0937 - accuracy: 0.9737 - val_loss: 2.5407 - val_accuracy: 0.1657\n",
      "Epoch 27/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0998 - accuracy: 0.9736\n",
      "Epoch 27: accuracy improved from 0.97366 to 0.97460, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.0968 - accuracy: 0.9746 - val_loss: 2.5178 - val_accuracy: 0.1544\n",
      "Epoch 28/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1095 - accuracy: 0.9668\n",
      "Epoch 28: accuracy did not improve from 0.97460\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.1061 - accuracy: 0.9680 - val_loss: 2.4920 - val_accuracy: 0.1488\n",
      "Epoch 29/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.1186 - accuracy: 0.9658\n",
      "Epoch 29: accuracy did not improve from 0.97460\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.1220 - accuracy: 0.9652 - val_loss: 2.4605 - val_accuracy: 0.1601\n",
      "Epoch 30/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0717 - accuracy: 0.9785\n",
      "Epoch 30: accuracy improved from 0.97460 to 0.97742, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 139ms/step - loss: 0.0741 - accuracy: 0.9774 - val_loss: 2.4303 - val_accuracy: 0.1883\n",
      "Epoch 31/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0839 - accuracy: 0.9717\n",
      "Epoch 31: accuracy did not improve from 0.97742\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0829 - accuracy: 0.9718 - val_loss: 2.4055 - val_accuracy: 0.1996\n",
      "Epoch 32/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0863 - accuracy: 0.9746\n",
      "Epoch 32: accuracy did not improve from 0.97742\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0865 - accuracy: 0.9746 - val_loss: 2.3829 - val_accuracy: 0.2034\n",
      "Epoch 33/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0944 - accuracy: 0.9678\n",
      "Epoch 33: accuracy did not improve from 0.97742\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0932 - accuracy: 0.9680 - val_loss: 2.3510 - val_accuracy: 0.2072\n",
      "Epoch 34/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0491 - accuracy: 0.9863\n",
      "Epoch 34: accuracy improved from 0.97742 to 0.98401, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.0513 - accuracy: 0.9840 - val_loss: 2.3049 - val_accuracy: 0.2373\n",
      "Epoch 35/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0519 - accuracy: 0.9863\n",
      "Epoch 35: accuracy improved from 0.98401 to 0.98589, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 140ms/step - loss: 0.0536 - accuracy: 0.9859 - val_loss: 2.2530 - val_accuracy: 0.2787\n",
      "Epoch 36/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0836 - accuracy: 0.9756\n",
      "Epoch 36: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0869 - accuracy: 0.9737 - val_loss: 2.1842 - val_accuracy: 0.3258\n",
      "Epoch 37/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0749 - accuracy: 0.9775\n",
      "Epoch 37: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0736 - accuracy: 0.9784 - val_loss: 2.1224 - val_accuracy: 0.4030\n",
      "Epoch 38/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0654 - accuracy: 0.9824\n",
      "Epoch 38: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0641 - accuracy: 0.9831 - val_loss: 2.0581 - val_accuracy: 0.4595\n",
      "Epoch 39/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0521 - accuracy: 0.9873\n",
      "Epoch 39: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0524 - accuracy: 0.9859 - val_loss: 1.9858 - val_accuracy: 0.5066\n",
      "Epoch 40/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0555 - accuracy: 0.9834\n",
      "Epoch 40: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0553 - accuracy: 0.9840 - val_loss: 1.9012 - val_accuracy: 0.5254\n",
      "Epoch 41/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0579 - accuracy: 0.9785\n",
      "Epoch 41: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0586 - accuracy: 0.9784 - val_loss: 1.8110 - val_accuracy: 0.5217\n",
      "Epoch 42/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0533 - accuracy: 0.9854\n",
      "Epoch 42: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0522 - accuracy: 0.9859 - val_loss: 1.7592 - val_accuracy: 0.5819\n",
      "Epoch 43/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0630 - accuracy: 0.9814\n",
      "Epoch 43: accuracy did not improve from 0.98589\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0686 - accuracy: 0.9793 - val_loss: 1.6559 - val_accuracy: 0.5951\n",
      "Epoch 44/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0381 - accuracy: 0.9873\n",
      "Epoch 44: accuracy improved from 0.98589 to 0.98683, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 2s 207ms/step - loss: 0.0390 - accuracy: 0.9868 - val_loss: 1.5370 - val_accuracy: 0.6685\n",
      "Epoch 45/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0503 - accuracy: 0.9854\n",
      "Epoch 45: accuracy did not improve from 0.98683\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0488 - accuracy: 0.9859 - val_loss: 1.4033 - val_accuracy: 0.7646\n",
      "Epoch 46/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0450 - accuracy: 0.9893\n",
      "Epoch 46: accuracy improved from 0.98683 to 0.98965, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 139ms/step - loss: 0.0447 - accuracy: 0.9897 - val_loss: 1.2781 - val_accuracy: 0.8098\n",
      "Epoch 47/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0681 - accuracy: 0.9775\n",
      "Epoch 47: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0661 - accuracy: 0.9784 - val_loss: 1.1451 - val_accuracy: 0.8343\n",
      "Epoch 48/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0540 - accuracy: 0.9824\n",
      "Epoch 48: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0582 - accuracy: 0.9802 - val_loss: 1.0097 - val_accuracy: 0.8512\n",
      "Epoch 49/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0518 - accuracy: 0.9863\n",
      "Epoch 49: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0509 - accuracy: 0.9868 - val_loss: 0.9134 - val_accuracy: 0.8701\n",
      "Epoch 50/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0517 - accuracy: 0.9844\n",
      "Epoch 50: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0524 - accuracy: 0.9840 - val_loss: 0.8579 - val_accuracy: 0.8512\n",
      "Epoch 51/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0534 - accuracy: 0.9785\n",
      "Epoch 51: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0528 - accuracy: 0.9784 - val_loss: 0.8420 - val_accuracy: 0.8380\n",
      "Epoch 52/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0454 - accuracy: 0.9863\n",
      "Epoch 52: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0464 - accuracy: 0.9859 - val_loss: 0.7957 - val_accuracy: 0.8493\n",
      "Epoch 53/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0372 - accuracy: 0.9873\n",
      "Epoch 53: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0370 - accuracy: 0.9868 - val_loss: 0.7145 - val_accuracy: 0.8644\n",
      "Epoch 54/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0468 - accuracy: 0.9824\n",
      "Epoch 54: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 114ms/step - loss: 0.0493 - accuracy: 0.9821 - val_loss: 0.6312 - val_accuracy: 0.8795\n",
      "Epoch 55/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0483 - accuracy: 0.9873\n",
      "Epoch 55: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0504 - accuracy: 0.9868 - val_loss: 0.6106 - val_accuracy: 0.8493\n",
      "Epoch 56/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0656 - accuracy: 0.9785\n",
      "Epoch 56: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0658 - accuracy: 0.9784 - val_loss: 0.5441 - val_accuracy: 0.8606\n",
      "Epoch 57/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0479 - accuracy: 0.9854\n",
      "Epoch 57: accuracy did not improve from 0.98965\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0466 - accuracy: 0.9859 - val_loss: 0.3851 - val_accuracy: 0.9171\n",
      "Epoch 58/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0378 - accuracy: 0.9912\n",
      "Epoch 58: accuracy improved from 0.98965 to 0.99059, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 2s 198ms/step - loss: 0.0373 - accuracy: 0.9906 - val_loss: 0.2977 - val_accuracy: 0.9379\n",
      "Epoch 59/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0329 - accuracy: 0.9922\n",
      "Epoch 59: accuracy improved from 0.99059 to 0.99247, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 1s 139ms/step - loss: 0.0328 - accuracy: 0.9925 - val_loss: 0.2713 - val_accuracy: 0.9416\n",
      "Epoch 60/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0279 - accuracy: 0.9912\n",
      "Epoch 60: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0291 - accuracy: 0.9906 - val_loss: 0.2368 - val_accuracy: 0.9435\n",
      "Epoch 61/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0349 - accuracy: 0.9873\n",
      "Epoch 61: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0338 - accuracy: 0.9878 - val_loss: 0.1979 - val_accuracy: 0.9567\n",
      "Epoch 62/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0283 - accuracy: 0.9902\n",
      "Epoch 62: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0305 - accuracy: 0.9887 - val_loss: 0.1717 - val_accuracy: 0.9567\n",
      "Epoch 63/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0269 - accuracy: 0.9902\n",
      "Epoch 63: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0282 - accuracy: 0.9897 - val_loss: 0.1508 - val_accuracy: 0.9642\n",
      "Epoch 64/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0303 - accuracy: 0.9893\n",
      "Epoch 64: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0306 - accuracy: 0.9897 - val_loss: 0.1313 - val_accuracy: 0.9680\n",
      "Epoch 65/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0470 - accuracy: 0.9844\n",
      "Epoch 65: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0453 - accuracy: 0.9849 - val_loss: 0.1110 - val_accuracy: 0.9718\n",
      "Epoch 66/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0305 - accuracy: 0.9893\n",
      "Epoch 66: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0298 - accuracy: 0.9897 - val_loss: 0.1003 - val_accuracy: 0.9755\n",
      "Epoch 67/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0542 - accuracy: 0.9863\n",
      "Epoch 67: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0527 - accuracy: 0.9868 - val_loss: 0.0901 - val_accuracy: 0.9793\n",
      "Epoch 68/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0380 - accuracy: 0.9844\n",
      "Epoch 68: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 114ms/step - loss: 0.0368 - accuracy: 0.9849 - val_loss: 0.0824 - val_accuracy: 0.9793\n",
      "Epoch 69/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0341 - accuracy: 0.9893\n",
      "Epoch 69: accuracy did not improve from 0.99247\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0354 - accuracy: 0.9887 - val_loss: 0.0796 - val_accuracy: 0.9812\n",
      "Epoch 70/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0216 - accuracy: 0.9941\n",
      "Epoch 70: accuracy improved from 0.99247 to 0.99436, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 2s 200ms/step - loss: 0.0218 - accuracy: 0.9944 - val_loss: 0.0760 - val_accuracy: 0.9812\n",
      "Epoch 71/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0387 - accuracy: 0.9883\n",
      "Epoch 71: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0383 - accuracy: 0.9887 - val_loss: 0.0742 - val_accuracy: 0.9831\n",
      "Epoch 72/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0175 - accuracy: 0.9941\n",
      "Epoch 72: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0175 - accuracy: 0.9944 - val_loss: 0.0714 - val_accuracy: 0.9831\n",
      "Epoch 73/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0242 - accuracy: 0.9932\n",
      "Epoch 73: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0238 - accuracy: 0.9934 - val_loss: 0.0653 - val_accuracy: 0.9831\n",
      "Epoch 74/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0337 - accuracy: 0.9902\n",
      "Epoch 74: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0346 - accuracy: 0.9887 - val_loss: 0.0586 - val_accuracy: 0.9868\n",
      "Epoch 75/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0402 - accuracy: 0.9883\n",
      "Epoch 75: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0402 - accuracy: 0.9878 - val_loss: 0.0516 - val_accuracy: 0.9906\n",
      "Epoch 76/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0234 - accuracy: 0.9883\n",
      "Epoch 76: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0241 - accuracy: 0.9878 - val_loss: 0.0494 - val_accuracy: 0.9906\n",
      "Epoch 77/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0261 - accuracy: 0.9912\n",
      "Epoch 77: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0258 - accuracy: 0.9915 - val_loss: 0.0487 - val_accuracy: 0.9925\n",
      "Epoch 78/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0172 - accuracy: 0.9951\n",
      "Epoch 78: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0184 - accuracy: 0.9944 - val_loss: 0.0486 - val_accuracy: 0.9925\n",
      "Epoch 79/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0342 - accuracy: 0.9893\n",
      "Epoch 79: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0338 - accuracy: 0.9897 - val_loss: 0.0489 - val_accuracy: 0.9906\n",
      "Epoch 80/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0367 - accuracy: 0.9883\n",
      "Epoch 80: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0354 - accuracy: 0.9887 - val_loss: 0.0470 - val_accuracy: 0.9906\n",
      "Epoch 81/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0312 - accuracy: 0.9912\n",
      "Epoch 81: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0301 - accuracy: 0.9915 - val_loss: 0.0497 - val_accuracy: 0.9887\n",
      "Epoch 82/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0215 - accuracy: 0.9932\n",
      "Epoch 82: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0213 - accuracy: 0.9934 - val_loss: 0.0524 - val_accuracy: 0.9887\n",
      "Epoch 83/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0237 - accuracy: 0.9922\n",
      "Epoch 83: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0230 - accuracy: 0.9925 - val_loss: 0.0555 - val_accuracy: 0.9868\n",
      "Epoch 84/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0238 - accuracy: 0.9941\n",
      "Epoch 84: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0245 - accuracy: 0.9934 - val_loss: 0.0523 - val_accuracy: 0.9849\n",
      "Epoch 85/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0412 - accuracy: 0.9883\n",
      "Epoch 85: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0401 - accuracy: 0.9887 - val_loss: 0.0705 - val_accuracy: 0.9793\n",
      "Epoch 86/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0408 - accuracy: 0.9893\n",
      "Epoch 86: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0402 - accuracy: 0.9897 - val_loss: 0.0825 - val_accuracy: 0.9774\n",
      "Epoch 87/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0245 - accuracy: 0.9893\n",
      "Epoch 87: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0238 - accuracy: 0.9897 - val_loss: 0.0829 - val_accuracy: 0.9793\n",
      "Epoch 88/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0260 - accuracy: 0.9951\n",
      "Epoch 88: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0267 - accuracy: 0.9944 - val_loss: 0.0897 - val_accuracy: 0.9868\n",
      "Epoch 89/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0293 - accuracy: 0.9922\n",
      "Epoch 89: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0295 - accuracy: 0.9925 - val_loss: 0.1020 - val_accuracy: 0.9831\n",
      "Epoch 90/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0257 - accuracy: 0.9932\n",
      "Epoch 90: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 114ms/step - loss: 0.0277 - accuracy: 0.9925 - val_loss: 0.1166 - val_accuracy: 0.9831\n",
      "Epoch 91/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0237 - accuracy: 0.9932\n",
      "Epoch 91: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0244 - accuracy: 0.9925 - val_loss: 0.1205 - val_accuracy: 0.9774\n",
      "Epoch 92/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0189 - accuracy: 0.9941\n",
      "Epoch 92: accuracy did not improve from 0.99436\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0189 - accuracy: 0.9944 - val_loss: 0.1220 - val_accuracy: 0.9718\n",
      "Epoch 93/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0183 - accuracy: 0.9961\n",
      "Epoch 93: accuracy improved from 0.99436 to 0.99624, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 2s 215ms/step - loss: 0.0177 - accuracy: 0.9962 - val_loss: 0.1271 - val_accuracy: 0.9774\n",
      "Epoch 94/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0370 - accuracy: 0.9893\n",
      "Epoch 94: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0517 - accuracy: 0.9878 - val_loss: 0.1221 - val_accuracy: 0.9793\n",
      "Epoch 95/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0195 - accuracy: 0.9941\n",
      "Epoch 95: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0192 - accuracy: 0.9944 - val_loss: 0.1027 - val_accuracy: 0.9849\n",
      "Epoch 96/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0216 - accuracy: 0.9941\n",
      "Epoch 96: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0286 - accuracy: 0.9925 - val_loss: 0.1015 - val_accuracy: 0.9849\n",
      "Epoch 97/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0275 - accuracy: 0.9912\n",
      "Epoch 97: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0267 - accuracy: 0.9915 - val_loss: 0.1045 - val_accuracy: 0.9868\n",
      "Epoch 98/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0250 - accuracy: 0.9893\n",
      "Epoch 98: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0251 - accuracy: 0.9897 - val_loss: 0.0975 - val_accuracy: 0.9868\n",
      "Epoch 99/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0234 - accuracy: 0.9941\n",
      "Epoch 99: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0226 - accuracy: 0.9944 - val_loss: 0.0962 - val_accuracy: 0.9831\n",
      "Epoch 100/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0231 - accuracy: 0.9922\n",
      "Epoch 100: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0238 - accuracy: 0.9925 - val_loss: 0.0945 - val_accuracy: 0.9831\n",
      "Epoch 101/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0261 - accuracy: 0.9893\n",
      "Epoch 101: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0273 - accuracy: 0.9887 - val_loss: 0.0706 - val_accuracy: 0.9906\n",
      "Epoch 102/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0225 - accuracy: 0.9932\n",
      "Epoch 102: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0221 - accuracy: 0.9934 - val_loss: 0.0729 - val_accuracy: 0.9887\n",
      "Epoch 103/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0191 - accuracy: 0.9951\n",
      "Epoch 103: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0200 - accuracy: 0.9944 - val_loss: 0.0750 - val_accuracy: 0.9849\n",
      "Epoch 104/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0188 - accuracy: 0.9932\n",
      "Epoch 104: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0218 - accuracy: 0.9925 - val_loss: 0.0755 - val_accuracy: 0.9868\n",
      "Epoch 105/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0184 - accuracy: 0.9941\n",
      "Epoch 105: accuracy did not improve from 0.99624\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0185 - accuracy: 0.9944 - val_loss: 0.0681 - val_accuracy: 0.9887\n",
      "Epoch 106/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0094 - accuracy: 0.9980\n",
      "Epoch 106: accuracy improved from 0.99624 to 0.99812, saving model to best-model.hdf5\n",
      "9/9 [==============================] - 2s 203ms/step - loss: 0.0102 - accuracy: 0.9981 - val_loss: 0.0692 - val_accuracy: 0.9887\n",
      "Epoch 107/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0166 - accuracy: 0.9961\n",
      "Epoch 107: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0178 - accuracy: 0.9953 - val_loss: 0.0758 - val_accuracy: 0.9887\n",
      "Epoch 108/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0191 - accuracy: 0.9932\n",
      "Epoch 108: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0187 - accuracy: 0.9934 - val_loss: 0.0856 - val_accuracy: 0.9887\n",
      "Epoch 109/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0218 - accuracy: 0.9941\n",
      "Epoch 109: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0235 - accuracy: 0.9934 - val_loss: 0.0877 - val_accuracy: 0.9887\n",
      "Epoch 110/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0254 - accuracy: 0.9912\n",
      "Epoch 110: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0245 - accuracy: 0.9915 - val_loss: 0.0974 - val_accuracy: 0.9868\n",
      "Epoch 111/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0471 - accuracy: 0.9824\n",
      "Epoch 111: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0479 - accuracy: 0.9821 - val_loss: 0.1323 - val_accuracy: 0.9849\n",
      "Epoch 112/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0288 - accuracy: 0.9922\n",
      "Epoch 112: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0298 - accuracy: 0.9915 - val_loss: 0.0809 - val_accuracy: 0.9849\n",
      "Epoch 113/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0269 - accuracy: 0.9932\n",
      "Epoch 113: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0284 - accuracy: 0.9925 - val_loss: 0.0814 - val_accuracy: 0.9906\n",
      "Epoch 114/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0202 - accuracy: 0.9951\n",
      "Epoch 114: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0206 - accuracy: 0.9944 - val_loss: 0.0825 - val_accuracy: 0.9906\n",
      "Epoch 115/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0164 - accuracy: 0.9941\n",
      "Epoch 115: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0160 - accuracy: 0.9944 - val_loss: 0.1144 - val_accuracy: 0.9831\n",
      "Epoch 116/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0284 - accuracy: 0.9893\n",
      "Epoch 116: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0276 - accuracy: 0.9897 - val_loss: 0.1165 - val_accuracy: 0.9849\n",
      "Epoch 117/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0191 - accuracy: 0.9932\n",
      "Epoch 117: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0190 - accuracy: 0.9934 - val_loss: 0.1083 - val_accuracy: 0.9849\n",
      "Epoch 118/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0223 - accuracy: 0.9922\n",
      "Epoch 118: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0300 - accuracy: 0.9915 - val_loss: 0.0887 - val_accuracy: 0.9868\n",
      "Epoch 119/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0300 - accuracy: 0.9883\n",
      "Epoch 119: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0289 - accuracy: 0.9887 - val_loss: 0.0741 - val_accuracy: 0.9793\n",
      "Epoch 120/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0425 - accuracy: 0.9863\n",
      "Epoch 120: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0438 - accuracy: 0.9859 - val_loss: 0.1312 - val_accuracy: 0.9718\n",
      "Epoch 121/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0237 - accuracy: 0.9922\n",
      "Epoch 121: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0228 - accuracy: 0.9925 - val_loss: 0.0738 - val_accuracy: 0.9906\n",
      "Epoch 122/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0291 - accuracy: 0.9922\n",
      "Epoch 122: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0306 - accuracy: 0.9915 - val_loss: 0.0813 - val_accuracy: 0.9887\n",
      "Epoch 123/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0200 - accuracy: 0.9941\n",
      "Epoch 123: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0194 - accuracy: 0.9944 - val_loss: 0.0918 - val_accuracy: 0.9868\n",
      "Epoch 124/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0199 - accuracy: 0.9951\n",
      "Epoch 124: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0200 - accuracy: 0.9953 - val_loss: 0.0871 - val_accuracy: 0.9849\n",
      "Epoch 125/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0246 - accuracy: 0.9922\n",
      "Epoch 125: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0239 - accuracy: 0.9925 - val_loss: 0.0814 - val_accuracy: 0.9906\n",
      "Epoch 126/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0125 - accuracy: 0.9961\n",
      "Epoch 126: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0121 - accuracy: 0.9962 - val_loss: 0.0804 - val_accuracy: 0.9887\n",
      "Epoch 127/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0125 - accuracy: 0.9961\n",
      "Epoch 127: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0128 - accuracy: 0.9962 - val_loss: 0.0789 - val_accuracy: 0.9887\n",
      "Epoch 128/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0343 - accuracy: 0.9902\n",
      "Epoch 128: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0331 - accuracy: 0.9906 - val_loss: 0.0751 - val_accuracy: 0.9887\n",
      "Epoch 129/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0151 - accuracy: 0.9951\n",
      "Epoch 129: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0193 - accuracy: 0.9944 - val_loss: 0.0823 - val_accuracy: 0.9868\n",
      "Epoch 130/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0670 - accuracy: 0.9863\n",
      "Epoch 130: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0646 - accuracy: 0.9868 - val_loss: 0.1806 - val_accuracy: 0.9718\n",
      "Epoch 131/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0391 - accuracy: 0.9863\n",
      "Epoch 131: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0432 - accuracy: 0.9859 - val_loss: 0.1122 - val_accuracy: 0.9812\n",
      "Epoch 132/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0366 - accuracy: 0.9883\n",
      "Epoch 132: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0362 - accuracy: 0.9887 - val_loss: 0.1323 - val_accuracy: 0.9774\n",
      "Epoch 133/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0231 - accuracy: 0.9902\n",
      "Epoch 133: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0224 - accuracy: 0.9906 - val_loss: 0.1142 - val_accuracy: 0.9812\n",
      "Epoch 134/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0198 - accuracy: 0.9932\n",
      "Epoch 134: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0197 - accuracy: 0.9934 - val_loss: 0.0845 - val_accuracy: 0.9868\n",
      "Epoch 135/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0281 - accuracy: 0.9893\n",
      "Epoch 135: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0276 - accuracy: 0.9897 - val_loss: 0.0800 - val_accuracy: 0.9868\n",
      "Epoch 136/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0267 - accuracy: 0.9893\n",
      "Epoch 136: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0259 - accuracy: 0.9897 - val_loss: 0.0760 - val_accuracy: 0.9868\n",
      "Epoch 137/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0202 - accuracy: 0.9912\n",
      "Epoch 137: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0201 - accuracy: 0.9915 - val_loss: 0.0732 - val_accuracy: 0.9868\n",
      "Epoch 138/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0186 - accuracy: 0.9932\n",
      "Epoch 138: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0224 - accuracy: 0.9915 - val_loss: 0.0718 - val_accuracy: 0.9887\n",
      "Epoch 139/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0292 - accuracy: 0.9902\n",
      "Epoch 139: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0290 - accuracy: 0.9906 - val_loss: 0.0853 - val_accuracy: 0.9868\n",
      "Epoch 140/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0167 - accuracy: 0.9951\n",
      "Epoch 140: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0171 - accuracy: 0.9944 - val_loss: 0.0966 - val_accuracy: 0.9868\n",
      "Epoch 141/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0185 - accuracy: 0.9941\n",
      "Epoch 141: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0180 - accuracy: 0.9944 - val_loss: 0.0919 - val_accuracy: 0.9887\n",
      "Epoch 142/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0258 - accuracy: 0.9922\n",
      "Epoch 142: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0296 - accuracy: 0.9906 - val_loss: 0.0966 - val_accuracy: 0.9887\n",
      "Epoch 143/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0277 - accuracy: 0.9902\n",
      "Epoch 143: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0270 - accuracy: 0.9906 - val_loss: 0.1031 - val_accuracy: 0.9868\n",
      "Epoch 144/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0159 - accuracy: 0.9932\n",
      "Epoch 144: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0153 - accuracy: 0.9934 - val_loss: 0.1042 - val_accuracy: 0.9868\n",
      "Epoch 145/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0194 - accuracy: 0.9922\n",
      "Epoch 145: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0198 - accuracy: 0.9915 - val_loss: 0.1046 - val_accuracy: 0.9868\n",
      "Epoch 146/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0253 - accuracy: 0.9912\n",
      "Epoch 146: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0259 - accuracy: 0.9906 - val_loss: 0.1020 - val_accuracy: 0.9887\n",
      "Epoch 147/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0210 - accuracy: 0.9922\n",
      "Epoch 147: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0203 - accuracy: 0.9925 - val_loss: 0.0968 - val_accuracy: 0.9868\n",
      "Epoch 148/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0121 - accuracy: 0.9980\n",
      "Epoch 148: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0124 - accuracy: 0.9981 - val_loss: 0.0980 - val_accuracy: 0.9868\n",
      "Epoch 149/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0122 - accuracy: 0.9961\n",
      "Epoch 149: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 113ms/step - loss: 0.0143 - accuracy: 0.9953 - val_loss: 0.1003 - val_accuracy: 0.9849\n",
      "Epoch 150/150\n",
      "8/9 [=========================>....] - ETA: 0s - loss: 0.0145 - accuracy: 0.9971\n",
      "Epoch 150: accuracy did not improve from 0.99812\n",
      "9/9 [==============================] - 1s 112ms/step - loss: 0.0140 - accuracy: 0.9972 - val_loss: 0.0949 - val_accuracy: 0.9868\n",
      "Total training time...164.0235493183136seconds\n"
     ]
    }
   ],
   "source": [
    "# input layer\n",
    "input_layer = Input((S, S, L, 1))\n",
    "input_layer1 = cc(input_layer)\n",
    "\n",
    "# convolutional layers\n",
    "conv_layer1 = Conv3D(filters=8, kernel_size=(3, 3, 3))(input_layer1)\n",
    "#bn_layer1 = BatchNormalization()(conv_layer1)\n",
    "ac_layer1 = Activation('relu')(conv_layer1)\n",
    "sa_layer1 = spatial_attention3D(ac_layer1)\n",
    "\n",
    "conv_layer2 = Conv3D(filters=16, kernel_size=(3, 3, 3))(sa_layer1)\n",
    "#bn_layer2 = BatchNormalization()(conv_layer2)\n",
    "ac_layer2 = Activation('relu')(conv_layer2)\n",
    "sa_layer2 = spatial_attention3D(ac_layer2)\n",
    "\n",
    "conv_layer3 = Conv3D(filters=32, kernel_size=(3, 3, 3))(sa_layer2)\n",
    "#bn_layer3 = BatchNormalization()(conv_layer3)\n",
    "ac_layer3 = Activation('relu')(conv_layer3)\n",
    "#ca_layer3 = channel_attention(ac_layer3)\n",
    "sa_layer3 = spatial_attention3D(ac_layer3)\n",
    "conv3d_shape = sa_layer3.shape\n",
    "conv_layer3 = Reshape((conv3d_shape[1], conv3d_shape[2], conv3d_shape[3]*conv3d_shape[4]))(sa_layer3)\n",
    "\n",
    "conv_layer4 = Conv2D(filters=128, kernel_size=(3,3), padding='valid')(conv_layer3)\n",
     "bn_layer4 = BatchNormalization(axis=-1)(conv_layer4)# remove this 2D BN if the training curves degrade\n",
    "#mean, variance = tf.nn.moments(conv_layer4, [0])\n",
    "#beta = tf.Variable(tf.zeros(256))\n",
    "#gamma = tf.Variable(tf.ones(256))\n",
    "#bn_layer4 = tf.nn.batch_normalization(conv_layer4, mean, variance, None, None, variance_epsilon)\n",
    "ac_layer4 = Activation('relu')(bn_layer4)\n",
    "#se_layer4 = se_block2D(ac_layer4)\n",
    "ca_layer4 = channel_attention2D(ac_layer4)\n",
    "\n",
    "conv_layer5 = Conv2D(filters=256, kernel_size=(3,3), padding='valid')(ca_layer4)\n",
    "bn_layer5 = BatchNormalization(axis=-1)(conv_layer5)\n",
    "#mean, variance = tf.nn.moments(conv_layer5, [0])\n",
    "#beta = tf.Variable(tf.zeros(512))\n",
    "#gamma = tf.Variable(tf.ones(512))\n",
    "#bn_layer5 = tf.nn.batch_normalization(conv_layer5, mean, variance, None, None, variance_epsilon)\n",
    "ac_layer5 = Activation('relu')(bn_layer5)\n",
    "#se_layer5 = se_block2D(ac_layer5)\n",
    "ca_layer5 = channel_attention2D(ac_layer5)\n",
    "flatten_layer = Flatten()(ca_layer5)\n",
    "\n",
    "#fully connected layers\n",
    "dense_layer1 = Dense(units=128, activation='relu')(flatten_layer)\n",
    "dense_layer1 = Dropout(0.4)(dense_layer1)\n",
    "dense_layer2 = Dense(units=64, activation='relu')(dense_layer1)\n",
    "dense_layer2 = Dropout(0.4)(dense_layer2)\n",
    "output_layer = Dense(units=output_units, activation='softmax')(dense_layer2)\n",
    "\n",
     "model = Model(inputs=input_layer, outputs=output_layer)# define the network's input and output endpoints\n",
    "model.summary()\n",
    "\n",
    "# compiling the model\n",
    "adam = adam_v2.Adam(learning_rate=0.0001, decay=1e-06)\n",
    "model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])\n",
    "\n",
    "# checkpoint\n",
    "filepath = \"best-model.hdf5\"\n",
     "# NOTE(review): monitors training 'accuracy' — consider 'val_accuracy' so the saved best model is not an overfit one\n",
     "checkpoint = ModelCheckpoint(filepath, monitor='accuracy', verbose=1, save_best_only=True, mode='max')\n",
    "callbacks_list = [checkpoint]\n",
    "\n",
    "start=time.time()\n",
    "#开始训练\n",
    "history = model.fit(x=Xtrain, y=ytrain, batch_size=128, epochs=150, callbacks=callbacks_list, validation_data=(Xval, yval))\n",
    "end = time.time()\n",
    "print(\"Total training time...\" + str(end-start)+'seconds')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 13790,
     "status": "ok",
     "timestamp": 1663735039584,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "T5QIINb3l32q",
    "outputId": "e33369c4-5052-4e5f-a227-cbbbc1f66a52"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "              precision    recall  f1-score   support\n",
      "\n",
      "           0       0.93      1.00      0.96        39\n",
      "           1       0.99      0.97      0.98      1278\n",
      "           2       0.99      0.99      0.99       680\n",
      "           3       1.00      1.00      1.00       201\n",
      "           4       0.98      1.00      0.99       333\n",
      "           5       0.98      1.00      0.99       580\n",
      "           6       1.00      1.00      1.00        24\n",
      "           7       1.00      1.00      1.00       328\n",
      "           8       0.94      1.00      0.97        17\n",
      "           9       0.98      0.99      0.98       822\n",
      "          10       1.00      0.98      0.99      2305\n",
      "          11       0.99      1.00      0.99       443\n",
      "          12       0.99      0.98      0.99       175\n",
      "          13       0.99      1.00      1.00      1115\n",
      "          14       0.97      1.00      0.98       236\n",
      "          15       0.99      1.00      0.99        79\n",
      "\n",
      "    accuracy                           0.99      8655\n",
      "   macro avg       0.98      0.99      0.99      8655\n",
      "weighted avg       0.99      0.99      0.99      8655\n",
      "\n",
      "Total test time...3.2154924869537354seconds\n",
      "271/271 [==============================] - 4s 11ms/step - loss: 0.0566 - accuracy: 0.9893\n",
      "[100.          97.49608764  99.11764706 100.          99.6996997\n",
      "  99.82758621 100.         100.         100.          98.90510949\n",
      "  98.39479393  99.54853273  97.71428571 100.          99.57627119\n",
      " 100.        ]\n"
     ]
    }
   ],
   "source": [
    "# load best weights\n",
    "import keras\n",
    "from keras.layers import Conv2D, Conv3D, Flatten, Dense, Reshape, BatchNormalization, Activation, GlobalAveragePooling2D, Multiply\n",
    "from keras.layers import Dropout, Input\n",
    "from keras.models import Model\n",
    "model.load_weights(\"best-model.hdf5\")\n",
    "model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])\n",
    "Xtest = Xtest.reshape(-1, S, S, K, 1)\n",
    "Xtest.shape\n",
    "\n",
    "Y_pred_test = model.predict(Xtest)\n",
    "y_pred_test = np.argmax(Y_pred_test, axis=1)\n",
    "\n",
    "classification = classification_report(np.argmax(ytest, axis=1), y_pred_test)\n",
    "print(classification)\n",
    "\n",
    "def AA_andEachClassAccuracy(confusion_matrix):\n",
    "    counter = confusion_matrix.shape[0]\n",
    "    list_diag = np.diag(confusion_matrix)\n",
    "    list_raw_sum = np.sum(confusion_matrix, axis=1)\n",
    "    each_acc = np.nan_to_num(truediv(list_diag, list_raw_sum))\n",
    "    average_acc = np.mean(each_acc)\n",
    "    return each_acc, average_acc\n",
    "def reports (X_test,y_test,name):\n",
    "    start = time.time()\n",
    "    Y_pred = model.predict(X_test)\n",
    "    y_pred = np.argmax(Y_pred, axis=1)\n",
    "    end = time.time()\n",
    "    print(\"Total test time...\" + str(end-start)+'seconds')\n",
    "    if name == 'IP':\n",
    "        target_names = ['Alfalfa', 'Corn-notill', 'Corn-mintill', 'Corn'\n",
    "                        ,'Grass-pasture', 'Grass-trees', 'Grass-pasture-mowed', \n",
    "                        'Hay-windrowed', 'Oats', 'Soybean-notill', 'Soybean-mintill',\n",
    "                        'Soybean-clean', 'Wheat', 'Woods', 'Buildings-Grass-Trees-Drives',\n",
    "                        'Stone-Steel-Towers']\n",
    "    elif name == 'SA':\n",
    "        target_names = ['Brocoli_green_weeds_1','Brocoli_green_weeds_2','Fallow','Fallow_rough_plow','Fallow_smooth',\n",
    "                        'Stubble','Celery','Grapes_untrained','Soil_vinyard_develop','Corn_senesced_green_weeds',\n",
    "                        'Lettuce_romaine_4wk','Lettuce_romaine_5wk','Lettuce_romaine_6wk','Lettuce_romaine_7wk',\n",
    "                        'Vinyard_untrained','Vinyard_vertical_trellis']\n",
    "    elif name == 'PU':\n",
    "        target_names = ['Asphalt','Meadows','Gravel','Trees', 'Painted metal sheets','Bare Soil','Bitumen',\n",
    "                        'Self-Blocking Bricks','Shadows']\n",
    "    elif name == 'HC':\n",
    "        target_names = ['1','2','3','4','5','6','7','8','9','10','11','12','13','14','15','16']\n",
    "    elif name == 'LK':\n",
    "        target_names = ['1','2','3','4','5','6','7','8','9']\n",
    "    elif name == 'HH':\n",
    "        target_names = ['1','2','3','4','5','6','7','8','9','10','11','12','13','14','15','16','17','18','19','20','21','22']\n",
     "    \n",
     "    # NOTE(review): target_names is unbound (NameError) if `name` matches none of the branches above — consider an else default\n",
     "    classification = classification_report(np.argmax(y_test, axis=1), y_pred, target_names=target_names)\n",
    "    oa = accuracy_score(np.argmax(y_test, axis=1), y_pred)\n",
    "    confusion = confusion_matrix(np.argmax(y_test, axis=1), y_pred)\n",
    "    each_acc, aa = AA_andEachClassAccuracy(confusion)\n",
    "    kappa = cohen_kappa_score(np.argmax(y_test, axis=1), y_pred)\n",
    "    score = model.evaluate(X_test, y_test, batch_size=32)\n",
    "    Test_Loss =  score[0]*100\n",
    "    Test_accuracy = score[1]*100\n",
    "    \n",
    "    return classification, confusion, Test_Loss, Test_accuracy, oa*100, each_acc*100, aa*100, kappa*100\n",
    "classification, confusion, Test_loss, Test_accuracy, oa, each_acc, aa, kappa = reports(Xtest,ytest,dataset)\n",
    "classification = str(classification)\n",
    "confusion = str(confusion)\n",
    "file_name = \"classification_report.txt\"\n",
    "\n",
    "print(each_acc)\n",
    "\n",
    "with open(file_name, 'w') as x_file:\n",
    "    x_file.write('{} Test loss (%)'.format(Test_loss))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{} Test accuracy (%)'.format(Test_accuracy))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{} Kappa accuracy (%)'.format(kappa))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{} Overall accuracy (%)'.format(oa))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{} Average accuracy (%)'.format(aa))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{} per_class acc (%)'.format(each_acc))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{}'.format(classification))\n",
    "    x_file.write('\\n')\n",
    "    x_file.write('{}'.format(confusion))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "executionInfo": {
     "elapsed": 2072,
     "status": "ok",
     "timestamp": 1663735102534,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "_Fvorlm1lfUD",
    "outputId": "987d90b5-4f3d-4803-a78d-f3436112c87a"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(145, 145, 220)\n",
      "(145, 145, 16)\n",
      "(16, 161, 161)\n",
      "(16, 161, 161)\n"
     ]
    }
   ],
   "source": [
    "import scipy.io as io\n",
    "datatype='float32'\n",
    "def loadData(name):\n",
    "    data_path = os.path.join(os.getcwd(),'data')\n",
    "    if name == 'IP':\n",
     "        # NOTE(review): 'Indian_pineas.mat' looks like a typo for 'Indian_pines.mat' — the cell ran, so the on-disk file may carry this name; verify before renaming\n",
     "        data = sio.loadmat(os.path.join(data_path, 'Indian_pineas.mat'))['indian_pines']\n",
    "        labels = sio.loadmat(os.path.join(data_path, 'Indian_pines_gt.mat'))['indian_pines_gt']\n",
    "    elif name == 'LK':\n",
    "        data = sio.loadmat(os.path.join(data_path, 'WHU_Hi_LongKou.mat'))['WHU_Hi_LongKou']\n",
    "        labels = sio.loadmat(os.path.join(data_path, 'WHU_Hi_LongKou_gt.mat'))['WHU_Hi_LongKou_gt']\n",
    "    elif name == 'PU':\n",
    "        data = sio.loadmat(os.path.join(data_path, 'PaviaU.mat'))['paviaU']\n",
    "        labels = sio.loadmat(os.path.join(data_path, 'PaviaU_gt.mat'))['paviaU_gt']\n",
    "    elif name == 'HC':\n",
    "        data = sio.loadmat(os.path.join(data_path, 'WHU_Hi_HanChuan.mat'))['WHU_Hi_HanChuan']\n",
    "        labels = sio.loadmat(os.path.join(data_path, 'WHU_Hi_HanChuan_gt.mat'))['WHU_Hi_HanChuan_gt']\n",
    "    elif name == 'HH':\n",
    "        data = sio.loadmat(os.path.join(data_path, 'WHU_Hi_HongHu.mat'))['WHU_Hi_HongHu']\n",
    "        labels = sio.loadmat(os.path.join(data_path, 'WHU_Hi_HongHu_gt.mat'))['WHU_Hi_HongHu_gt']\n",
    "    return data, labels\n",
    "\n",
    "\n",
     "# NOTE: this Patch() is immediately shadowed by the re-definition below and is never used\n",
     "def Patch(data,height_index,width_index):\n",
    "    height_slice = slice(height_index, height_index+PATCH_SIZE)\n",
    "    width_slice = slice(width_index, width_index+PATCH_SIZE)\n",
    "    patch = data[height_slice, width_slice, :]\n",
    "    \n",
    "    return patch\n",
    "def Patch(height_index, width_index):\n",
    "\n",
    "    # Input:\n",
    "    # Given the index position (x,y) of spatio dimension of the hyperspectral image,\n",
    "\n",
    "    # Output:\n",
    "    # a data cube with patch size S (24 neighbours), with label based on central pixel\n",
    "\n",
    "    height_slice = slice(height_index, height_index+PATCH_SIZE)\n",
    "    width_slice = slice(width_index, width_index+PATCH_SIZE)\n",
    "\n",
     "    patch = input_mat[:, height_slice, width_slice]  # (BAND, PATCH_SIZE, PATCH_SIZE)\n",
    "    mean_normalized_patch = []\n",
    "    for i in range(patch.shape[0]):\n",
    "        mean_normalized_patch.append(patch[i] - MEAN_ARRAY[i])\n",
    "\n",
    "    return np.array(mean_normalized_patch).astype(datatype)\n",
    "\n",
    "def padWithZeros(X, margin=2):\n",
    "    newX = np.zeros((X.shape[0] + 2 * margin, X.shape[1] + 2* margin, X.shape[2]))\n",
    "    x_offset = margin\n",
    "    y_offset = margin\n",
    "    newX[x_offset:X.shape[0] + x_offset, y_offset:X.shape[1] + y_offset, :] = X\n",
    "    return newX\n",
    "\n",
    "def applyLDA(X,y):\n",
    "    newX=np.reshape(X, (-1, X.shape[2]))\n",
    "    y=np.reshape(y,(-1,1))\n",
    "    lda=LDA()\n",
    "    newX = lda.fit_transform(newX,y.ravel())\n",
    "    newX = np.reshape(newX, (X.shape[0],X.shape[1],newX.shape[1]))\n",
    "    return newX\n",
    "# load the original image\n",
    "input_mat, target_mat = loadData(dataset)#(550, 400, 270)\n",
    "print(input_mat.shape)#1---------------------------------------------\n",
    "height = target_mat.shape[0]\n",
    "width = target_mat.shape[1]\n",
    "PATCH_SIZE = 17\n",
    "numComponents = K\n",
    "input_mat= applyLDA(input_mat, target_mat)#(550, 400, 9)\n",
    "print(input_mat.shape)#2------------------------------------------\n",
    "\n",
    "# Normalize image data and select datatype\n",
    "input_mat = input_mat.astype('float32')\n",
    "input_mat = input_mat - np.min(input_mat)\n",
    "input_mat = input_mat / np.max(input_mat)\n",
    "\n",
    "HEIGHT = input_mat.shape[0]\n",
    "WIDTH = input_mat.shape[1]\n",
    "BAND = input_mat.shape[2]\n",
    "MEAN_ARRAY = np.ndarray(shape=(BAND, 1))\n",
    "new_input_mat = []\n",
    "input_mat = np.transpose(input_mat, (2, 0, 1))\n",
    "calib_val_pad = int((PATCH_SIZE - 1)/2) \n",
    "for i in range(BAND):\n",
     "    MEAN_ARRAY[i] = np.mean(input_mat[i, :, :]) # per-band mean\n",
     "    new_input_mat.append(np.pad(input_mat[i, :, :], calib_val_pad, 'constant', constant_values=0)) # zero-pad edges by calib_val_pad\n",
    "\n",
    "input_mat = np.array(new_input_mat)#(9, 566, 416)\n",
    "print(input_mat.shape)#3-------------------------------------\n",
    "#input_mat = np.transpose(input_mat, (1,2,0))#(566, 416, 9)\n",
    "print(input_mat.shape)#4-------------------------------------\n",
    "#calculate the predicted image\n",
    "outputs = np.zeros((height,width))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 1000
    },
    "executionInfo": {
     "elapsed": 400930,
     "status": "ok",
     "timestamp": 1663735505492,
     "user": {
      "displayName": "wang gaoyu",
      "userId": "01287641996976751749"
     },
     "user_tz": -480
    },
    "id": "mamCiWz8fFdC",
    "outputId": "e409b0fa-f9e1-4a0b-df97-b8b78b84e19d"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "i=0\n",
      "i=1\n",
      "i=2\n",
      "i=3\n",
      "i=4\n",
      "i=5\n",
      "i=6\n",
      "i=7\n",
      "i=8\n",
      "i=9\n",
      "i=10\n",
      "i=11\n",
      "i=12\n",
      "i=13\n",
      "i=14\n",
      "i=15\n",
      "i=16\n",
      "i=17\n",
      "i=18\n",
      "i=19\n",
      "i=20\n",
      "i=21\n",
      "i=22\n",
      "i=23\n",
      "i=24\n",
      "i=25\n",
      "i=26\n",
      "i=27\n",
      "i=28\n",
      "i=29\n",
      "i=30\n",
      "i=31\n",
      "i=32\n",
      "i=33\n",
      "i=34\n",
      "i=35\n",
      "i=36\n",
      "i=37\n",
      "i=38\n",
      "i=39\n",
      "i=40\n",
      "i=41\n",
      "i=42\n",
      "i=43\n",
      "i=44\n",
      "i=45\n",
      "i=46\n",
      "i=47\n",
      "i=48\n",
      "i=49\n",
      "i=50\n",
      "i=51\n",
      "i=52\n",
      "i=53\n",
      "i=54\n",
      "i=55\n",
      "i=56\n",
      "i=57\n",
      "i=58\n",
      "i=59\n",
      "i=60\n",
      "i=61\n",
      "i=62\n",
      "i=63\n",
      "i=64\n",
      "i=65\n",
      "i=66\n",
      "i=67\n",
      "i=68\n",
      "i=69\n",
      "i=70\n",
      "i=71\n",
      "i=72\n",
      "i=73\n",
      "i=74\n",
      "i=75\n",
      "i=76\n",
      "i=77\n",
      "i=78\n",
      "i=79\n",
      "i=80\n",
      "i=81\n",
      "i=82\n",
      "i=83\n",
      "i=84\n",
      "i=85\n",
      "i=86\n",
      "i=87\n",
      "i=88\n",
      "i=89\n",
      "i=90\n",
      "i=91\n",
      "i=92\n",
      "i=93\n",
      "i=94\n",
      "i=95\n",
      "i=96\n",
      "i=97\n",
      "i=98\n",
      "i=99\n",
      "i=100\n",
      "i=101\n",
      "i=102\n",
      "i=103\n",
      "i=104\n",
      "i=105\n",
      "i=106\n",
      "i=107\n",
      "i=108\n",
      "i=109\n",
      "i=110\n",
      "i=111\n",
      "i=112\n",
      "i=113\n",
      "i=114\n",
      "i=115\n",
      "i=116\n",
      "i=117\n",
      "i=118\n",
      "i=119\n",
      "i=120\n",
      "i=121\n",
      "i=122\n",
      "i=123\n",
      "i=124\n",
      "i=125\n",
      "i=126\n",
      "i=127\n",
      "i=128\n",
      "i=129\n",
      "i=130\n",
      "i=131\n",
      "i=132\n",
      "i=133\n",
      "i=134\n",
      "i=135\n",
      "i=136\n",
      "i=137\n",
      "i=138\n",
      "i=139\n",
      "i=140\n",
      "i=141\n",
      "i=142\n",
      "i=143\n",
      "i=144\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaQAAAGeCAYAAADbrXX+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3df4x0WX3f+fc3YHBwsgz2bNjHM+zObMwGOdZm/TCi78pRhEw2AfqRhz8sBGsl2ObUaBln4ySWDNh6ll21LGElCrG1eVhNHQjDCgFe4iyI9mZDiC1r/7gVzzxrA+ZHPIvBzKjxGBlIZEtx2P3uH/dWdXV1/b731j3n1uf1qPrprq4f53Z13299z/mec8zdERER6duf6rsBIiIioIAkIiKJUEASEZEkKCCJiEgSFJBERCQJCkgiIpKEzgKSmb3azL5gZk+Z2du6eh4RERkG62Iekpk9B/g3wH8DPA38BvBGd/9s608mIiKD8NyOHvcVwFPu/kUAM/sQ8DCwNCCZ3evwQEdNObCXP8nLn4QnZ18DT7585W0vbyhD97J74eIFL4RvvrDvphzWC7/JN1/wbfD5ry184+Xcyx/Byz7fS7MW3fv5e/nayxbbuN/jfP5lwHd8TX/fq33N3f/jxSu7Ckj3AV+Z+/pp4GT1zR8AnuioKQfiNvv0CYPZV08AtuLYrtxQhu59r4Ozm38Zzk/7bsphnZ5zfvMGFHHhG0/wOibwvqKXZk2FIhDLCDQPRpWv8XkCnET9fa/25WVXdhWQNjKzR4BHqq/+076a0appTNLvoEg+qmAkKeiqqOEZ4CVzX99fXzfj7o+5+0Pu/hBcy9yyZCgYiYjsq6uA9BvAS83sQTN7HvAG4GMdPVd/3GYXP0Ak8jUXkZnT82E9T8tCEfpugqzQSZedu3/LzP4W8H8CzwHe6+6/3cVz9W1VN50D1mqkcCZhTdSLvqQVcpSajFGdnm9//0zHwtRFl67OxpDc/VeAX+nq8VPSfTCqHnO05o2da/xU2rBvkNklkGXosvBButRbUUO25vrmfFORXNtRCYixvHZdCEWVPcXVz+eYAlYOdjmxpxQEUmlHRxSMDkNLB+3B6+qFayf4ekCngzi0WQyUjJZeAhNMI0152OXEPpAg0OeYzvxza2ypf8qQtrVt1UIP0SjGkrjmbymEAi9cGdKxm2ZUKWVW9Jt9zD+3sqD+KUPawbSoLscTu5UjcF96qf5JFxzAnTKw9k3DQUyDUELBSGSeMqQd5BiIpgIRJsvfAU4IFKhKrwuGEybG2c1TuNtzYxLNkESmFJDW2WFyUeenczewkpI9uhUW3prHMl4pjgjEpRmSwlM7Yiw5veg7GpFshtRnBdv8c6uSrn8KSBts6qKblnh3fvI2iJwQ1y0JuAUHKK7GqFhGYhGI5dXA5ScRU1Q6rFyr7BrQGJJMpRGQXv5ktdDool7K1dgpM5rpuK0297HR4zgEKwnhckHLGEsIzKLU7HumbryDy7XKbiDBcRVlT4eRRFHDy5+8sgrPpUOsx7Noh3lGbU+AXbckkK+8xXaX6WO5QfQTQgSK63Oa4HKu0yTYqjqIK48pA7LvckADDkag7OlQksiQnmThxG/0v0DbmmDURTedzz7MNWE+Q3FgMtr78a2oHssAr/vhCj8hTFbfJwbAlpeGBSYHGDiTvTTJVna5X6ZZkbKddCURkJZxq7OP2WJxHUeoXVZgmOqiTWseMxShqpbbUVy41/TY3GC0PEmaZUmrSpVDKNSll6pDBYkMgxEo20lZEl12y1Tv5A/ba7dpntG0A6yvoa3UrJvbVF36bqGI5CTZDAkO/9570/MpEF21bm5TKAInh6g+FGlIXXjpSDogTVXdd3OntrYiQ4LVdJd9lWlbtsjrvECsjmPdz3jA0arqzKyzxP2H/uQA2p6HVD1O44c5SlkEJKs/dLKtQ2pLAWUQjLYRywhFVWK+
TAgFxUCHoByYBAjBiKOS05tn7Y+3pFBQkEIbWtRWllQ9Tt/rROUpi4B0zbaFDn2UjQuwxYKvMUAcM8iIRHX8FzfPqmAEm8upF0/s6072qQSCFNogg5JVQJoWOsBuiUTTXqPpUw3z1NkjB59/Id1m2XDOqlkLJ9jNGxtvWxZwdufi+jfWnewVCDJ2mPl7bU2kP7SsAhLs/iNu2iWnqrpuxDISJvFK2XmwgHvM8M/oOsPwYvPtwDk9v8We01ElI05VmWoxcOfirLPnefTGbUoKigzrNLILSFPXCh26dqRRKYRiYwHDPo+19DHLgpMBzW3aKvt24+4ETrc4P80SI2VIWbtzcdbpS3jn/Aw2J+dJyjYgzXffbbrdPmbddMcZh2baCkZtP9ZgGIwdOP/4+ps5hIlxcXfDmSyV8SWRPSQ7MXYbtsVlH7OlgY48GEn3DDCruvjW/XOzlWsPXqFgtDNtXZ6ObDOkg1BEys+6Qb/kavy3ZwB+Qmk32LTTXxFCN6XmA6VJselQQJpzsL2NpBPT9Wcjy1eMDSOynvvkZlu+RxpzeovDF0pk2F2oVRrSooA0RwlRe+b3W9r6PkVolMXYdEbqKrGEeEKuEWnbknh3w26Ul3OgVpkPHm0Ek13mUiVCwSgtCkhzDrWw+LGIBDgZb337aprsHieIej6TAWUxYtWQQCASQsFJXN2lN4Qdcg1gfMLt0YocKYbrc5+6CByJByNJjwLSnPltGRSUmglFYOwRs+0DzL6TYt2cUWmESZUErVoiYvpuOIblTxInAWcA86DqH+TKHNXH3Lm149ynDLIdyZ8Cksy0OefomjUBvmmtQVUSvbmsfOOCsBGMMb6mNdkHK8AxRrfhPN5ee7sr82U6DEYax5EpBSSZ6W6ekBNsUq2Ts8QkgMcETvZlQShKrFyeYXlRBave29lUPfdpfLJhnM+cu7e3mPvUkIKRTCkgraNBpVa4G+++dcpblnSl3bq4Ced3YZ+xo5bFWFaLNC9pZwgFkxA56b+ZjW1dHIERinq8adPisFPq1pMGFJCWmK4CMYtDGlTqVBECjMdXV96o+/HW7d5reFVRNjnMhkMxlpxwgq9YImQIC8MuKnxcZbeblMXVTEpjTrIHBaQVutyDaUj2HXe6dXGTj9+oJnieXtyFd59x687l929boGD19hRWTzqaWCAcqsunLJgUAWxFGZ8PazuN6o3ZdsdjJ86ds7lCiZaCUR/jSxrT6o8CUmq62Bm3Q/PB6Mrco/luL3M4vbX0/tOgxN2rq0FaCPh4TRm2OSWjKrtat/FSS2aLwq4qKQ8F5SiS7yyn5bY9FjewcHvp3KfTczi/ud3jLAaDfQJD04CyzX0VtLqhgJSIZX/4WQ6gFyVjTgCYjCKUgUkx4vwU3jK34s0sEK1wevOMiQWYS77KGCliukstVGNQRb1aasVZ3/U4FOZWzQXz64vEnr+F+gew+QTexkn+EIFCwagbCkgpsbVfZiH6CdGqvs7SAqcXNzk7pVrHZkMQuuL8lLPTq9synJ2ekm44AsqCSLUq95SRSglhx+oFYgd/nNIpBaRtbdrrouPuNTOnXDGhc5NAJK5anaAjbsxWlNuUDS0zvc/8WNMtYBIMxlcfvw1N52Ctum+IVBOEPeu1XWUHVXde363IkwLSBhsrpw50nncM234VnuviAE6F56fE2+cEKxkxgTK2Nn7U2RyssmBkZdXWY8iUpO7O05YW+1BAamhWEb4ug2ohexpiSfG2bl1cjojfPDvl0enfegZvQ6dzmwIRS7vDsTe77EeksZthU0BqQfuxyFvNvGbbvWd6LjzlnLvn1TI3d8tHCZxr99kBqJZocsxPNt422ISQwORp6Va2ASmVgui2z/EOWDnizq2LjbfdloXb9WrYeUaku+e3iV6XlE+CTkwDYdOPG34tq7mATthhoV7JU7YBCfqfprOpzmF6G9vmhnNCETi/096On3cuzoDr5bhtmc4/CkUgOuBVEUYoDc7urL9z
i21Q1jRM05VTRgSW7S0y/wZFXXp5yzogpWBZTLSFz1fFozzzletCESh8zNgNGBOsJDDi7tnpXruWzlfWQV23UG+8VxbLh44UjIbN3Nb2itgkrFq7VzLyp/puwBAt/uHYisvetl3o8kAiAahWVShHxsXpXc5Ob3LO6dqS7/lihXkfv3H3yvdOb55BrLrqYhkVfI6Rga24YNWkacmfMqQGlnUZ7tg7t5/z0+QXr9xm7tGm2xiBcLvuDrypQgZZzgCPjpUjNpVbh+LyjY2kRwGpI5u68hrbNhidnrNXv1lDi91uuzrlnI/Ht9Rzr/zK6gci1xm+VZfdmGpteAWkFO0dkMzsJcD7gRdTnX8fc/dfMLPvBD4MPAB8CXi9u3+9eVPzcNCsaZss6Xy6bs9h7ROM5rvpzm/c5FGYrbg+KrspXFAxxDDs8icW2Dz3qesMqkmve8IdI401yZC+BfyUu981sz8LPGlmnwB+FPiku7/TzN4GvA14a/Om5q/1aZED+c2cz6Zu3H6USKCMj1K4V+Xq5ajaN6+DwKFgdFymc59g+dynSbDOVo83wE8ikxsbb7rUdGX7U66vqD4Uewckd78ALurP/52ZfQ64D3gYeGV9s8eBX+PIA9J0HTNYXvCQm7azilk2dXpOiBBjpMAJccKoLGajAilkMym0QfZncx+X8TFMOkyOttxeark4pmR0qJkUvWhlDMnMHgC+n2q9yxfXwQrgq1Rdesvu8wjwSBvPn4PeCiA6EGNJCMWs26MomUXdYhKurNC9zMrxpfNTits3YByrtfsmgXJu35lpIOgzKCgYDZuZQzkiFJtf54MXR7gRreS8/tO5c3E2lE6SmcYBycz+DPBPgb/j7v/W5t4CuLubLZ++6u6PAY/Vj5HKwguyi1DtT+QF1WKnE7h5drFxxGrd+NKdswvu3oZRUeKcYFQlvfO9KAoK0hXHsMI3ljy4V2+YDrqEqkHkpNq4chIYYs9do4BkZt9GFYw+4O6/XF/9+2Z2w90vzOwG8GzTRsoKPZd+n4yBaNWioQE8BrgBb2G/6roqUN2AGLAA1dQS4yQ6cc+tN0R2salLb/6GZVi92nx375mq+X7OGAuTwY0nNamyM+A9wOfc/R/OfetjwJuAd9b/f7RRC2W1zPL1pqXgIik5iV4VQSzhhQMxyzHiPjXJkH4A+BvAp83sN+vrfoYqEP2Smb0Z+DLw+mZNlKFQMJKhmGZSvrJvL9eljPvVpMru/2J1bvuqfR9XMmJVIbu7YSON64Cq8I5N30Fn2XymR2/cpuxjNnwLtFKD7G3CCMrAxOD0zhnc3XOCxYAoGMkhmFs1R8+Xr+JfWJ6bQWpxVZltH7GLGEvO7lxwenaTs9ObrY1nrVpwVUTmWN0puG7F2QwpQ5Kt39XPAtd0jkZ5Trx7e/Z94zKYzOZInK9e8XtZkUNf40xNu9qu/WwWxVIbmItsoIAkO6m2mjiZ7jy48nZv4ePcvW1crMmcUipyaKOr7crPZomxopHIWgpIHcp1JYZF8116ZYycEOslUNYfYCgCZ6fLu+FSCkZtKWOkIK78sQzk10GkMxpD6lFOm/RFAiFWk2G3bXfhY26c37xyOb+hMSIRWU4BqWOrdott/G75gJNiQxGgGHMSt18cskqgjDHFlQth+cSNLooZNhVrhFDsVdAhIt1Ql90xcyNYtYPmOpEAbnutVLx4lzJGzk5PuQVrCx7asGlcSCXaImlRQDpiBvjqqeYzhY2rGoYWnrOIjlPtO1PvgAQMc0xJRHajLrtjtmYaw5UpDa0ug2IYRjGGu7fznE0+T11+Iu3JOkMaShUbTHeT9bobLQ5sDd9mqpc5rBx/aiIUYbavzT5deOr2E2lPtgFpQLGo4hAm1ZpwdzbtcHd0bNWO041FxsQyHnZfGxFZKtuANDjm3Dy9xenp2eWyiG2VdifcM7ayum5xWbyO3oE4xqTe1kbdbyL9Mvf+N2vVjrEAThmMIrT/Xr2M1c6uKeWVDpg7
TEbLb3Ayxu0wS/i7192lTW0xWVhEAHjS3R9avFIBSUREDm1pQFKXnUjPtn03tkvutewxV91/3fO3me/pXafy500UkET6VPVdrlunds6Wk8GWPKZbNV62ePd13ZVu9YcWzqLuYJNQrfpxpGIZoRijsLSaApJIn8yxckSwzSfquO0GFkse08pYVSou3t2cwGT5w5RxtitwU4Zz5+wWHHMFabytLUg2UEASScBWyzc1eMzI3Fy3OZNgEMulz9/mrC/HGJ1Xz1OEcLlf1jHRYiQbDT4g1b0X6Vrydinl5g7JMb1TDUVgRIDyalVjXLWhYMvMwCkwnBAncGzBSLYy+IA0k2JU2rDURIpNHoIhrfCxrWkGdH2qVfurX6xi1JkSWt1CltNadiIikgQFJBERScLxdNkt66dRn5hII7vMdxLZJI+AtE/csLn/Fv5Cki90EMmA1x8n4fIPrBi7llCSvSUfkBwwc3yHkWjbMHdi9h1lTSKNVGXjlyXpIU4Ye7HX7sIiyQekvfn1eDP/5bXvKQ6JtGI216mtbYbl0pbdO8tW5cjBcAOS+fVlUuZepGsvVlWTKiINBatWfgij5YtDyH4cwK52ka5iia3uv61BBqRl3XvbLCjuBraqa1AplMgV5tfm2V5b8aGllYeEuVPQZP2qHWWM1TkwwwT1qMq+zaneZsxd5sPMdC3J+YvIsQpFoAxWrYy67IJ3sn+XLJj+yM0pR9udlKw+ua156ZI0yAxppSVZji30c1/r5jMlR3K8YiyrpX5WChxytYdj5NT7WG7RVTdVjmzFkrm1RLv0jiYgrevG2+qlUUWeHJlNC75Kt2b1C+aww27Sm2437dKb9ghtcsiwdVRddkuZ11VBy/vxZlMq5i7qyhORTvn0w3ZFDLuajKrHN19/WRzW6NrRZEjL7Fv8cPkAyppEpBtW99W1PU43e7zJaP0NATtxrtcrd0cZ0jJ2+c5g/jL7Nip+EJEO1JnRLgUMnZvL1tZWhbXgqDOktZbMY5rvTdU8JhFp0/w8o7UFCQc0LY4oV3QbntR1/W2FTgWkBauWKDJzFT+ISOdSKqXf1BZvue5FXXY7sE1ZqoofRGRXDt5hAUNOlCFtyd1mBQ+Lc5dWOe5fLRHZypbLAR0DBaQdzAelre+zbjkikR1UXcbbLKwJo0ITVlM3nWLkBhPS6qrriwJSx7Qfk7RlxATKzUHGqJb9kXT53EdlR5cUkEQyMF01IRTb3iPd7CiWkTtncHred0sO6/zG5eeXC6W2P88oZwpIPZhfskPvjeSYGOAnEfv4cXYR2DjDJbgPqHFAMrPnAE8Az7j7LTN7EPgQ8F3Ak8DfcPc/afo8Q6PfSTlWpi3OZYU2yr5/Evjc3Nc/D7zL3b8H+Drw5iYPbgB1McG6i4jINlasOXBtRZZNt9v2cWR7jTIkM7sfOAV+Dvh7ZmbADwL/bX2Tx4H/EXh3k+fBWLmekqlCQGSwlv1l24bvLzO7j1eViquW5TkZX72xLe5AuK2TMW55biPep6Zddv8I+Gngz9ZffxfwDXf/Vv3108B9DZ8DWJ3gT6cEzWdJq1ZbyM26w1AMlmMx/7u+z0IoV+4z3eY2Xi8kiAROYrXp+vR9bmlVEcau4kS75e5j7y47M7sFPOvuT+55/0fM7Akze2LfNsBll96QlkUYzpGIiGyvSYb0A8APmdlrgW8H/iPgF4B7zOy5dZZ0P/DMsju7+2PAYwDWdBBoegYf0Pau64KSM6hD7c2hf3x6oyGy3t4Zkru/3d3vd/cHgDcA/8rdfwT4VeCH65u9Cfho41bKUotbYAwoSeyU+eEvoIHuIdHE4250sbjqW6kKHJ6iGlN6TwfPcdRsyUUkdUOqVNtnXEk2a2VirLv/GvBr9edfBF7RxuOKiMjx0EoNe+q6gGbx3eGm55ruD7ip207jTtIn/f7JOgpI+1i6m+weVm1jseqPdlOwWfdUWzZJRKQv2qBvR+7W2mXl28V6JNyoLldGxpu2X4UQ
IjtTEcNhKCBlwt2ujfjuEqIUd0T2pyKGw1CXXS7MV4wrbR9qdukdFBE5NAWkDKzqrXOqMaim2U+K3XYa/BY5PgpIOZvuf3zt7N0sa+qT4pDI8VJAyt1iV563kzWJiByaAlLGlnVruTnmdq0bLpcAtW4+lbrxRIZNVXYD5LakHC+jk7mWRZKcqUR8fwpIA7N0Yc/MU4v5xdw1h0pSpxLx/SkgHQmffbzMmnIKU8qaRIZPY0jHYrH4Ybb8UZ6n9+lYk4gMhzKkI7Bqf555OS39P0/deHIo244NaQxpf8qQpOZMwsLZPHa9pnkzWnlCDmnbsaHqdgpK+1BAEhyqYBTD7I8uFKHzLTa6opLxYVi3/nCT20q61GUnK02C4Q57r+jag1UBVF15edk1wEwXLZG8KUOS1WJgFAOByeVVGeRNi61LPIbKEvtkO8qQ8jfIgGT6zWzssr/8st88FIEwKjgh9ZC0XFvvoPXrJdKNYXXZGVxua5fHJTcxlpg7zF088Z68xTlM+15EpFuDy5B04uhWIBLsarWRlQE8Dv6Hr7lPIt3KIyAN+SyQ0Ul8VdlrKErGHpvsgpGV+a4/dd+JtCf9gORQjqquoqEJoaBIv0Zgo0BkZCXMBawyxkEc26LZunq9tkJkmNIPSObAiDjEeWaxhHHRdysaqzKneGUqYIwlxFzLH0SkD8MqapBkBGI1bWnhotRCRFYZREAKIf8sY4hKGzGxMLuMrKz3ahIRuS6bgLQu6MRYHjworXo+BcdLsYxXLqGe05TjIq6L1u3RtGrBV3VeiqyX/hhSbVNRw6GLHlY93xCLL/axqiJvxISwMB7oY7AMz9YZNjkb+6xNp/Xs8pdNhtS2Q2QyQ8mW2lxOPxAhhiuXySjfTEm6kdrSQdpS4jCyyZDadohMZijZUltbMi97nFAEihAgjq9+w00piCRD25IfxtFmSG3alAnlnCnt885w1/uUBQQmVy6lsiZJjDbo694gM6RpADhUhpLa+Fab9nlnuMt9li3iClAQYDz85YgkH9qgr3uDDEg5BwCphCIQfXw1S6q78RSjRIZpEF1223SJ5dxtNgS7dmMEYjWPicvLyEpMHXkigzWIDGmbjEhZU7927fpbevt6W/VFypgkJaEI6HSzn0EEJDkOgYiVkWuxKg5wFVfJlsaQ9qeAJNmoVnvgykK7oc6aFI5E8jeIMSQ5bpNg1cKt8wsSaahJJDvKkCR/MTCKgcDk8iocx5Q5iWQki4AUCcOsktPIZ2PL5jGFIhBGA/x9ERm45AOSY1X3yxBXTXRjrHk1nYixpPCTK9c5msckkrLkx5CMaiVoG+K/TE6OqS6Fsqpd060uShtdudgkDPJ9jXQv1b+BoUk+Q5L+rZtDVM256GfhyXXPu3wh15Kxx+tBKYd3BdKrxd+nUAQia373T8YsLoclm2URkHLtsTuGBatzWgU5EBlZyfxEpjJGChWOy44igTEnxJNVY5Vah3EfjQKSmd1D9Tbg+6jixo8DXwA+DDwAfAl4vbt/vdHzZBqR7BgiUo/2ys7K4sqUxRhL3C3LDQKlXzb7sL8y5vOGbrnx5pvsoGmG9AvAP3f3Hzaz5wEvAH4G+KS7v9PM3ga8DXhrw+cRuaaN5YgCEcNxv3pmaeNkI7LK9L3qSX7vs6+wluPp3kUNZvZC4K8A7wFw9z9x928ADwOP1zd7HHhd00bKYbUxgJvCIPC2bShtxMTC7DKyEs8wI5d8WP3RjKwvbb9ra5IhPQj8AfBPzOwvAU8CPwm82N0v6tt8FXjxsjub2SPAIw2eXxKWwtjSNm1YXvwA5icsJE1KmEQ61qTs+7nATeDd7v79wB9Rdc/NuPt0LZdr3P0xd3/I3R9q0IZsOOAOPv8v0RVuUggmfQs2YRLA3GcXT/HFEhmQJhnS08DT7j5dr+UjVAHp983shrtfmNkN4NmmjRwCcyhHUJSj2XXlCIqBFHj1Wf7dhUCEGCjj5es1CageT6RDewckd/+qmX3F
zP6Cu38BeBXw2fryJuCd9f8fbaWlA3FlXCMM5wQ+pGC0vBsvUIQAcYz68jLmBjiE6y9aIFZFY3XBgRtYWX3n+uOcXN5QWtO0yu6/Bz5QV9h9Efgxqm7AXzKzNwNfBl7f8DlErtgnG2sjgysLiEymZ6nqcUMxmCz3KFi9hNQqV35FDF+5JGLUS96BRgHJ3X8TWDYG9Komjyv9OXR13D5B4lD3WX7/q49TzWpqnh3GDRu6hYHM+l9MLvuwbRMSaOrRyWKlBumWAT4GG7c7yW0Vt2pMLdiE3JdXCUUg+vhqcYrb1vOYHIMTWDXBcBKMOJBV4evOssaPIcOlgCSVaad5x+YXr4hlyH6j50AkWJzbiQmilVRRZvPP065+uMbHkP1k/jkKKLKOApLMTDOXTh9/7vNJiFe2I8/F/HjUqnlMY06uXb/PydjMKYNVBRULbRhKN57IlAKSyI62GY+ycsTizXw8nd2+PcewMcx3bTpVXcUQ96yU46aAJNKyaj8mrmR/oahLxnfMk5b15k2z2GWFEMeYNaVQKCHtUEASyUx1Ah5f6V4tR1W3nrImyVnyO8aKdGmfMve+F441qh2U3S4vxbjaymBT+fjQ2JYXyYMyJDlqfcxpasv8ibYqSHFCnGTTlTdN8BQwZEoBSaRHXn9cWd245YSm6c3GC0sLTEYMZh6TDJ+67GQwNnWlddHV1vQxDWfEhNJGSy+7zSS9vr/OSdQS5ZIPZUiStF3WoNt0u20fZ9vnbHOF81ULupYjq9bKW2F+bteqG4TR1auq4od05jHNH17K3XebQnvKbc+FApIkrY/xmrYCYBtiLNevCu9x/ZnQuBbQ3MaMrqwtIetUa+euXZJ17ramwNSAApJIoqqAt7puruouXL/U+NJ5TABlhCXdjX1nTakWOvg2y5jssIahLKeAJHJs3PCFk+vlYrci/VFRg8ixsWoe0/w/sGTK2eV4KUMSaVnb27lPK/m6DhjVxNrlJeKH7srLpdBB2qWAJNKytgPHQTIXg2KhjszdNI9JDkpddpKkFOcM5fa8u1tYcMegGGsekxyOMiRJUhdZQV9jJLGMGQWlS9UOr8ZkoemHnsek7rvjoYAkg9D2uI3UpjusT790qLZbV0WetC+fgDSQTSHk434AABdISURBVE/mT5rr5jv25dA/5jYzh02PpYC1u2u/DlZlLCO0H5O0L4uAlHMsmt+yGwef+4O1wvGYxszuunmHfb4x9aZ1HT6PA5MRpfYJao0tmcc0CabiB2ksi4CUwgm7qeUzuNMIRlOHbosdYFr7dFuG6j29tMKW/OZGJ6gbTxrKIiAdq84zw40rc+Yvh8Orih52zy4iV8d3+hZCQRHmuvFOHLe03nRJ2hSQErTdMo4tPM/w41HyHMNOILJf1+U4kVfQMWwM1YfplekFI5992IL+Pg5OASlB+iMYlnXFFnb1w85S+V1ZehwbGndlx9vWW7TiOeuIVI7WN64Yu96x9SC5ibF+BJdWfki9H8V+l2OcZqnqvs2m03HluCWVIe2UTmesyRuv6d4s2Q4gr98tYRB2nROV8698ri9lMd6cJcnhJRWQzOs/zk37jmxjm/1L+uBWNavB30L1h3TZDZTLO/BQBBoffAb2eT1S/FXdpIuim2UP2cWPRsEoTUkFpFbl+Be+BQOIVdeXOZSmcmbpz7q/Mp3yZVfJjSHJNuo9bGzdfqIi/RrmW0LpUl4BKZclG3Jp54IcFwBdZzomqe6Zbpivv7T2PKjo4VjkFZAG2g2XilzGorbiVKtUT4KWtJH2ZPpmMxd5BaS+bfvLeISBM7nsypwYy2qxz7IYVrCV/hzh3/YhKSDtQr+MK+mELyJNKSCJtCy5bFEkEwpIIiKSBAUkkZap+1JkPwpIu1CFzUrqphKRphSQdrFtUcMRBq59soIu14h1tJ22dOAI/7YPKa+lg1Jdn24gdl0UtIlq6SOHSTdLH42YaA0LaZ/OP53KKyDl8suQSzsXHHrsoxwZsauo
UUblR7JSMc7zb3To8gpIIiIN7LL1y/DXpU9PozEkM/u7ZvbbZvYZM/ugmX27mT1oZhMze8rMPmxmz2ursTtRX6+ILLDZx80XnUEOb++AZGb3AX8beMjdvw94DvAG4OeBd7n79wBfB97cRkN3b6BS8iZUNbc//exE9tO0yu65wJ82s+cCLwAugB8EPlJ//3HgdQ2fQ3qguTT7089uIOoyUNNGGgezd0By92eAfwD8HlUg+ibwJPANd/9WfbOngfuaNlIkZ8qYMmWOu+Gq7j2YJl12LwIeBh4Evhv4DuDVO9z/ETN7wsye2LcNIjlQxpQpN8y8unB9lEnj1O1rUmX3V4Hfdfc/ADCzXwZ+ALjHzJ5bZ0n3A88su7O7PwY8Vt9Xbz9EJC02+3CNA2auzryWNRlD+j2gMLMXmJkBrwI+C/wq8MP1bd4EfLRZE0WkKbfDX4ZsdnjHcsAH0mQMaUJVvHAX+HT9WI8BbwX+npk9BXwX8J4W2ikiLdiu4Lm9y5B5XRo+f1FcaqbRxFh3fwfwjoWrvwi8osnjikj7dK5s17IePf2Mmzm+xVWH9BbGjRAKKEpCETZWc+1T7ZVLhVgu7RSR1fIKSG0EkyHVTxgUY4icQNhcybVPtVcuFWK5tFNEVssrIOUcTLoa+Kz7DU5ixj+bJbrIePrKopS9iWwnr4CUqzoQTeczDKnXsCtdZDx9ZVHK3kS2o4B0INWchWqpYcWjYVNGJLIfbT/RpYVUSIHoOCgjEtmPMqSOzeYnKByJiKylgNQxryPSocKRuotEJFfqsmvLXOHCzHTc6IDUXSQiucorQ0q4PM3Mwaa7pxjT9YHTbfF1KWZXfbRp2+dM8eclkrO8AlJq85AWFlbMfR2vFLOrXdrU1koV2z5nLKOCkkiL8gpICZrOLUo5ezsWmwLJkOY2iQzRcAPSoQKEW74pkYhIQpIKSLMY0tZmLB1v+GLmSoyWyKkba8gLzorkJqkqu9yW1fHMihYOJadurCEvOCuSm6QCEqjnS0TkWCXVZSciIsdLAUmWajJOMvQxlqEfn0hfFJBkqSbjJMvuO6STuMaQRLqhgCQHoZO4iGyigCTSwJAyP5G+KSBJdlKaO6TMT6Q9CkiSHc0dEhkmBSRphbquRKQpBSRphTIQEWlKAWngUhpvGQL9bES6o4A0cH1lLimcuNtow6GOo+N1gEWykNxadtK/NoJYCl14XRxHF8eleCFSUYYk2do1e0khaxOR1RSQUuL7Xybh+N5n75qtpJC1ichq6rJLhlOOjCLs9y4+xkDg8CfcUISjPdFvOvZj/tmI7EMBKRHuxsQCIex5AitjZ+Fo3Ym1zxNuFyf8XR5z0+0UjER2o4AkG6V6Yu2iXb0cqx/oeY6vV1cyo4Ak0qNZLLKOo1Jd+62YJClTQJLBmFbRRdKrpovAyapvmncfj7p9eJFWKCDJYEQCY04o+m7ICmPlJyJrKSDJoKTcMZVmq0TSoXlIIiKSBAUkERFJggLSwKS6PE4X7Ur1WEVkPwpIA6M5QxW/8iGBi6vSTWQTFTXIIJmDd11LvQvzpAsuRFKgDCkjq7qouuy6yn2DP/M0LiKymTKkjPSxntw+j51qt6FUHDAc73jnPrPqmUS2tTFDMrP3mtmzZvaZueu+08w+YWa/U///ovp6M7NfNLOnzOxTZnazy8bLYaSU8Uhz5pdBqcuLu2ncTHayTZfd+4BXL1z3NuCT7v5S4JP11wCvAV5aXx4B3t1OM6VPynhE5BA2BiR3/3XgDxeufhh4vP78ceB1c9e/3yslcI+Z3WirsZIPZVUisqt9ixpe7O4X9edfBV5cf34f8JW52z1dX3eNmT1iZk+Y2RN7tkESpqyqf26XF5EcNC5qcHc3272OyN0fAx4D2Of+IrIFt7myAq8/KkJJmvbNkH5/2hVX//9sff0zwEvmbnd/fZ2I9MDqD25cFhno/Z8kat+A9DHgTfXnbwI+Onf936yr7Qrgm3NdeyLSE6Muw57r
x3NMXXqSlI1ddmb2QeCVwL1m9jTwDuCdwC+Z2ZuBLwOvr2/+K8BrgaeAPwZ+rIM2i8he7Pq0IDdlTJKMjQHJ3d+44luvWnJbB36iaaNEpBvXkiHzarW9JVmS4pQcmpYOEjlmddHD/EV9eNIXLR0kcsxs9uHKVUqOpA8KSCKy1HyipO47OQQFJBG5ysDqaKQycTkkjSGJyHWzASWRw1FAEpGVpkUO0zlLIl1SQMqINuiTgzOwJdOXRLqggJSRQ23QNx9QtEFffrpYfUHZkRyCApJco4CSP6MqTFAgkZwoIIkMTT3ms2ylIJGUqexbJAO7ZDpWb1AukhsFJJFMVCsobA40CkeSKwUkkVy4YYo0MmAaQxIRkSQoIEk2NL9JZNjUZSfZOLZydJVsy7FRQBJJWb1fUf2pihVk0BSQRBJmdlkzp2AkQ6cxpERpvKRyjD+H2dI/XawBJJIwBaREHdt4ySrH+nOwaf+c4pEcEQUkydZgs6d6DTrFIjk2CkiSrWPNnkSGSgFJrmkj8xhs9iIinVGVnVzTRuah7GU3V2oXvLdmiPRKGZJIIqZ7GGnbCDlWCkgiqVAwkiOnLjuRnhngGcw32nb7i9ntTRthyG6UIUl2hlQwMV2DwRKfczRbK2Lazm0uKR+QJEkBaWCGdLJeRQUT/dkpHonsSAFpYHSyFpFcKSBlZjEDOoaMSESOg7n3P+nBqtHPI+dQjgYfYMZeYFZNtTF3gk1ae+wQCooxUD9+/SStPX5TpvWARKaedPeHFq9UQEpEVY80/B+Dc7m/T+sBY+GEn9pPU7FIZGZpQFLZdyKOZccbu/ZFi8dsa78UkcQpIElvushgFIRE8qWAJL0xvNUJoWZ+tUtQRLKigCT9WjWG5Lbz+JKraEAkawpIPfL64yQcx1m0GPv240aqczkId5iM+m7FcTsZU63UIQpIfTKHYBMiZd9NOYzRBPxk+yxmjyxJtueATQJ3Li76bspxG5333YJkKCAl4M7FWd9NOIhHb9xef4NpAJr+v20wUuBq7Py07xYcp9NzKEKAOL42beEYaaUGScc0qOwaXBSMJGcxUDIiWEl6s+cOSxmSiEhPzk/hlDPO7sD5XfCxHfV4kjKkPjg4Xn1SFn23RkQSYZNweW44QhsDkpm918yeNbPPzF33983s82b2KTP7Z2Z2z9z33m5mT5nZF8zsr3fV8Lw5o9JgNCHGIyloEJG17lyccefsAgsTKEdHGZK26bJ7H/A/A++fu+4TwNvd/Vtm9vPA24G3mtn3Am8A/iLw3cC/NLP/wt3/33abnafpL9hkZBSUR1PMICKbTQtL7pyfcbe4jXk8ujqHjRmSu/868IcL1/0Ld/9W/WUJ3F9//jDwIXf/9+7+u8BTwCtabG++vFrd2soAcdgreovI/s5Pq1NEsLI6XxxRrtRGUcOPAx+uP78Prkyqebq+7ug5MJoYMZYMfIeJ9qicW47UnYszOIU7Z/BotRXAUWgUkMzsZ4FvAR/Y476PAI80ef4s1FssGBAwddPtQsFIjtS0++70nHopDT+KeUp7V9mZ2Y8Ct4Af8ctNlZ4BXjJ3s/vr665x98fc/aFle2IMhQPliGqOQZ0diYjsJAZCadU8pYG/R9srIJnZq4GfBn7I3f947lsfA95gZs83sweBlwL/unkz82QOMZbcPb3g5tmpsiMR2cn5KZzePOPi7mk1RWTgvQYbu+zM7IPAK4F7zexp4B1UVXXPBz5h1Syu0t3/O3f/bTP7JeCzVF15P3GUFXYObs5kZEDkbt/tEZHsxVhWXS4n9TYrA+y+2xiQ3P2NS65+z5rb/xzwc00alTu3ap5RjCWE41mrTkS6c+fiDM4AbmG3A04c3JCSlg5qUV2/wGRUBSMFIhFpy5VCh4HS0kFtcZjUcwc0z0hEZHfKkNpiDoyIASJB2ZGIyI4UkJryuqsOiJPInTNtdiYisg912TUwnWc0mqB5RiIiDSlDasF8INq4K+oRC6EgbjuzT8sG
iRwdBaQGDCD6rLpO1hubE33LQlX9QLvj1QebjAhEQPuXSxoUkBqzKjANbUJAB/RzSoObY2HCnYsLFIwkJQpI0i91zfXmXLFIEqOAtC+dQ/dybcFiBSMRqSkg7WG2PYlOprtZNn607ZiSiAyeApL0xw1rM6gruIlkTQFJ+tN2lYPikUjWNDFWRESSoIAkIiJJUEASEZEkKCCJiEgSFJBERCQJCkgiIpIEBSQREUmCApKIiCRBAUlERJKggCQiB3d63uz7h3pMOSwFJBE5uE1bX+yzNUYXjymHpbXsRI6G425MRsadi1OUMKTv9Bzu3j6Hopxddxfw8xOM2F/DOqKAJHIMHJiMsFF1YrvDWb/tka2FCIUXR7GYvbn3v6ePtboHQfeceiukvJrdP7frG/TJYTiUFji7c9F3S2SFZWNcj964DeMTsMH91Tzp7g8tXqkMSUQkAY/euF2lQyfjq98YXCxaTUUNIiKJ8JNYZUPzlyOKSApIIiKSBAUkERFJgsaQmjiGshcRkQNRhrQHo4pFuux+UQiXbXSxUoOkTxnSnnRiFelOFys1SPqUIYmISBIUkEREJAkKSCIikgSNIWViKCsVqbBBRFZRQMqE4ZSj/E/lVoXWvpshIglSQMqEuzGykpDxkvORgLsNcJ1IEWmDxpBERCQJCkgiIpIEddmJHIlI4Pxu362QVcoYsXjcY6wKSCLHwCBygo+P92SXvDG4xSMOR1t02ZnZe83sWTP7zJLv/ZSZuZndW39tZvaLZvaUmX3KzG520WgR2Ydd22pHl8Quff+K9GybMaT3Aa9evNLMXgL8NeD35q5+DfDS+vII8O7mTRQRkWOwMSC5+68Df7jkW+8CfppqzubUw8D7vVIC95jZjVZaKiIig7ZXlZ2ZPQw84+6/tfCt+4CvzH39dH3dssd4xMyeMLMn9mmDiIgMy85FDWb2AuBnqLrr9ubujwGP1Y85gEVxRESkiX2q7P488CDwW1ZNub8fuGtmrwCeAV4yd9v76+tERETW2jkgufungT83/drMvgQ85O5fM7OPAX/LzD4EnADfdPeLthor24uE6v8ynaWGylj03QQRSdjGgGRmHwReCdxrZk8D73D396y4+a8ArwWeAv4Y+LGW2il7CKEgFl4tsZ2A4iSZpohIgjYGJHd/44bvPzD3uQM/0bxZ0oZi7BDTmtyQUFNEJDFay05ERJKggCQiIknQWnYZiWWEImx9e+3OKiI5sWrYp+dGaB7SRj77sD2bfRARScqT7v7Q4pXKkDKh4CIiQ6cxJBERSUIqGdLXgC8D99afD4WOJ31DO6ahHQ8M75iGdjyw+zH9Z8uuTGIMacrMnljWr5grHU/6hnZMQzseGN4xDe14oL1jUpediIgkQQFJRESSkFpAeqzvBrRMx5O+oR3T0I4HhndMQzseaOmYkhpDEhGR45VahiQiIkcqiYBkZq82sy+Y2VNm9ra+27MrM3uJmf2qmX3WzH7bzH6yvv47zewTZvY79f8v6rutuzKz55jZ/21mH6+/ftDMJvVr9WEze17fbdyWmd1jZh8xs8+b2efM7L/O/TUys79b/859xsw+aGbfnttrZGbvNbNnzewzc9ctfV2s8ov1sX3KzG721/LlVhzP369/7z5lZv/MzO6Z+97b6+P5gpn99X5avd6yY5r73k+ZmZvZvfXXe79GvQckM3sO8I+B1wDfC7zRzL6331bt7FvAT7n79wIF8BP1MbwN+KS7vxT4ZP11bn4S+Nzc1z8PvMvdvwf4OvDmXlq1n18A/rm7vwz4S1THle1rZGb3AX+baoPM7wOeA7yB/F6j9wGvXrhu1evyGuCl9eUR4N0HauMu3sf14/kE8H3u/l8C/wZ4O0B9nngD8Bfr+9ypz4mpeR/Xjwkzewnw14Dfm7t679eo94AEvAJ4yt2/6O5/AnwIeLjnNu3E3S/c/W79+b+jOtHdR3Ucj9c3exx4XT8t3I+Z3Q+cArH+2oAfBD5S3ySbYzKz
FwJ/BXgPgLv/ibt/g8xfI6rJ7X/azJ4LvAC4ILPXyN1/HfjDhatXvS4PA+/3SgncY2Y3DtPS7Sw7Hnf/F+7+rfrLEri//vxh4EPu/u/d/XepNjd9xcEau6UVrxHAu4Cf5upKm3u/RikEpPuAr8x9/XR9XZbM7AHg+4EJ8OK5Ldy/Cry4p2bt6x9R/bL9f/XX3wV8Y+4PK6fX6kHgD4B/UndBRjP7DjJ+jdz9GeAfUL07vQC+CTxJvq/RvFWvyxDOFz8O/B/159kej5k9DDzj7r+18K29jymFgDQYZvZngH8K/B13/7fz36t3082mpNHMbgHPuvuTfbelJc8FbgLvdvfvB/6Ihe65DF+jF1G9G30Q+G7gO1jSrZK73F6XdczsZ6m6+D/Qd1uaMLMXAD8D/A9tPm4KAekZ4CVzX99fX5cVM/s2qmD0AXf/5frq35+mqvX/z/bVvj38APBDZvYlqm7UH6Qag7mn7h6CvF6rp4Gn3X1Sf/0RqgCV82v0V4Hfdfc/cPf/APwy1euW62s0b9Xrku35wsx+FLgF/IhfzrfJ9Xj+PNUbod+qzxH3A3fN7D+hwTGlEJB+A3hpXRn0PKoBvo/13Kad1GMr7wE+5+7/cO5bHwPeVH/+JuCjh27bvtz97e5+v7s/QPWa/Ct3/xHgV4Efrm+WzTG5+1eBr5jZX6ivehXwWTJ+jai66goze0H9Ozg9pixfowWrXpePAX+zruQqgG/Ode0ly8xeTdX9/UPu/sdz3/oY8AYze76ZPUhVCPCv+2jjLtz90+7+59z9gfoc8TRws/472/81cvfeL8BrqSpP/h/gZ/tuzx7t/8tUXQqfAn6zvryWaszlk8DvAP8S+M6+27rn8b0S+Hj9+X9O9QfzFPC/Ac/vu307HMd/BTxRv07/O/Ci3F8j4H8CPg98Bvhfgefn9hoBH6QaA/sP9YntzateF6pdwf5xfa74NFWFYe/HsMXxPEU1rjI9P/wvc7f/2fp4vgC8pu/2b3tMC9//EnBv09dIKzWIiEgSUuiyExERUUASEZE0KCCJiEgSFJBERCQJCkgiIpIEBSQREUmCApKIiCRBAUlERJLw/wMGRQzrnWA+bwAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 504x504 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaQAAAGeCAYAAADbrXX+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3df4x0WX3f+fc3YHDGyTLYk7CPZ9idScwGOdZm/TCi7spRhEw2AfqRhz8sBGsl2ObUaBkncRJLBmw9y65alrAShWBtHlZTB8IQIcBLnGVEe7MhxJa1f9yKZ3ptwPyIJxjMjBqPkYFE8SrO7H73j3ururq6fte9dc+59Xk9uv10V1VXnerqvt8653zP95i7IyIi0rU/1nUDREREQAFJREQSoYAkIiJJUEASEZEkKCCJiEgSFJBERCQJrQUkM3uNmX3RzJ4ys7e39TgiItIP1sY6JDN7HvBvgP8OeBr4deBN7v65xh9MRER64fkt3e8rgafc/UsAZvYR4CFgYUAyu8fh/paacmCveJJXPAlPTr8GnnzF0tte3lD67uX3wMVdL4JvvajrphzWi77Ft+76NvjC1+eueAX38B/g5V/opFnz7vnCPXz95fNt3O1+vvBy4Du+rr/v5b7u7n9q/sK2AtK9wFdnvn4aGCy/+f3AEy015UDcpp8+YTD96gnAljy3KzeUvvvA6+H05l+Es5Oum3JYJ2ec3bwBRZy74glezxg+UHTSrIlQBGIZgf2DUeXrfIEAg6i/7+W+sujCtgLSWmb2MPBw9dV/0VUzGjWJSfodFMlHFYwkBW0lNTwDvHTm6/vqy6bc/VF3f9DdH4RrPbcsGQpGIiK7aisg/TrwMjN7wMxeALwReLylx+qO2/TwA0QiX3GITJ2c9etxGhaK0HUTZIlWhuzc/Tkz+xvA/wk8D3i/u/9WG4/VtWXDdA5Yo5HCGYcVUS/6glbIUdpnjurkbPPvz3QuTEN06WptDsndfxn45bbuPyXtB6PqPocr3ti55k+lCbsGmW0CWYYuEx+kTZ0lNWRrZmzO1yXJNR2VgBjLa5eFUFS9p7j88RxTwMrBNif2lIJAKu1oiYLRYah00A68zl64doKvJ3RaiEPrxUDJcOERGGOaacrDNif2ngSBLud0Zh9bc0vdUw9pU5tmLXQQjWIsiSv+lkIo8MLVQzp2kx5VSj0ruu19zD62ekHdUw9pC5OkuhxP7FYOwX3hUf2TNjiAO2Vg5ZuGg5gEoYSCkcgs9ZC2kGMgmghEGC9+BzgmUKAsvTYYThgbpzdP4LzjxiTaQxKZUEBaZYvFRa2fzt3ASkp2GFaYe2sey3glOSIQF/aQFJ6aEWPJyUXX0Yhke0hdZrDNPrYy6bqngLTGuiG6SYp36ydvg8iAuKok4AYcoLgao2IZiUUgllcDlw8ipqh0WLlm2e1Bc0gykUZAesWTVaHReZ2kq7FVz2iq5bbazMe97schWEkIlwUtYywhMI1S0+tMw3gHl2uWXU+C4zLqPR1GEkkNr3jyShWeS4eoxzNvi3VGbS2AXVQSyOuP1fZVl7dYkqdw5WAmacENog8IESiur2mCy7VO42Ab3af0yK7lgHocjEC9p0NJoof0JHMnfqP7Am0rglEbw3RX7tMcd8NmeygOpQ0ZWyAwJpaRUATGtj51KzCeTnIZ4PU4XOEDwnj598UALLn/2fuUxOzTW9nm+zLtFam3k64kAtIibnXvY1osruUItU0Fhok22lQHo0WXx7K6PBTVAXGjP6wwt93M5N7dYLi4kzTtJS1LVQ6h0JBeqg4VJDIMRqDeTsqSDUjVO/n68wP1ltbtZzRpRmvtWRaMaotWkq9aXd72H56VQxis+mFkumhLRDqRxBzSMofeX2jd41nLZYFWBSPciGy3srLtUiiBSBjbwqO0YeejriKbUMmgdCTbQ5pVDd/NnKybigoJZtMd7DH2tKjI66xA
rJ7Hqp9xj3tP1WCm12Xau26NrNL0OqTqfva+m6OUdA9pwuoPbSTdTQqlJnNybDgYhSJs3bNqQiwjY4ZVivmCoxzSfeJKSxwYBwilwXDMyc3T5h8khc3xUmhDg5oa4tYc1e6y6CFds+nkUhdp44myAycgrC34GgPE0UHbdEgxllzcPL0MRutO3vMJAqsy2FLJbkuhDdIrWQWkXRMd9h01miYzbP6QsgkHn30h3aa94ZxVqxYG2M0ba29bFnB65+L6FatO9goESYuxJESIvqindJj1e00tpD+0rAISbP8j3rc6dxuLX6Ua1gjjeCXtPFjAPWb4Z3SdYXix/nbgnJzdol+DX8crxpI7F6ecl2fXlkU4VWaqxcCdixaGcWuP3LhNSUGR4chhdgFp4lqiQ9uONCqFUKxNYNjlvhbeZ1kw6NHapo16326cj+Fkg/PTtGOkHlLW7lyctvoS3jk7hfWd8yRlG5Bmh+/W3W4Xra85ykRTwajp++oNg5EDZ59YfTOHMDYuztecyVKZXxLZQRZZdsvYBscupmV8jjwYSfsMMKuG+Fb9c7OltQevUDDamtYhpSPbHtJBKCLlZ9WkX67b/VI32weUdoN1O/0VIVTZfQpOG1GadjoUkGYcbG8jaYUD4yFEFleMDUMoMp6icrMN3yONOLnF4RMlMhwuVKHVtCggzVCHqDlhvqLrJt9ThL16MTZZkbpMLCEOyDUibZoS727YjXL9gtzZ4NFEMNlmLVUiFIzSooA049DFXPsuEmAw2vj21TLZHU4Q9XomA8piyLIpgUAkhIJBXD6k14cdcg1gNOD2cEkfKYbra5/aCByJByNJjwLSjNltGRSU9hOKwMgjZpsHmF0Xxbo5w9II46oTtKxExOTdcAyLHySOA04P1kHVP8ilfVQfcefWlmufMujtSP4UkGSqyTVH16wI8PvmGlQp0evTytcWhI1gjPAVrck+WAGOMbwNZ/H2yttdWS/TYjDSPI5MKCDJVHvrhJxg46pOzgLjAB4TONmXBaEosXJxD8uLKlh13s591WufRoM183zmnN/eYO3TnhSMZEIBaRVNKjXC3XjvrRPeumAo7dbFTTg7h13mjhoWYwmBhUN+IRSMQ2TQfTP3tnFyBEYo6vmmTSt7a1hP9qCAtMCkCsQ0DmlSqVVFCDAaXa28UY/jrdq916h22LXxYTYcirFkwGDpRop9KAw7r/BR1btdpyyu9qQ05yQ7UEBaYroHE4pFq+w673Tr4iafuFEt8Dy5OIf3nnLrzuX1ty1QsHx7CqsXHY0tEA415FMWjIsAtiSNz/u1nUb1xmyz52MD587pTKJEQ8Goi/klzWl1RwEpNW3sjNui2WB0Ze3R7LCXOZzcWvj9k6DE+dVqkBYCPlqRhm1OybDqXa3aeKkh06Kwy1LKQ0E5jOS7ymmxTZ+LG1i4vXDt08kZnN3c7H7mg8EugWHfgLLJ9ypotUMBqSPudmXTvEV/+FlOoBclIwYAjIcRysC4GHJ2Am+dqXgzDURLnNw8ZWwBZjpfZYwUMd1SC9UcVFFXS604q4ce+8LcqrVgfr1I7NlbqX8A60/gTZzkDxEoFIzaoYDUgckchM/Pk8ydtXI8iUUfEK0a6ywtcHJxk9MTqjo2a4LQFWcnnJ5c3Zbh9OSEdMMRUBZEqqrcE0YqKYQtqwvE9v55SqsUkDa1bq+LlofXzJxyyYLOdQKRuKw6QUvcmFaUW9cbWmTyPbNzTbeAcTAYXb3/Juy7BmvZ94ZItUDYs67t2qrZatvqeRw3BaQ11mZOHeg87xi2eRWe62IPToVnJ8TbZwQrGTKGMjY2f9TaGqyyYGhl1dZj6Cltw404DuxQ9lB6SgFpT9OM8FU9qAZ6T31MKd7UrYvLGfGbpyc8MolBGWz4N1nbFIhY2gOOnYkEQj2/tG5vIvWg+k0BaQvLwkrzscgb7XlNt3vP9Fx4whnnZ1WZm/PyEQJn2n22B9yoeo2MMB+svG2w8TRopSyEgkdi
SVncyKC16ck2IB06IXrZPklNn+cdsHLInVsXa2+7KQu362rYeUak87PbRK/HdcYhixOTrGezH1b8alZrAZ2wRaHeLk3nIzN+E9iVbAMSZLFM57J3MsPMp2nfiwp5hiJwdqe5HT/vXJwC19NxmzJZfxSKQHTAq+cYSoPTO6u/ucE2qNfUT5PKKUMCi/YWmX2DksqQXirtyE3WASkV83HR5j6/PqRndRWIXPssV4UiUPiIkRswIlhJYMj56clOu5bOZtZBnbdQb7xXFounjhSM+s3cVo6K2Dgsq90rGfljXTegjxYFqGXHTjYtdHkgkQBUVRXKoXFxcs7pyU3OOFmZ8j2brDDrEzfOr1x3cvMUYjVUF8uo4HOMDGzJgVWLpiV/6iHtaX7YcN1ypUacnSRfvHKTtUfrbmMEwu16OPCmEhlksSo3wrFySFWufblQXL6xkfQoILVk0fBCo7Fq02B0csZO42Z7mh9229YJZ3wivrVee+VXqh+IXGf4RkN2I6ra8ApIKdo5IJnZS4EPAi+hOv8+6u7vMbPvBD4K3A98GXiDu39j/6bmYVGiRWu9pk16SWeTuj2HtUswmh2mO7txk0dgWnF9WLaTuKBkiH7Y5k8s0P16p31G3RMeGNnbPj2k54CfcvdzM/uTwJNm9kngR4FPufu7zOztwNuBt+3f1Pw1viyyJ7+Zs72pG7cfqeekzimp3vIOY1UkqI3AoWB0XKqsVgcWr3saB2uterwBPoiMb6y96UKTyvYnXK+o3hc7ByR3vwAu6s//vZl9HrgXeAh4VX2zx4Bf5cgD0qSOGazOyMtF072KaW/q5IwQIcYIOEUInFycc3JyyiOxTKI3k0IbZHfLa+tXfATjFjtHG24vtVgcUTI81EqKTjQyh2Rm9wPfT1Xv8iV1sAL4GtWQ3qLveRh4uInHz8FBh/JaFuvgMBn2KEqmUbcYhysVuhdZOr90dkJx+waMIo4xHNelgm7cnj4udBsUFIz6zcyhHBKK9a/zwZMj3IhWclb/6dy5OO3LIMnU3gHJzP4E8E+Bv+3u/85m3gK4u5stXr7q7o8Cj9b3kcESV7kmVPsTeUFV7HQMN08v1s5YrZpfunN6wfltGBYl0Qf4GdwMRgwzm+QpKEhLHMMKX5vy4G7YOKzJ6WuYQWRQbVw5DvRx5G6vgGRm30YVjD7k7r9UX/x7ZnbD3S/M7Abw7L6NlCU6Tv0ejIBoVdHQAB4D3IC3slt2XRWobkAM1S7h9R47Hp2449YbIttYN6Q3e8MyLK823957pmq9nzPCwrh380n7ZNkZ8D7g8+7+D2auehx4M/Cu+v+P79VCWS6z/vq+qeAiKRlEr5IgFvDCgZjlHHGX9ukh/QDw14DPmNlv1Jf9DFUg+kUzewvwFeAN+zVR+kLBSPpi0pPypWN7/SgLdmj7ZNn9Xyzv27561/uVjFiVyO5u2FDzOqAsvGPTddBZtJ7pkRu3KbtYDd8AVWqQnY0ZQhkYG5zcOYXzHRdY9IiCkRyCuVVbyvjiKv6F5bkZpIqrynT7iG3EWHJ654KT05ucntxsbD5rWcFVEZlRJ/ysrDibIfWQZON39dPANVmjUZ4Rz29Przcug8l0jcTZ8orfi5Icuppn2neo7drPZl4stYG5yBoKSLKVqqzPYLLz4NLbvZVPcH7buFjRc0opyaGJobYrP5sFRopGIispILUo10oM82aH9MoYGRDrEiirn2AoAqcni4fhUgpGTSljpCAu/bH05NdBpDWaQ+rQXieoA2/SFwmEWC2G3bTdhY+4cXbzynF2Q3NEIrKYAlLLGt8tduKAi2JDEaAYMYibF4esOlDGiOLKQVi8cKONZIZ1yRohFDsldIhIOzRkd8zcCFbtoLlKJIDbTpWK57+ljJHTkxNuwcqEhyasmxdSirZIWhSQjpgBvnyp+VRhoyqHoYHHLKLjVPvO1DsgAf2cUxKR7WjI7pitWMZwZUlDo2VQDMMoRnB+O8/V5LM05CfSnKx7
SH3JYpPVqpc5LJ1/2kcownRfm12G8DTsJ9KcbANSH2ORUy3t0U4L82zZjtN7i4yIZTzsvjYislC2Aam3zKk23qW51O6ER8aWZtfNl8VrKUg7xrje1kbDbyLdMvfuN2vVjrGXvP5o5bCx+yxjtbNrSv3KqjfoMF7yPAcj3A5Twt+dapPBfW2wWFhEAHjS3R+cv1ABSUREDm1hQNKQnUjHNn03tk3fa9F9Lvv+VY/fZH9P7zrVf15HAUmkS3Umy2ZjBBsuBltwn27VfNn8t68arnSrPzRwFnUHG4eq6seRimWEYoTC0nIKSCJdsmq+MNj6E3XcdAOLBfdpZawyFee/3ZwwSaKZv6qM012B92U4d05vwcnp3veVrXhbW5CsoYAkkoCNyjftcZ+RuuM01xsaB4NYLnz8Jld9OcbwrHqcIoTL/bKOiYqRrNX7gDRZ25OsBW+XUm5unxzTO9VQBIYEmMvejMs2FGyYGTgFhhPiGI4tGMlGeh+QplKMSmtKTaTY5D44xgofkx7Q9aVWzVe/WMaoe0qouoUsdhy17HRmFxFJ3nEEJBERSd7xDNnB9bEa9ZxE9rLNeieRdfIISLvEDZv978oX6Sc6iGRgUuZqPFMNuBi5SijJzpIPSA6YOb7FTLStWTsxvWbRfSpSiWysShu/TEkPcczIi512FxZJPiDtzBeM0M1erdE7kVaMh+CTP8CGdhqW2obDO4uqcuSgvwHJ/HqZlJkX6dqLVeWkisgeApEYoGBMsJLohSJSQxzArg6RLmOJVfffVC8D0qLhvU0KiruBLRsaVBdK5Arza+tsgWoX3TsXp9zkxgFXOfXf9BQ0Xl21o4yxOgdm2Ds9qrRvc6q3GTPHbJiZ1JKcPUSOVSgCZbCqMuqiA6cI10+OZYyc3z7jQEUg+m/yIzenHG52UrL65LbipUtSL3tISy3o5dhcNeNrw3ymzpEcrxjLqtTPUoFF1R5CEQghUrS09fwxcep9LDcYqpsoh7akZG4t0SG9owlIq4bxNnpplJEnR2ZdwVdp1zR/wRyCLeyNLrLudpMhvcmI0DqHDFtHNWS3kHldAXnxON50ScXMoaE8EWmVTz5slsSwrfGwun/z1cf8tEbbjqaHtMiuyQ+Xd6Bek4i0w+qxuk17Rpua3t94QUbKfBsGzvV85faoh7SIXb4zmD2mV6PkBxFpQd0z2iaBoXUzvbWVWWENOOoe0koL1jHNjqZqHZOINGl2ndHKhIQDmiRHlEuGDQd1VZymQqcC0pxlJYrMXMkPItK6pofo9rGuLd5w3ouG7LZg63qpSn4QkW15XWqppQSGnKiHtCF3myY8zK9dWua4f7VEZCMblgM6BgpIW5gNSht/z6pyRCJbqIaMNymsCcNi8YJVScdkiZEbjElrqK4rCkgtW7Q1zOwvokKVbGrIGMr1QcaoKiVIunzmo3pHlxSQOqJOk2xjUjUhFJt+R7q9o1hG7pzCyVnXLTmssxuXn18WSm1+nVHOFJA6oFgkx8oAH0TsE8eZeWojDYussndAMrPnAU8Az7j7LTN7APgI8F3Ak8Bfc/c/2vdxRKQfTFucyxJNpH3/JPD5ma9/Hni3u38P8A3gLfvcuQHUyQSrDhHpoT0KAyyqsrLgLpdWZFl3u03vRza3Vw/JzO4DToCfA/6umRnwg8B/X9/kMeB/At67z+NgLK2nNK2GKyI94wQbX6k6biUwiNMO1sq/fK/KtRWjekl7XU3FWF6WZzBipvPm2KIdCDcxGOGW5zbiXdp3yO4fAj8N/Mn66+8Cvunuz9VfPw3cu+djAMs7+JNMtdle0rJqC7lZ9TQUg6Xv3A0blty8uJheVhY3KOZqpiz7W3BzImMIQxjM3LgcQryeSBAJDOKASXK9OZRWJWFsK44nb5a3/tajtnNAMrNbwLPu/qSZvWqH738YeHjXx5/eD1yeuXtyllZZPJH6TWY55Ly4Pb3sZihgFDc70bsRcdwGWNM1bqQV+/SQfgD4ITN7HfDt
wH8GvAe428yeX/eS7gOeWfTN7v4o8CiA7TsJNPnl7NH2rqv+3pxePdXOHPrHpzfL23EMG0BkNL0sXhlSW83qj+qp5GPnpAZ3f4e73+fu9wNvBP6Vu/8I8CvAD9c3ezPw8b1bKQvNb4HRk5HK1pkf/gD1erc13dF05hd8l9/xNv4utPC4HW0UV30bVYLDU1RzSu9r4TGOmi04RFLXp0y1XeaVZL1GFsa6+68Cv1p//iXglU3cr4iIHA9VathR28PS8+8O1z3WJBFi3fCE5p2kS/r9k1UUkHaxcDfZHSyrrrrsj3ZdsFn1UBs2SUSkK9qgb0vu1tix9O1iPRNuVMeVmfF9269ECJGtKYnhMBSQMuFue5VRUdwR2Z2SGA5DQ3a5MF8yr7R5qNlmdFBE5NAUkDKwtDRKXZ9r395PisN2mvwWOT4KSDmbbju7bU7eLrc8DMUhkeOlgJS7+aE8b6bXJCJyaApIGVs0rOXm2IISK7kEqFXrqTSMJ9JvyrLrIbcF6XgZncxVFklyphTx3Skg9czCwp6Zdy1mi7lrDZWkTiniu1NAOhI+/XjZa8opTKnXJNJ/mkM6FvPJD9PyR3me3rWJoUj/qId0BJbtzzMrp9L/szSMJ4ey6dyQ5pB2px6S1JxxmDubx7S32lTlCTmkTeeGqtspKO1CAUlwqIJRDNM/ulCE1rfYaItSxvthVf3hfW4r6dKQnSw1DoY77FzRtQPLAqiG8vKybYCZFC2RvKmHJMvFwDAGAuPLizLoN823LvEYKgvs0ttRDyl/vQxIpt/MvV2Ol1+Om4ciEIYFA1IPSYs19Q5av155sfTfQ0mtX0N2Bpfb2uVx5CbGEnOHmcMTH8mbX8O06yF5may98wVbt0iaetdD0omjXYFIsKvZRlYG8Nj7H77WPmXGnMAYKyPZduuPTB4Bqc9ngYz+SJalvYaiZORxn10wsjI79KfhO5HmpB+QHMphNVTUNyEUFD0Y3w5EhlbCTMAqY+zFc5s3ravXaStE+in9gGQODIl9XGcWSxgVXbdib1XPKV5ZChhjCVHjJCKyuX4lNUgyArFatjR3qGshIsv0IiCFkH8vo49KGzK2MD2GVtZ7NYmIXJdNQFoVdGIsDx6Ulj2eguOlWMYrR6jXNOVYxHXeqj2alhV8PebByx5syyUHkP4cUm1dUsOhkx6WPV4fky92sSwjb8iYMDcf6COwDM/WGTa5Mzb71sNYu0p5l9p0qmeXv2x6SE07RE+mL72lJsvpByLEcOUYD/PtKclm3I1gY4KN8Q1KZqRWOkhbShxGNj2kph2iJ9OX3lJTWzIvup9QBIoQII6uXuGmLogkQ9uSH8bR9pCatK4nlHNPaZd3htt+T1lAYHzlKNVrksRog7729bKHNAkAh+qhpDa/1aRd3hlu8z2LirgCFAQY9b8ckeRDG/S1r5cBKecAIJVQBKKPrvaS6mE8xSjZRFV7cMCoGABRvzcZ6MWQ3SZDYjkPm/XBtsMYgVitY+LyGFp5NVtLZBU3zKta7QpGeehFD2mTHpF6Td3aduhv4e3rbdXn6WQjKQlFQKeb3fQiIMlxCESsjFyLVbGHVVwlW5pD2l1vAtKiITn1ivqlqvbAlUK7oe41KRyJ5K8Xc0gToQhVzelCgeiYjINVhVtnCxJpqkkkO73pIQEQIj4AGDEuQj+3rJDrYmAYA4Hx5UU4rslskaxkEZAiYf3i0yJQ+Agw3I1ohy+4ujUNKe5t0TqmUATCMPHXXkSuST4gObZZqWA3IE4LLLoX6VdadGOkdTWtiLGkqLrLU47WMYmkLPk5JKOqBG3r/lXLDaqTjW34PV3/y+TkmGoplGXtmmx1UdrwymHjkPx7FElTqn8DfZN8D0m6t2oNUbXmopvCk6sed3Eh15KRx+tBKYd3BdKp+d+nSQLV0t/BwYj5cliyXhYBKdfNvY6hYHVOVZADkaGVzC5kKmOkUOK4bCkSGDEgDpbNVaoO4y72CkhmdjfV24Dvo4ob
Pw58EfgocD/wZeAN7v6NvR4n04hkxxCROrRT76wsrixZjLHE3bLcIFC6ZdMPuytjPm/oFhutv8kW9u0hvQf45+7+w2b2AuAu4GeAT7n7u8zs7cDbgbft+Tgi1zRRjigQMfzapnFNnGxElpm8Vx3k9z77Cms4nu6c1GBmLwL+EvA+AHf/I3f/JvAQ8Fh9s8eA1+/bSDmsJiZwU5gE3rQNpQ0ZW5geQyvxDHvkkg+rP9okASvTo+l3bfv0kB4Afh/4x2b2F4AngZ8EXuLuF/Vtvga8ZNE3m9nDwMN7PL4cyC7BJYW5pU3asDj5AcwHzO+0rQ6TSLv2CUjPB24Cf9Pdx2b2HqrhuSl3d7PFbzXd/VHgUYBlt+mTaWWb2aea6P4+i07ShY+unaAXGQejDyUygo0ZByhGl6+Xo7kmkTbtE5CeBp5290m9lo9RBaTfM7Mb7n5hZjeAZ/dtZB+YQzmEohxOLyuHUGSQ4BUJ1cl4g7cNbs6Y4fobJi4QIQbKePlcxgHl44m0aOeA5O5fM7Ovmtmfc/cvAq8GPlcfbwbeVf//8UZa2hNXhr9C98NaE8XICXEMCzLXylBg5mzURXKjGAcgUNYZsSkM321j8TBeoAgB4uj6z0ERKh9e7SNLuP6iBWKVNFaPWriBldU11+9nQJLDG5nbN8vubwIfqjPsvgT8GFWixC+a2VuArwBv2PMxpGXVn6hh7rjBaFAHoFrBgpPwivuiGFWp1OVlUGrSLuneTSzgLQuIjCdnqep+Q5FFL1dqVpeQWubKr4jhS39/tSV6G/YKSO7+G8CDC6569T73K93w+m2hTb/Y/Y7MnDIYEFYmRewSJA71PYu//+r9VKua9u8BxjUbuoWerPqPZWTEfr9e+9r0oRVwDi+LSg2SITcG5vXc2WWiQ1GCFyPMq8SB3MurhCIQfXR1+6VJUN9khBODesuURcbBerHRpFEVPB5NPt/jfqS/ki+uKvkyryreDkaXPQAvRlSna8tubmmRSRHXMZfH0MqNK4usKx48aHYhfHdm167UUy+7HNJv6iFJJ9xgHGKWGeKz81HL1jGNGFy7fJcT6mToswhXf1ChCL0ZxhOZUEAS2dImPTsrh8zfzEdsvY7JMWwEs0ObTpVXkfr+kzVsozAAABr8SURBVCLbUkASaVi1H9PV9cGhqFPGt+wnLZqLmowGLkqEOMZeU5cJEtIsBSSRzFQn4NGVaapyWA3rqdckOVNSgxy1Xer0dV04tprgN9wuj2JUbWWwLn28b5QM0S/qIclR62JNU1NmT7RuYF5V28hlKG/SwVPAkAkFJGmdmdcnn4E2LZzj9celWeIbLmia3Gw0V1pgPKQX65jkOGjITtrnhtVHmxPQ64bS2hhq2/c+DWfImNKGC4/tVpBe319nEHtfSF96RD0kOZhdOkfb1KBbd7tN72fTx2yiPt7EsoKu5dCqWnkLTIL7yp+pG2Gu+HqV/JDOOqbZp5dy53ldaE+57blQQJKkdTFf01QAbEKM5bQqfBmvPt44UFVyWHUmNK4FNLcRQ8aLby/XVLVzV5ZknbmtKTDtQQFJJFFVwKtSFKqhwavDg5HAINbbICyxcB0TQBlhwXDjvr2m2YSKXe4r1UQHr7JG1txo8xqGspgCksixccPnTq6XxW73F0JBMfJpVW+dn2VTSmoQOTYLirk2Wex2EB2vKsYqGMlW1ENqk7nGlI9Qk8kOk/uDyzmrthbmVgtrF6eIbzX85vsHo1wSHaRZCkgtcDcMZzrsrL+oo9J0ssOy+6uCxPWq4jsxKObyyNxN65jkoDRk1wZzGA8ZB0WiXaW4Zii3x93eXMEdg2KkdUxyOOohtaSMsf5jziwomVcZRVQVFroqpdxGSnVXJX9iGRsNSm0P3U1UO7sa47mHOfQ6Jg3fHQ8FpDbU23f7KL+J3XEwinF1wskpnDY9byM1uzooWCXnjUDrmKQF+QSkjDY9sZlexrzZk2ZI6Px5WW8OygIgYl6diros99Pk
ffUlYB1yCPDaS29Vj2XI4v2YRPaRRUDKKBYBVHXb5i7zavwDnxnmsMLxmEgWnls1RBcvA9Pk5NMGo9pBtdq0rj3mwHhYB9l+iITpWp8ufnlswTqmcTAlP8jesghISZywt7GgwYtXcCcSjOBKA23+4rYe8gDL2ifbMlTv6fshlpHRAKqCNh38fdiC39zoBA3jyZ6yCEjHqvWe4REso8/h6VVJD5v3LspQVL3rlN7QUFdoCIHtN2oXqSggJWizMo4NPE7/41HyHMMGENl86DKOSGeot+YYNgKISf9S+fTDBvT3cXAKSAnSH0G/rEpCsKsfNpPgiXL2KWzatis73jbbnOWPWUekcri6lcXI9Y6tA8ktjPUjONr8GV1+SPM4xmWWq7L7fPrxOH82E5PluHLckuohbdWdzti+b7xs3cmrHKZbHSCnxU072mZNlOEzr5cfbLi2KXm19lIxWt9LksNLKiCZ1/Fo3b4jm9hk/5IuuDVa327+KZbDOv02wTznUISjKO637Xqny6oHgyR/ZZdpI+lm0V228SNRMEpTUgGpUTn9ZW/JsWnwLodVKZeJGEsCMZHNqWUdrwv0wCDLML3qryzH5yPd6m9A6rHJBLJRrf24urW1QlFOJvXi+vj+qf99YWlaXgEp1WG4eQduZ1MlcfpWD86pXoZqeCbRObWMbbCjdzOP08zdSAaSy7JbKYdglLE+BSOcqkr1OKikjTQntzpmmckrIHVt01/GIwycyWX1mU/n0yiLfgVb6c4R/m0fkgLSNvTLuJRO+CKyLwUkkYYl11sUyYQCkoiIJEEBSaRhGr4U2Y0C0jaUYbOUhqlEZF8KSNvYNKnhgIErJLIQdpdeQZt1YJ10fjbSI3pT2iotjM2Yu2HDkpPzC87ObxPCfvXrDrkwtlq06jBuZyfXIWMthZXm6fzTqrwCUi6/DLm0c86h5z7KoRHbihql6vnJcsUoz7/RvssrIImI7GGbrV9Ui+/w9ppDMrO/Y2a/ZWafNbMPm9m3m9kDZjY2s6fM7KNm9oKmGrsVjfWKyBybflx/6AxyeDsHJDO7F/hbwIPu/n3A84A3Aj8PvNvdvwf4BvCWJhq6fQOPq0t+5+K00ftT1tzu9LMT2c2+WXbPB/64mT0fuAu4AH4Q+Fh9/WPA6/d8DNnA2QncPD3h5ulJI/entTS708+uJ+o0UDuGbawTsXNAcvdngL8P/C5VIPoW8CTwTXd/rr7Z08C9+zZS1jg7mf53doKqWydGPaZMmeNuuLJ7D2afIbsXAw8BDwDfDXwH8Jotvv9hM3vCzJ7YtQ0yYxKNJDnqMWXKDTOvDq7PMmmeunn7ZNn9ZeB33P33Aczsl4AfAO42s+fXvaT7gGcWfbO7Pwo8Wn+v3n6ISFps+uEaB8xcg3kN2ycg/S5QmNldwP8DvBp4AvgV4IeBjwBvBj6+byNljZOzK1/uu0BW+kdv5ps1/XHqB9uofeaQxlTJC+fAZ+r7ehR4G/B3zewp4LuA9zXQTlnj5AzOzm9zdn6766ZIwjZLeG7u6DOvU8NnD8Wn/ey1MNbd3wm8c+7iLwGv3Od+ZXuP3Ni/dJD0m86VzVo0oqef8X6Or7hqj97CmDlljBvXsdsl2yuXDLFc2ikiy+VVOqiJ9Mse5U9UQwaOj6yqURpXF0fdJdsrlwyxXNopIsvl1UPKOZi4Nd47m5RBMYNBzPhns0AbPZ6uelHqvYlsJq+AlKs6EE3WM/Ro1LA1bfR4uupFqfcmshkFpAOp1ixUpYYVj/pNPSKR3eQ1h5Sbua6QAtFxUI9IZDfqIbVsuj5B4UhEZCUFpJZ5HZEOFY40XCQiudKQXVNmEhemJvNGB6ThIhHJVV49pITT08wcbLJ7ijGpD5xui69LsXfVRZs2fcwUf14iOcsrIKW2DmmytmjSOyLvOl4p9q62adO6ALFpANn0MWMZFZREGpRXQErQZG1Ryr23Y7EukPRpbZNIH/U3IB0qQLjl2yUSEUlIUgFpGkNm
h8J2PZq6nxWHmatjtEBOw1h9LjgrkpuksuxyK6vjmSUtHEpOw1h9LjgrkpukAhJo5EtE5FglNWQnIiLHSwFJFtpnnqTvcyx9f34iXVFAkoX2mSdZ9L19OolrDkmkHQpIchA6iYvIOgpIInvoU89PpGsKSJKdlNYOqecn0hwFJMmO1g6J9JMCkjRCQ1cisi8FJGmEeiAisi8FpJ5Lab6lD/SzEWmPAlLPddVzSeHE3UQbDvU8Wq4DLJKF5GrZSfeaCGIpDOG18TzaeF6KFyIV9ZAkW9v2XlLotYnIcgpICfGZ48oXGxzjcHzvs7ftraTQaxOR5TRklwgHDMfdMBzGw62+P8ZA4PAn3FCEoz3Rr3vux/yzEdmFAlJC3A3MoRxSFlu+oy9ja+Fo1Ym1yxNuGyf8be5z3e0UjES2o4Aka6V6Ym2jXZ08Vz/Q4xzfqK5kRgFJpEPTWGQtR6U691sxSVKmgJQIc3Dz6rw07ro1eZpk0UXSy6aLwGDZlZPXvUWH6oSJ7EMBKRXmWDkkWEjwdJqHSGDEgKLrhiwxUv9EZCUFpES4G2MLUKcmpDpvk7qUB6bSbJVIOhSQEqNAJCLHSgtjRUQkCQpIPZNqeZw22pXqcxWR3Sgg9UyqQ36HXjPkgDs4ju9Si6npw5XpJrKO5pCklwwn2JjAEIYJBAPzpBMuRFKgHlJGlg1RtTl0lfsGf5M1SebdHiKynnpIGemintwu953EsKEbEaccGsVIEWHWbCHfNplVjySyqbU9JDN7v5k9a2afnbnsO83sk2b22/X/L64vNzP7BTN7ysw+bWY322y8HEZKPZ5tuKFgtID5ZVBq83C37odKJSubDNl9AHjN3GVvBz7l7i8DPlV/DfBa4GX18TDw3maaKV1KoscjIr23NiC5+68BfzB38UPAY/XnjwGvn7n8g14pgbvN7EZTjZV85NqrEpHu7JrU8BJ3v6g//xrwkvrze4Gvztzu6fqya8zsYTN7wsye2LENkjD1qrrndnmI5GDvpAZ3d7Pt84jc/VHgUYBdvl9ENuA2k1bg9UdFKEnTrj2k35sMxdX/P1tf/gzw0pnb3VdfJiIdsPqDG5dJBnr/J4naNSA9Dry5/vzNwMdnLv/rdbZdAXxrZmhPRDpi1GnYM+N4jmlIT5KydsjOzD4MvAq4x8yeBt4JvAv4RTN7C/AV4A31zX8ZeB3wFPCHwI+10GYR2YldXxbkph6TJGNtQHL3Ny256tULbuvAT+zbKBFpx7XOkHlVbW9BL0lxSg5NpYNEjlmd9DB7aAxPuqLSQSLHzKYfrlykzpF0QQFJRBaa7Shp+E4OQQFJRK4ysDoaKU1cDklzSCJy3XRCSeRwFJBEZKlJksNkzZJImxSQMqIN+uTgDGzB8iWRNiggZeRQG/TNBpRsN+g7Ym1UX1DvSA5BAUmuUUDJn1ElJiiQSE4UkET6pp7zWVQpSCRlSvsWycA2PR2rNygXyY0CkkgmqgoK6wONwpHkSgFJJBdumCKN9JjmkEREJAkKSJINrW8S6TcN2Uk2ji0dXSnbcmwUkERSVu9XVH+qZAXpNQUkkYSZXebMKRhJ32kOKVGaL6kc489hWvqnjRpAIglTQErUsc2XLHOsPwebjM8pHskRUUCSbPW291TXoFMskmOjgCTZOtbek0hfKSDJNU30PHrbexGR1ijLTq5poueh3st2ruQueGfNEOmUekgiiZjsYaRtI+RYKSCJpELBSI6chuxEOmaAZ7DeaNPtL6a3N22EIdtRD0my06eEiUkNBkt8zdG0VsSknZscKT8hSZICUs/06WS9jBImurNVPBLZkgJSz+hkLSK5UkBKhTmxjMRYrrzZfA/oGHpEInIczL37RQ9WzX4eNXcY22VwifQz0Iy8wKxaamPuBBs3dt8hFBQjoL7/+kEau/99meoBiUw86e4Pzl+ogJSIKh+p/z8G53J/n8YDxtwJP7WfpmKRyNTCgKS070Qcy443du2LBp+zrfxSRBKn
gCSdaaMHoyAkki8FJOmM4Y0uCDXzq0OCIpIVBSTp1rI5JLet55dcSQMiWVNA6pDXH8eh32fRQawLzmyz8ZzyXA7CHcbDrltx3AYjqkodooDUJXMINiayeu1R7kqG4KPLpLpN//h26CXJ5hywceDOxUXXTTluw7OuW5AMBaQE3Lk47boJrYrlGeUwYiNfnVk3CUCT/zcNRgpcezs76boFx+nkDIoQII6uLVs4RqrUIK2LsSQGKG24OrNuElS2DS4KRpKzGCgZEqwkvdVzh6WAJK27c3HKzdMTihAUO0RmnJ3Ayc1TTu9cEEMe25C0SQGpCw6OV5+URdetEZFE2DhcnhuO0NqAZGbvN7NnzeyzM5f9PTP7gpl92sz+mZndPXPdO8zsKTP7opn91bYanjdnWBoMx2uLqYrIcbhzccqd0wssjKFcM7zdU5skNXwA+F+AD85c9kngHe7+nJn9PPAO4G1m9r3AG4E/D3w38C/N7L9y9/+32WbnafILNh4aBWXvkxlEZHOTxJI7Z6ecF7cxj0eX57C2h+Tuvwb8wdxl/8Ldn6u/LIH76s8fAj7i7v/R3X8HeAp4ZYPtzZdX1a2tDBD7WclbRPZ3dlKdIoKV1fniiPpKTaR9/zjw0frze+HKopqn68uOngPDsRFjibYw2pDSueVI3bk4hRO4cwqPbLN2L3N7BSQz+1ngOeBDO3zvw8DD+zx+FurVoAYETMN021AwkiM1Gb47OaMupeFHsU5p5yw7M/tR4BbwI365qdIzwEtnbnZffdk17v6ouz+4aE+MvnCgHFaVCkLdOxIR2UoMhNKqdUo9f4+2U0Ays9cAPw38kLv/4cxVjwNvNLMXmtkDwMuAf71/M/NkXi0KPT+54ObpiXpHIrKVyTqli/OTaolIz0cN1g7ZmdmHgVcB95jZ08A7qbLqXgh80qqqgKW7/w/u/ltm9ovA56iG8n7iKDPsHNyc8dCAyHnX7RGR7MVYVkMug3qblR4O360NSO7+pgUXv2/F7X8O+Ll9GpU7t2qdUYwlhP7XqhOR9t25OIVTgFvY7YATezelpOKqDZpUsx4Pq2CkQCQiTbmS6NBTKh3UFIdxvXZA64xERLanHlJTzIEhMUAkqHckIrIlBaQGFSWEulbqObcJRIqg4CQisgkFpD3N7oLqgwgeL680p4xDzrX5mYjIWgpITXG7lvHibkQriTc6aVFyQiiIm67sU9kgkaOjgLQnozp3LrzOwX2Lk3DPjcyJm25ApmDUHq8+2HhIIALqwksaFJAasPQUOy091bfVArux6QfpkptjYcydiwsUjCQlCkjSLQ3NdeZMsUgSo4C0K51Dd3KtYLGCkYjUFJB2MN2eRCfT7SyaP9p0TklEek8BSbrjhjUZ1BXcRLKmgCTdaTrLQfFIJGuqZSciIklQQBIRkSQoIImISBIUkEREJAkKSCIikgQFJBERSYICkoiIJEEBSUREkqCAJCIiSVBAEpGDOznb7/pD3acclgKSiBzcuq0vdtkao437lMNSLTuRo+G4G+OhcefiBHUY0ndyBue3z6Aop5edA342wIjdNawlCkgix8CB8RAbVie2O5x22x7ZWIhQeHEUxezNvfs9fazRPQja59RbIeXV7O65Xd+gTw7DobTA6Z2LrlsiSyya43rkxm0YDcB691fzpLs/OH+hekgiIgl45Mbtqjs0GF29onexaDklNYiIJMIHseoNzR5HFJEUkEREJAkKSCIikgTNIe3jGNJeREQORD2kHRhVLNKx/aEQLptoo1KDpE89pB3pxCrSnjYqNUj61EMSEZEkKCCJiEgSFJBERCQJmkPKRM6VipTMICKbUEDKhOGUwzxP6+Y+TU1UcBKRZRSQMuFuDK0kZFhyvsQYuAKRiKymOSRpXREChuPm2Q47ikj71EOS1oUiAGOG1HNhHbdHRNKkgCStmwwzRkLHLTlukcDZedetkGXKGLF43G/ZFJBEjoFBZICPjvdkl7wRuMUjDkcbzCGZ
2fvN7Fkz++yC637KzNzM7qm/NjP7BTN7ysw+bWY322i0iOzCrm21oyOxo+tfkY5tktTwAeA18xea2UuBvwL87szFrwVeVh8PA+/dv4kiInIM1gYkd/814A8WXPVu4Kep5qknHgI+6JUSuNvMbjTSUhER6bWd0r7N7CHgGXf/zbmr7gW+OvP10/Vli+7jYTN7wsye2KUNIiLSL1snNZjZXcDPUA3X7czdHwUere9Tq1NERI7cLll2fxZ4APhNMwO4Dzg3s1cCzwAvnbntffVlIiIiK20dkNz9M8CfnnxtZl8GHnT3r5vZ48DfMLOPAAPgW+5+0VRjZXOTNT+xTKfUUBmLrpsgIglbG5DM7MPAq4B7zOxp4J3u/r4lN/9l4HXAU8AfAj/WUDtlByEUxMKrctsJKAbJNEVEErQ2ILn7m9Zcf//M5w78xP7NkiYUI4eY1uKGhJoiIolRcVUREUmCApKIiCRBtewyEssIxeYFSrUZnojkxKppn44boXVIa/n0w+Zs+kFEJClPuvuD8xeqh5QJBRcR6TvNIYmISBJS6SF9HfgKcE/9eV/o+aSvb8+pb88H+vec+vZ8YPvn9F8uujCJOaQJM3ti0bhirvR80te359S35wP9e059ez7Q3HPSkJ2IiCRBAUlERJKQWkB6tOsGNEzPJ319e059ez7Qv+fUt+cDDT2npOaQRETkeKXWQxIRkSOVREAys9eY2RfN7Ckze3vX7dmWmb3UzH7FzD5nZr9lZj9ZX/6dZvZJM/vt+v8Xd93WbZnZ88zs/zazT9RfP2Bm4/q1+qiZvaDrNm7KzO42s4+Z2RfM7PNm9t/m/hqZ2d+pf+c+a2YfNrNvz+01MrP3m9mzZvbZmcsWvi5W+YX6uX3azG521/LFljyfv1f/3n3azP6Zmd09c9076ufzRTP7q920erVFz2nmup8yMzeze+qvd36NOg9IZvY84B8BrwW+F3iTmX1vt63a2nPAT7n79wIF8BP1c3g78Cl3fxnwqfrr3Pwk8PmZr38eeLe7fw/wDeAtnbRqN+8B/rm7vxz4C1TPK9vXyMzuBf4W1QaZ3wc8D3gj+b1GHwBeM3fZstfltcDL6uNh4L0HauM2PsD15/NJ4Pvc/b8G/g3wDoD6PPFG4M/X33OnPiem5gNcf06Y2UuBvwL87szFO79GnQck4JXAU+7+JXf/I+AjwEMdt2kr7n7h7uf15/+e6kR3L9XzeKy+2WPA67tp4W7M7D7gBIj11wb8IPCx+ibZPCczexHwl4D3Abj7H7n7N8n8NaJa3P7Hzez5wF3ABZm9Ru7+a8AfzF287HV5CPigV0rgbjO7cZiWbmbR83H3f+Huz9VflsB99ecPAR9x9//o7r9DtbnpKw/W2A0teY0A3g38NFcrbe78GqUQkO4Fvjrz9dP1ZVkys/uB7wfGwEtmtnD/GvCSjpq1q39I9cv2/9VffxfwzZk/rJxeqweA3wf+cT0EGc3sO8j4NXL3Z4C/T/Xu9AL4FvAk+b5Gs5a9Ln04X/w48H/Un2f7fMzsIeAZd//Nuat2fk4pBKTeMLM/AfxT4G+7+7+bva7eTTeblEYzuwU86+5Pdt2WhjwfuAm8192/H/gPzA3PZfgavZjq3egDwHcD38GCYZXc5fa6rGJmP0s1xP+hrtuyDzO7C/gZ4H9s8n5TCEjPAC+d+fq++rKsmNm3UQWjD7n7L9UX/96kq1r//2xX7dvBDwA/ZGZfphpG/UGqOZi76+EhyOu1ehp42t3H9dcfowpQOb9Gfxn4HXf/fXf/T8AvUb1uub5Gs5a9LtmeL8zsR4FbwI/45XqbXJ/Pn6V6I/Sb9TniPuDczP5z9nhOKQSkXwdeVmcGvYBqgu/xjtu0lXpu5X3A5939H8xc9Tjw5vrzNwMfP3TbduXu73D3+9z9fqrX5F+5+48AvwL8cH2zbJ6Tu38N+KqZ/bn6olcDnyPj14hqqK4ws7vq38HJc8ryNZqz7HV5HPjrdSZXAXxr
ZmgvWWb2Gqrh7x9y9z+cuepx4I1m9kIze4AqEeBfd9HGbbj7Z9z9T7v7/fU54mngZv13tvtr5O6dH8DrqDJP/i3ws123Z4f2/0WqIYVPA79RH6+jmnP5FPDbwL8EvrPrtu74/F4FfKL+/M9Q/cE8BfxvwAu7bt8Wz+O/AZ6oX6f/HXhx7q8R8D8DXwA+C/wT4IW5vUbAh6nmwP5TfWJ7y7LXhWpXsH9Unys+Q5Vh2Plz2OD5PEU1rzI5P/yvM7f/2fr5fBF4bdft3/Q5zV3/ZeCefV8jVWoQEZEkpDBkJyIiooAkIiJpUEASEZEkKCCJiEgSFJBERCQJCkgiIpIEBSQREUmCApKIiCTh/wf7dqMu5KuWYAAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 504x504 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Build the per-pixel classification map from the trained model.\n",
    "# Pixels labeled 0 in the ground truth are skipped and stay 0 (unclassified).\n",
    "# NOTE: allocate with the same HEIGHT/WIDTH the loops iterate over --\n",
    "# the original used lowercase height/width here, which can mismatch.\n",
    "outputs = np.zeros((HEIGHT, WIDTH))\n",
    "for i in range(HEIGHT):\n",
    "    # Throttled progress report instead of one line per row.\n",
    "    if i % 25 == 0:\n",
    "        print(\"row \" + str(i) + \" / \" + str(HEIGHT))\n",
    "    for j in range(WIDTH):\n",
    "        target = int(target_mat[i, j])\n",
    "        if target == 0:\n",
    "            continue\n",
    "        # Patch(i, j) presumably returns (bands, h, w); move bands last -- TODO confirm\n",
    "        image_patch = np.transpose(Patch(i, j), (1, 2, 0))\n",
    "        # Shape expected by the 3-D CNN: (batch, h, w, bands, channels)\n",
    "        X_test_image = image_patch.reshape(1, image_patch.shape[0], image_patch.shape[1], image_patch.shape[2], 1).astype('float32')\n",
    "        prediction = np.argmax(model.predict(X_test_image), axis=1)\n",
    "        # Predicted classes are stored 1-based so 0 can remain \"unlabeled\".\n",
    "        outputs[i, j] = prediction + 1\n",
    "ground_truth = spectral.imshow(classes=target_mat, figsize=(7, 7))\n",
    "predict_image = spectral.imshow(classes=outputs.astype(int), figsize=(7, 7))\n",
    "spectral.save_rgb(\"predictions.jpg\", outputs.astype(int), colors=spectral.spy_colors)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "id": "TZ3L8dINF6gy"
   },
   "outputs": [],
   "source": [
    "# Save the ground-truth label map as a color-coded image for side-by-side comparison\n",
    "spectral.save_rgb(\"LK 1.jpg\", target_mat, colors=spectral.spy_colors)"
   ]
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "colab": {
   "authorship_tag": "ABX9TyPPFjFvgnh+/kYbqYVKA2rq",
   "collapsed_sections": [],
   "machine_shape": "hm",
   "mount_file_id": "1gBYV_S3gsomjVTlBvDmISD45Vrmjc7ph",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
