{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19",
    "_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5",
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [],
   "source": [
     "# This Python 3 environment comes with many helpful analytics libraries installed\n",
     "# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python\n",
     "# For example, here's several helpful packages to load\n",
     "\n",
     "import numpy as np # linear algebra\n",
     "import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)\n",
     "\n",
     "# Input data files are available in the read-only \"../input/\" directory\n",
     "# For example, running this (by clicking run or pressing Shift+Enter) will list all files under the input directory\n",
     "\n",
     "import os\n",
     "# Recursively walk the input directory tree and print every file path (sanity check\n",
     "# that the attached dataset is visible to the kernel)\n",
     "for dirname, _, filenames in os.walk('/kaggle/input'):\n",
     "    for filename in filenames:\n",
     "        print(os.path.join(dirname, filename))\n",
     "\n",
     "# You can write up to 20GB to the current directory (/kaggle/working/) that gets preserved as output when you create a version using \"Save & Run All \" \n",
     "# You can also write temporary files to /kaggle/temp/, but they won't be saved outside of the current session"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "Step 1. 准备数据 (Prepare the data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "_cell_guid": "79c7e3d0-c299-4dcb-8224-4455121ee9b0",
    "_uuid": "d629ff2d2480ee46fbb7e2d37f6b5fab8052498a"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "img_name[-1]: 1500_c6s3_086542_02.jpg\n",
      "img_path[-1]: ../input/market-1501/Market-1501-v15.09.15/bounding_box_train/1500_c6s3_086542_02.jpg\n",
      "Number of person IDs:  751\n",
      "train_person_ids.shape:  (10348, 751)\n",
      "val_person_ids.shape:  (2588, 751)\n"
     ]
    }
   ],
   "source": [
    "\"\"\"\n",
    "Market-1501 dataset\n",
    "train data: 1000 IDs, 20 photos / ID\n",
    "problem: ReID => Search / Retrieval => visual feature, given a photo\n",
    "gallery / database: image with IDs\n",
    "query图像: 查询对应IDs\n",
    "\"\"\"\n",
    "from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n",
    "from sklearn.model_selection import train_test_split\n",
    "import os\n",
    "import warnings\n",
    "warnings.filterwarnings('ignore')\n",
    "# data目录\n",
    "data_folder = '../input/market-1501/Market-1501-v15.09.15/bounding_box_train/'\n",
    "image_names = os.listdir(data_folder)\n",
    "image_names = sorted(image_names, key=lambda x:x[:4])\n",
    "image_names = image_names[:len(image_names)-1]  \n",
    "\n",
    "img_name = image_names  \n",
    "img_path = []\n",
    "for i in range(len(img_name)):\n",
    "    img_path.append(data_folder + img_name[i])    \n",
    "print('img_name[-1]:', img_name[-1])\n",
    "print('img_path[-1]:', img_path[-1])\n",
    "\n",
    "person_id_original_list = [x[:4] for x in img_name] \n",
    "nbr_person_ids = len(set(person_id_original_list))\n",
    "print('Number of person IDs: ', nbr_person_ids)\n",
    "\n",
    "id_encoder = LabelEncoder()   \n",
    "id_encoder.fit(person_id_original_list)\n",
    "person_id_encoded = id_encoder.transform(person_id_original_list) \n",
    "\n",
    "OneHotEncoder = OneHotEncoder()\n",
    "person_id_encoded = OneHotEncoder.fit_transform(person_id_encoded.reshape(-1, 1)).toarray() # one-hot encoding\n",
    "person_id_encoded = person_id_encoded.astype(int)\n",
    "\n",
    "\n",
    "train_img_path, val_img_path, train_person_ids, val_person_ids = train_test_split(img_path, person_id_encoded, test_size=0.2,\n",
    "                                                                                  random_state=20, shuffle=True)\n",
    "print('train_person_ids.shape: ', train_person_ids.shape)\n",
    "print('val_person_ids.shape: ', val_person_ids.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"\n",
    "使用triplet loss 一次需要输入三张图片x，对应两个y\n",
    "\"\"\"\n",
    "import numpy as np\n",
    "import cv2\n",
    "import imgaug as ia\n",
    "import imgaug.augmenters as iaa\n",
    "from sklearn.utils import shuffle as shuffle_tuple\n",
    "import random\n",
    "\n",
    "sometimes = lambda aug: iaa.Sometimes(0.5, aug)\n",
    "seq = iaa.Sequential(\n",
    "    [\n",
    "        # apply the following augmenters to most images\n",
    "        iaa.Fliplr(0.5), # horizontally flip 50% of all images\n",
    "        iaa.Flipud(0.2), # vertically flip 20% of all images\n",
    "        # Cutout / Random Erasing 随机擦除一小块区域\n",
    "        iaa.Cutout(nb_iterations=1, size=0.2, squared=False),\n",
    "        # crop images by -5% to 10% of their height/width\n",
    "        sometimes(iaa.CropAndPad(\n",
    "            percent=(-0.05, 0.1),\n",
    "            pad_mode=ia.ALL,\n",
    "            pad_cval=(0, 255)\n",
    "        )),\n",
    "        sometimes(iaa.Affine(\n",
    "            scale={\"x\": (0.8, 1.2), \"y\": (0.8, 1.2)}, # scale images to 80-120% of their size, individually per axis\n",
    "            translate_percent={\"x\": (-0.2, 0.2), \"y\": (-0.2, 0.2)}, # translate by -20 to +20 percent (per axis)\n",
    "            rotate=(-45, 45), # rotate by -45 to +45 degrees\n",
    "            shear=(-16, 16), # shear by -16 to +16 degrees\n",
    "            order=[0, 1], # use nearest neighbour or bilinear interpolation (fast)\n",
    "            cval=(0, 255), # if mode is constant, use a cval between 0 and 255\n",
    "            mode=ia.ALL # use any of scikit-image's warping modes (see 2nd image from the top for examples)\n",
    "        )),\n",
    "    ],\n",
    "    random_order=True\n",
    ")\n",
    "\n",
    "def load_img_batch(img_path_list, img_label_list, img_width, img_height, nbr_classes):\n",
    "    batch_size = len(img_path_list)\n",
    "    X_batch = np.zeros((batch_size, img_height, img_width, 3))\n",
    "    if nbr_classes:\n",
    "        Y_batch = np.zeros((batch_size, nbr_classes))\n",
    "\n",
    "    for i in range(batch_size):\n",
    "        img_path = img_path_list[i]\n",
    "        img_bgr = cv2.imread(img_path)  # img.shape: (128, 64, 3)\n",
    "        if img_bgr.shape != (img_height, img_width, 3):\n",
    "            img_bgr = cv2.resize(img_bgr, (img_width, img_height))\n",
    "        img = img_bgr[:, :, ::-1]\n",
    "        X_batch[i] = img\n",
    "\n",
    "        if img_label_list is not None:\n",
    "            # label = img_label_list[i]  老师这一句写错了\n",
    "            label_index = np.argmax(img_label_list[i])\n",
    "            Y_batch[i, label_index] = 1\n",
    " \n",
    "    if img_label_list is not None:\n",
    "        return (X_batch, Y_batch)\n",
    " \n",
    "    else:\n",
    "        return X_batch\n",
    "    \n",
    "def generator_batch(img_path_list, img_label_list, nbr_classes, img_width, img_height,\n",
    "                     batch_size = 32, augment = False, shuffle = False):\n",
    "    assert len(img_path_list) == len(img_label_list)\n",
    "    N = len(img_path_list)\n",
    "\n",
    "    if shuffle:  # 保证path和label一起\n",
    "        img_path_list, img_label_list = shuffle_tuple(img_path_list, img_label_list)\n",
    "\n",
    "    batch_index = 0  \n",
    "    while True:\n",
    "        current_index = (batch_index * batch_size) % N\n",
    "        if N >= (current_index + batch_size):\n",
    "            current_batch_size = batch_size\n",
    "            batch_index += 1\n",
    "        else:\n",
    "            current_batch_size = N - current_index  \n",
    "            batch_index = 0\n",
    "\n",
    "        X_batch, Y_batch = load_img_batch(img_path_list[current_index : (current_index + current_batch_size)],\n",
    "                                          img_label_list[current_index : (current_index + current_batch_size)],\n",
    "                                          img_width, img_height, nbr_classes)\n",
    "        if augment:   # augment = True\n",
    "            X_batch = X_batch.astype(np.uint8) \n",
    "            X_batch_aug = seq.augment_images(X_batch)\n",
    "            X_batch = X_batch_aug\n",
    "        X_batch = X_batch / 255\n",
    "        X_batch = (X_batch - np.array([0.485, 0.456, 0.406])) / np.array([0.229, 0.224, 0.225])\n",
    "        \n",
    "        y_batch = []  # triplet 的接口要求labelencoding 而不是 onehot\n",
    "        for item in Y_batch:\n",
    "            y_batch.append(np.argmax(item))\n",
    "        y_batch = np.array(y_batch).astype(np.uint8)\n",
    "        \n",
    "        yield (X_batch, [y_batch, Y_batch])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Input size expected by the network; Market-1501 crops are 128 (H) x 64 (W)\n",
     "img_width = 64\n",
     "img_height = 128\n",
     "batch_size = 64   # images per training batch\n",
     "nbr_epochs = 200  # maximum training epochs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5\n",
      "94773248/94765736 [==============================] - 1s 0us/step\n",
      "Model: \"functional_3\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_2 (InputLayer)            [(None, 128, 64, 3)] 0                                            \n",
      "__________________________________________________________________________________________________\n",
      "conv1_pad (ZeroPadding2D)       (None, 134, 70, 3)   0           input_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1_conv (Conv2D)             (None, 64, 32, 64)   9472        conv1_pad[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "conv1_bn (BatchNormalization)   (None, 64, 32, 64)   256         conv1_conv[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "conv1_relu (Activation)         (None, 64, 32, 64)   0           conv1_bn[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "pool1_pad (ZeroPadding2D)       (None, 66, 34, 64)   0           conv1_relu[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "pool1_pool (MaxPooling2D)       (None, 32, 16, 64)   0           pool1_pad[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_1_conv (Conv2D)    (None, 32, 16, 64)   4160        pool1_pool[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_1_bn (BatchNormali (None, 32, 16, 64)   256         conv2_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_1_relu (Activation (None, 32, 16, 64)   0           conv2_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_2_conv (Conv2D)    (None, 32, 16, 64)   36928       conv2_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_2_bn (BatchNormali (None, 32, 16, 64)   256         conv2_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_2_relu (Activation (None, 32, 16, 64)   0           conv2_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_0_conv (Conv2D)    (None, 32, 16, 256)  16640       pool1_pool[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_3_conv (Conv2D)    (None, 32, 16, 256)  16640       conv2_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_0_bn (BatchNormali (None, 32, 16, 256)  1024        conv2_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_3_bn (BatchNormali (None, 32, 16, 256)  1024        conv2_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_add (Add)          (None, 32, 16, 256)  0           conv2_block1_0_bn[0][0]          \n",
      "                                                                 conv2_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_out (Activation)   (None, 32, 16, 256)  0           conv2_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_1_conv (Conv2D)    (None, 32, 16, 64)   16448       conv2_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_1_bn (BatchNormali (None, 32, 16, 64)   256         conv2_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_1_relu (Activation (None, 32, 16, 64)   0           conv2_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_2_conv (Conv2D)    (None, 32, 16, 64)   36928       conv2_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_2_bn (BatchNormali (None, 32, 16, 64)   256         conv2_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_2_relu (Activation (None, 32, 16, 64)   0           conv2_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_3_conv (Conv2D)    (None, 32, 16, 256)  16640       conv2_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_3_bn (BatchNormali (None, 32, 16, 256)  1024        conv2_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_add (Add)          (None, 32, 16, 256)  0           conv2_block1_out[0][0]           \n",
      "                                                                 conv2_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_out (Activation)   (None, 32, 16, 256)  0           conv2_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_1_conv (Conv2D)    (None, 32, 16, 64)   16448       conv2_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_1_bn (BatchNormali (None, 32, 16, 64)   256         conv2_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_1_relu (Activation (None, 32, 16, 64)   0           conv2_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_2_conv (Conv2D)    (None, 32, 16, 64)   36928       conv2_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_2_bn (BatchNormali (None, 32, 16, 64)   256         conv2_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_2_relu (Activation (None, 32, 16, 64)   0           conv2_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_3_conv (Conv2D)    (None, 32, 16, 256)  16640       conv2_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_3_bn (BatchNormali (None, 32, 16, 256)  1024        conv2_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_add (Add)          (None, 32, 16, 256)  0           conv2_block2_out[0][0]           \n",
      "                                                                 conv2_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_out (Activation)   (None, 32, 16, 256)  0           conv2_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_1_conv (Conv2D)    (None, 16, 8, 128)   32896       conv2_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_1_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_1_relu (Activation (None, 16, 8, 128)   0           conv3_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_2_conv (Conv2D)    (None, 16, 8, 128)   147584      conv3_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_2_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_2_relu (Activation (None, 16, 8, 128)   0           conv3_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_0_conv (Conv2D)    (None, 16, 8, 512)   131584      conv2_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_3_conv (Conv2D)    (None, 16, 8, 512)   66048       conv3_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_0_bn (BatchNormali (None, 16, 8, 512)   2048        conv3_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_3_bn (BatchNormali (None, 16, 8, 512)   2048        conv3_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_add (Add)          (None, 16, 8, 512)   0           conv3_block1_0_bn[0][0]          \n",
      "                                                                 conv3_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_out (Activation)   (None, 16, 8, 512)   0           conv3_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_1_conv (Conv2D)    (None, 16, 8, 128)   65664       conv3_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_1_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_1_relu (Activation (None, 16, 8, 128)   0           conv3_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_2_conv (Conv2D)    (None, 16, 8, 128)   147584      conv3_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_2_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_2_relu (Activation (None, 16, 8, 128)   0           conv3_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_3_conv (Conv2D)    (None, 16, 8, 512)   66048       conv3_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_3_bn (BatchNormali (None, 16, 8, 512)   2048        conv3_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_add (Add)          (None, 16, 8, 512)   0           conv3_block1_out[0][0]           \n",
      "                                                                 conv3_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_out (Activation)   (None, 16, 8, 512)   0           conv3_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_1_conv (Conv2D)    (None, 16, 8, 128)   65664       conv3_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_1_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_1_relu (Activation (None, 16, 8, 128)   0           conv3_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_2_conv (Conv2D)    (None, 16, 8, 128)   147584      conv3_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_2_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_2_relu (Activation (None, 16, 8, 128)   0           conv3_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_3_conv (Conv2D)    (None, 16, 8, 512)   66048       conv3_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_3_bn (BatchNormali (None, 16, 8, 512)   2048        conv3_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_add (Add)          (None, 16, 8, 512)   0           conv3_block2_out[0][0]           \n",
      "                                                                 conv3_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_out (Activation)   (None, 16, 8, 512)   0           conv3_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_1_conv (Conv2D)    (None, 16, 8, 128)   65664       conv3_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_1_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block4_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_1_relu (Activation (None, 16, 8, 128)   0           conv3_block4_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_2_conv (Conv2D)    (None, 16, 8, 128)   147584      conv3_block4_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_2_bn (BatchNormali (None, 16, 8, 128)   512         conv3_block4_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_2_relu (Activation (None, 16, 8, 128)   0           conv3_block4_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_3_conv (Conv2D)    (None, 16, 8, 512)   66048       conv3_block4_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_3_bn (BatchNormali (None, 16, 8, 512)   2048        conv3_block4_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_add (Add)          (None, 16, 8, 512)   0           conv3_block3_out[0][0]           \n",
      "                                                                 conv3_block4_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_out (Activation)   (None, 16, 8, 512)   0           conv3_block4_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_1_conv (Conv2D)    (None, 8, 4, 256)    131328      conv3_block4_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_1_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_1_relu (Activation (None, 8, 4, 256)    0           conv4_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_2_conv (Conv2D)    (None, 8, 4, 256)    590080      conv4_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_2_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_2_relu (Activation (None, 8, 4, 256)    0           conv4_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_0_conv (Conv2D)    (None, 8, 4, 1024)   525312      conv3_block4_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_3_conv (Conv2D)    (None, 8, 4, 1024)   263168      conv4_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_0_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_3_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_add (Add)          (None, 8, 4, 1024)   0           conv4_block1_0_bn[0][0]          \n",
      "                                                                 conv4_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_out (Activation)   (None, 8, 4, 1024)   0           conv4_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_1_conv (Conv2D)    (None, 8, 4, 256)    262400      conv4_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_1_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_1_relu (Activation (None, 8, 4, 256)    0           conv4_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_2_conv (Conv2D)    (None, 8, 4, 256)    590080      conv4_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_2_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_2_relu (Activation (None, 8, 4, 256)    0           conv4_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_3_conv (Conv2D)    (None, 8, 4, 1024)   263168      conv4_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_3_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_add (Add)          (None, 8, 4, 1024)   0           conv4_block1_out[0][0]           \n",
      "                                                                 conv4_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_out (Activation)   (None, 8, 4, 1024)   0           conv4_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_1_conv (Conv2D)    (None, 8, 4, 256)    262400      conv4_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_1_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_1_relu (Activation (None, 8, 4, 256)    0           conv4_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_2_conv (Conv2D)    (None, 8, 4, 256)    590080      conv4_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_2_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_2_relu (Activation (None, 8, 4, 256)    0           conv4_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_3_conv (Conv2D)    (None, 8, 4, 1024)   263168      conv4_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_3_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_add (Add)          (None, 8, 4, 1024)   0           conv4_block2_out[0][0]           \n",
      "                                                                 conv4_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_out (Activation)   (None, 8, 4, 1024)   0           conv4_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_1_conv (Conv2D)    (None, 8, 4, 256)    262400      conv4_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_1_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block4_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_1_relu (Activation (None, 8, 4, 256)    0           conv4_block4_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_2_conv (Conv2D)    (None, 8, 4, 256)    590080      conv4_block4_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_2_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block4_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_2_relu (Activation (None, 8, 4, 256)    0           conv4_block4_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_3_conv (Conv2D)    (None, 8, 4, 1024)   263168      conv4_block4_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_3_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block4_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_add (Add)          (None, 8, 4, 1024)   0           conv4_block3_out[0][0]           \n",
      "                                                                 conv4_block4_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_out (Activation)   (None, 8, 4, 1024)   0           conv4_block4_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_1_conv (Conv2D)    (None, 8, 4, 256)    262400      conv4_block4_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_1_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block5_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_1_relu (Activation (None, 8, 4, 256)    0           conv4_block5_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_2_conv (Conv2D)    (None, 8, 4, 256)    590080      conv4_block5_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_2_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block5_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_2_relu (Activation (None, 8, 4, 256)    0           conv4_block5_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_3_conv (Conv2D)    (None, 8, 4, 1024)   263168      conv4_block5_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_3_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block5_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_add (Add)          (None, 8, 4, 1024)   0           conv4_block4_out[0][0]           \n",
      "                                                                 conv4_block5_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_out (Activation)   (None, 8, 4, 1024)   0           conv4_block5_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_1_conv (Conv2D)    (None, 8, 4, 256)    262400      conv4_block5_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_1_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block6_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_1_relu (Activation (None, 8, 4, 256)    0           conv4_block6_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_2_conv (Conv2D)    (None, 8, 4, 256)    590080      conv4_block6_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_2_bn (BatchNormali (None, 8, 4, 256)    1024        conv4_block6_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_2_relu (Activation (None, 8, 4, 256)    0           conv4_block6_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_3_conv (Conv2D)    (None, 8, 4, 1024)   263168      conv4_block6_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_3_bn (BatchNormali (None, 8, 4, 1024)   4096        conv4_block6_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_add (Add)          (None, 8, 4, 1024)   0           conv4_block5_out[0][0]           \n",
      "                                                                 conv4_block6_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_out (Activation)   (None, 8, 4, 1024)   0           conv4_block6_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_1_conv (Conv2D)    (None, 4, 2, 512)    524800      conv4_block6_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_1_bn (BatchNormali (None, 4, 2, 512)    2048        conv5_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_1_relu (Activation (None, 4, 2, 512)    0           conv5_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_2_conv (Conv2D)    (None, 4, 2, 512)    2359808     conv5_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_2_bn (BatchNormali (None, 4, 2, 512)    2048        conv5_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_2_relu (Activation (None, 4, 2, 512)    0           conv5_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_0_conv (Conv2D)    (None, 4, 2, 2048)   2099200     conv4_block6_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_3_conv (Conv2D)    (None, 4, 2, 2048)   1050624     conv5_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_0_bn (BatchNormali (None, 4, 2, 2048)   8192        conv5_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_3_bn (BatchNormali (None, 4, 2, 2048)   8192        conv5_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_add (Add)          (None, 4, 2, 2048)   0           conv5_block1_0_bn[0][0]          \n",
      "                                                                 conv5_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_out (Activation)   (None, 4, 2, 2048)   0           conv5_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_1_conv (Conv2D)    (None, 4, 2, 512)    1049088     conv5_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_1_bn (BatchNormali (None, 4, 2, 512)    2048        conv5_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_1_relu (Activation (None, 4, 2, 512)    0           conv5_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_2_conv (Conv2D)    (None, 4, 2, 512)    2359808     conv5_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_2_bn (BatchNormali (None, 4, 2, 512)    2048        conv5_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_2_relu (Activation (None, 4, 2, 512)    0           conv5_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_3_conv (Conv2D)    (None, 4, 2, 2048)   1050624     conv5_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_3_bn (BatchNormali (None, 4, 2, 2048)   8192        conv5_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_add (Add)          (None, 4, 2, 2048)   0           conv5_block1_out[0][0]           \n",
      "                                                                 conv5_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_out (Activation)   (None, 4, 2, 2048)   0           conv5_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_1_conv (Conv2D)    (None, 4, 2, 512)    1049088     conv5_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_1_bn (BatchNormali (None, 4, 2, 512)    2048        conv5_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_1_relu (Activation (None, 4, 2, 512)    0           conv5_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_2_conv (Conv2D)    (None, 4, 2, 512)    2359808     conv5_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_2_bn (BatchNormali (None, 4, 2, 512)    2048        conv5_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_2_relu (Activation (None, 4, 2, 512)    0           conv5_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_3_conv (Conv2D)    (None, 4, 2, 2048)   1050624     conv5_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_3_bn (BatchNormali (None, 4, 2, 2048)   8192        conv5_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_add (Add)          (None, 4, 2, 2048)   0           conv5_block2_out[0][0]           \n",
      "                                                                 conv5_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_out (Activation)   (None, 4, 2, 2048)   0           conv5_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "max_pool (GlobalMaxPooling2D)   (None, 2048)         0           conv5_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 751)          1538799     max_pool[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "triplet (Lambda)                (None, 2048)         0           max_pool[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "softmax (Activation)            (None, 751)          0           dense_1[0][0]                    \n",
      "==================================================================================================\n",
      "Total params: 25,126,511\n",
      "Trainable params: 25,073,391\n",
      "Non-trainable params: 53,120\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "from tensorflow.keras.callbacks import ModelCheckpoint\n",
    "from tensorflow.keras.applications.mobilenet_v2 import MobileNetV2\n",
    "from tensorflow.keras.applications.resnet50 import ResNet50\n",
    "import tensorflow as tf\n",
    "from tensorflow.keras.models import Model\n",
    "from tensorflow.keras.optimizers import SGD, Adam, RMSprop\n",
    "from tensorflow.keras.layers import Dense, Activation, Input, concatenate, Lambda\n",
    "from tensorflow.keras.losses import categorical_crossentropy\n",
    "import tensorflow_addons as tfa\n",
    "# FIX: import the backend from tensorflow.keras rather than standalone keras.\n",
    "# Mixing the two packages hands K.l2_normalize tensors from a different\n",
    "# graph/session and breaks under TF2's tf.keras models.\n",
    "from tensorflow.keras import backend as K\n",
    "\n",
    "# Semi-hard triplet loss (margin 0.3) applied to the embedding head.\n",
    "triplet_semi_hard_loss = tfa.losses.TripletSemiHardLoss(margin=0.3)\n",
    "\n",
    "# Backbone CNN; the MobileNetV2 variant is kept for quick experiments.\n",
    "# cnn_model = MobileNetV2(include_top=False, weights='imagenet', alpha=0.5,\n",
    "#                         input_shape=(img_height, img_width, 3), pooling='max')\n",
    "cnn_model = ResNet50(include_top=False, weights='imagenet',\n",
    "                     input_shape=(img_height, img_width, 3), pooling='max')\n",
    "\n",
    "global_pool = cnn_model.layers[-1].output\n",
    "# L2-normalized embedding branch consumed by the triplet loss.\n",
    "# NOTE: with a custom Lambda layer only model *weights* can be saved reliably.\n",
    "dense_normalized = Lambda(lambda x: K.l2_normalize(x, axis=1), name='triplet')(global_pool)\n",
    "# Classification branch over the person-ID classes.\n",
    "dense = Dense(nbr_person_ids)(global_pool)\n",
    "softmax_output = Activation('softmax', name='softmax')(dense)\n",
    "# Two-headed model: [normalized embeddings, softmax class probabilities].\n",
    "semi_hard_triplet_model = Model(cnn_model.input, outputs=[dense_normalized, softmax_output])\n",
    "\n",
    "semi_hard_triplet_model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optimizer choices; SGD-with-momentum variant kept for reference.\n",
    "# optimizer = SGD(learning_rate=0.01, decay=1e-6, momentum=0.9, nesterov=True)\n",
    "optimizer = SGD(learning_rate=0.01)  # 'lr' is deprecated in tf.keras; use 'learning_rate'\n",
    "# optimizer = RMSprop(learning_rate=0.01)  # 0.01 is too large a learning rate for RMSprop\n",
    "# optimizer = Adam(learning_rate=0.01)  # appropriate learning rate still to be tuned\n",
    "\n",
    "def crossentropy_label_smoothing(y_true, y_pred):\n",
    "    \"\"\"Categorical cross-entropy with label smoothing (0.1) to reduce overfitting.\"\"\"\n",
    "    label_smoothing = 0.1\n",
    "    return categorical_crossentropy(y_true, y_pred, label_smoothing=label_smoothing)\n",
    "\n",
    "USE_Label_Smoothing = True\n",
    "# Pick the classification loss once instead of duplicating the compile call.\n",
    "softmax_loss = crossentropy_label_smoothing if USE_Label_Smoothing else 'categorical_crossentropy'\n",
    "# NOTE: the two losses should sit on comparable scales; tune loss_weights by\n",
    "# watching the printed per-loss values during training. An overly large\n",
    "# triplet weight may stall convergence.\n",
    "semi_hard_triplet_model.compile(loss=[triplet_semi_hard_loss, softmax_loss],\n",
    "                                loss_weights=[5, 1],\n",
    "                                optimizer=optimizer,\n",
    "                                metrics=['accuracy'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Shared generator settings; training additionally shuffles and augments.\n",
    "common_gen_kwargs = dict(nbr_classes=nbr_person_ids,\n",
    "                         img_width=img_width,\n",
    "                         img_height=img_height,\n",
    "                         batch_size=batch_size)\n",
    "\n",
    "train_generator = generator_batch(img_path_list=train_img_path,\n",
    "                                  img_label_list=train_person_ids,\n",
    "                                  shuffle=True,\n",
    "                                  augment=True,\n",
    "                                  **common_gen_kwargs)\n",
    "\n",
    "val_generator = generator_batch(img_path_list=val_img_path,\n",
    "                                img_label_list=val_person_ids,\n",
    "                                shuffle=False,\n",
    "                                augment=False,\n",
    "                                **common_gen_kwargs)\n",
    "\n",
    "# Save a checkpoint after every epoch (save_best_only=False): val accuracy can\n",
    "# saturate while the model is still improving on the test set, and with\n",
    "# save_best_only=True the newer, better weights would never be written.\n",
    "checkpoint = ModelCheckpoint('/kaggle/working/semi_hard_triplet_model-{epoch:02d}-{val_softmax_accuracy:.3f}.h5',\n",
    "                             monitor='val_softmax_accuracy',\n",
    "                             verbose=1, save_best_only=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 9.2577 - triplet_loss: 0.2859 - softmax_loss: 7.8283 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.0118\n",
      "Epoch 00001: saving model to /kaggle/working/semi_hard_triplet_model-01-0.002.h5\n",
      "162/162 [==============================] - 45s 280ms/step - loss: 9.2577 - triplet_loss: 0.2859 - softmax_loss: 7.8283 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.0118 - val_loss: 8.1131 - val_triplet_loss: 0.2936 - val_softmax_loss: 6.6452 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.0023\n",
      "Epoch 2/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 7.7677 - triplet_loss: 0.2745 - softmax_loss: 6.3952 - triplet_accuracy: 2.8991e-04 - softmax_accuracy: 0.0396- ETA: 0s - loss: 7.7744 - triplet_loss: 0.2744 - softmax_loss: 6.4026 - triplet_accuracy: 2.9481e-04 - softmax_accuracy: 0.0\n",
      "Epoch 00002: saving model to /kaggle/working/semi_hard_triplet_model-02-0.003.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 7.7677 - triplet_loss: 0.2745 - softmax_loss: 6.3952 - triplet_accuracy: 2.8991e-04 - softmax_accuracy: 0.0396 - val_loss: 8.0784 - val_triplet_loss: 0.2892 - val_softmax_loss: 6.6324 - val_triplet_accuracy: 0.0039 - val_softmax_accuracy: 0.0031\n",
      "Epoch 3/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 7.1081 - triplet_loss: 0.2702 - softmax_loss: 5.7572 - triplet_accuracy: 9.6637e-04 - softmax_accuracy: 0.0894\n",
      "Epoch 00003: saving model to /kaggle/working/semi_hard_triplet_model-03-0.010.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 7.1081 - triplet_loss: 0.2702 - softmax_loss: 5.7572 - triplet_accuracy: 9.6637e-04 - softmax_accuracy: 0.0894 - val_loss: 7.9180 - val_triplet_loss: 0.2753 - val_softmax_loss: 6.5414 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.0104\n",
      "Epoch 4/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 6.4465 - triplet_loss: 0.2639 - softmax_loss: 5.1271 - triplet_accuracy: 9.6637e-04 - softmax_accuracy: 0.1552\n",
      "Epoch 00004: saving model to /kaggle/working/semi_hard_triplet_model-04-0.059.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 6.4465 - triplet_loss: 0.2639 - softmax_loss: 5.1271 - triplet_accuracy: 9.6637e-04 - softmax_accuracy: 0.1552 - val_loss: 7.1374 - val_triplet_loss: 0.2636 - val_softmax_loss: 5.8193 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.0591\n",
      "Epoch 5/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 5.8142 - triplet_loss: 0.2592 - softmax_loss: 4.5183 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.2413\n",
      "Epoch 00005: saving model to /kaggle/working/semi_hard_triplet_model-05-0.206.h5\n",
      "162/162 [==============================] - 30s 188ms/step - loss: 5.8142 - triplet_loss: 0.2592 - softmax_loss: 4.5183 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.2413 - val_loss: 5.9762 - val_triplet_loss: 0.2575 - val_softmax_loss: 4.6885 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.2060\n",
      "Epoch 6/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 5.2841 - triplet_loss: 0.2546 - softmax_loss: 4.0110 - triplet_accuracy: 8.6973e-04 - softmax_accuracy: 0.3242\n",
      "Epoch 00006: saving model to /kaggle/working/semi_hard_triplet_model-06-0.374.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 5.2841 - triplet_loss: 0.2546 - softmax_loss: 4.0110 - triplet_accuracy: 8.6973e-04 - softmax_accuracy: 0.3242 - val_loss: 4.9875 - val_triplet_loss: 0.2511 - val_softmax_loss: 3.7322 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.3736\n",
      "Epoch 7/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 4.8234 - triplet_loss: 0.2528 - softmax_loss: 3.5595 - triplet_accuracy: 0.0013 - softmax_accuracy: 0.4095\n",
      "Epoch 00007: saving model to /kaggle/working/semi_hard_triplet_model-07-0.442.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 4.8234 - triplet_loss: 0.2528 - softmax_loss: 3.5595 - triplet_accuracy: 0.0013 - softmax_accuracy: 0.4095 - val_loss: 4.6503 - val_triplet_loss: 0.2462 - val_softmax_loss: 3.4193 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.4417\n",
      "Epoch 8/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 4.3670 - triplet_loss: 0.2431 - softmax_loss: 3.1517 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.4886\n",
      "Epoch 00008: saving model to /kaggle/working/semi_hard_triplet_model-08-0.508.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 4.3670 - triplet_loss: 0.2431 - softmax_loss: 3.1517 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.4886 - val_loss: 4.2922 - val_triplet_loss: 0.2449 - val_softmax_loss: 3.0674 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.5081\n",
      "Epoch 9/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 4.1118 - triplet_loss: 0.2414 - softmax_loss: 2.9049 - triplet_accuracy: 0.0012 - softmax_accuracy: 0.5508\n",
      "Epoch 00009: saving model to /kaggle/working/semi_hard_triplet_model-09-0.573.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 4.1118 - triplet_loss: 0.2414 - softmax_loss: 2.9049 - triplet_accuracy: 0.0012 - softmax_accuracy: 0.5508 - val_loss: 4.0714 - val_triplet_loss: 0.2404 - val_softmax_loss: 2.8696 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.5734\n",
      "Epoch 10/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.8561 - triplet_loss: 0.2396 - softmax_loss: 2.6580 - triplet_accuracy: 0.0012 - softmax_accuracy: 0.6111\n",
      "Epoch 00010: saving model to /kaggle/working/semi_hard_triplet_model-10-0.605.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 3.8561 - triplet_loss: 0.2396 - softmax_loss: 2.6580 - triplet_accuracy: 0.0012 - softmax_accuracy: 0.6111 - val_loss: 4.0288 - val_triplet_loss: 0.2404 - val_softmax_loss: 2.8267 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.6051\n",
      "Epoch 11/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.6657 - triplet_loss: 0.2329 - softmax_loss: 2.5012 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.6530\n",
      "Epoch 00011: saving model to /kaggle/working/semi_hard_triplet_model-11-0.638.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 3.6657 - triplet_loss: 0.2329 - softmax_loss: 2.5012 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.6530 - val_loss: 3.8723 - val_triplet_loss: 0.2380 - val_softmax_loss: 2.6824 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.6383\n",
      "Epoch 12/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.5169 - triplet_loss: 0.2283 - softmax_loss: 2.3752 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.6824\n",
      "Epoch 00012: saving model to /kaggle/working/semi_hard_triplet_model-12-0.637.h5\n",
      "162/162 [==============================] - 30s 188ms/step - loss: 3.5169 - triplet_loss: 0.2283 - softmax_loss: 2.3752 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.6824 - val_loss: 3.7341 - val_triplet_loss: 0.2397 - val_softmax_loss: 2.5356 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.6368\n",
      "Epoch 13/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.3550 - triplet_loss: 0.2235 - softmax_loss: 2.2374 - triplet_accuracy: 8.6973e-04 - softmax_accuracy: 0.7177\n",
      "Epoch 00013: saving model to /kaggle/working/semi_hard_triplet_model-13-0.665.h5\n",
      "162/162 [==============================] - 32s 198ms/step - loss: 3.3550 - triplet_loss: 0.2235 - softmax_loss: 2.2374 - triplet_accuracy: 8.6973e-04 - softmax_accuracy: 0.7177 - val_loss: 3.7685 - val_triplet_loss: 0.2383 - val_softmax_loss: 2.5768 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.6650\n",
      "Epoch 14/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.2511 - triplet_loss: 0.2211 - softmax_loss: 2.1457 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.7512\n",
      "Epoch 00014: saving model to /kaggle/working/semi_hard_triplet_model-14-0.684.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 3.2511 - triplet_loss: 0.2211 - softmax_loss: 2.1457 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.7512 - val_loss: 3.5805 - val_triplet_loss: 0.2364 - val_softmax_loss: 2.3984 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.6839\n",
      "Epoch 15/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.1359 - triplet_loss: 0.2157 - softmax_loss: 2.0576 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.7665\n",
      "Epoch 00015: saving model to /kaggle/working/semi_hard_triplet_model-15-0.678.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 3.1359 - triplet_loss: 0.2157 - softmax_loss: 2.0576 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.7665 - val_loss: 3.6861 - val_triplet_loss: 0.2387 - val_softmax_loss: 2.4924 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.6781\n",
      "Epoch 16/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 3.0470 - triplet_loss: 0.2124 - softmax_loss: 1.9852 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.7907\n",
      "Epoch 00016: saving model to /kaggle/working/semi_hard_triplet_model-16-0.723.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 3.0470 - triplet_loss: 0.2124 - softmax_loss: 1.9852 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.7907 - val_loss: 3.4032 - val_triplet_loss: 0.2354 - val_softmax_loss: 2.2265 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.7230\n",
      "Epoch 17/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.9846 - triplet_loss: 0.2091 - softmax_loss: 1.9390 - triplet_accuracy: 0.0012 - softmax_accuracy: 0.8042\n",
      "Epoch 00017: saving model to /kaggle/working/semi_hard_triplet_model-17-0.720.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 2.9846 - triplet_loss: 0.2091 - softmax_loss: 1.9390 - triplet_accuracy: 0.0012 - softmax_accuracy: 0.8042 - val_loss: 3.4095 - val_triplet_loss: 0.2334 - val_softmax_loss: 2.2427 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7202\n",
      "Epoch 18/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.8989 - triplet_loss: 0.2032 - softmax_loss: 1.8828 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.8169\n",
      "Epoch 00018: saving model to /kaggle/working/semi_hard_triplet_model-18-0.718.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 2.8989 - triplet_loss: 0.2032 - softmax_loss: 1.8828 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.8169 - val_loss: 3.3772 - val_triplet_loss: 0.2339 - val_softmax_loss: 2.2079 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.7179\n",
      "Epoch 19/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.8345 - triplet_loss: 0.1974 - softmax_loss: 1.8474 - triplet_accuracy: 0.0011 - softmax_accuracy: 0.8259- ETA: 6s - loss: 2.8486 - triplet_loss: 0.1996 - softmax_loss: 1.8504 - triplet_accur\n",
      "Epoch 00019: saving model to /kaggle/working/semi_hard_triplet_model-19-0.766.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 2.8345 - triplet_loss: 0.1974 - softmax_loss: 1.8474 - triplet_accuracy: 0.0011 - softmax_accuracy: 0.8259 - val_loss: 3.2040 - val_triplet_loss: 0.2280 - val_softmax_loss: 2.0639 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.7662\n",
      "Epoch 20/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.7714 - triplet_loss: 0.1950 - softmax_loss: 1.7964 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.8418\n",
      "Epoch 00020: saving model to /kaggle/working/semi_hard_triplet_model-20-0.752.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 2.7714 - triplet_loss: 0.1950 - softmax_loss: 1.7964 - triplet_accuracy: 7.7310e-04 - softmax_accuracy: 0.8418 - val_loss: 3.2380 - val_triplet_loss: 0.2277 - val_softmax_loss: 2.0995 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7515\n",
      "Epoch 21/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.7191 - triplet_loss: 0.1908 - softmax_loss: 1.7650 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.8492\n",
      "Epoch 00021: saving model to /kaggle/working/semi_hard_triplet_model-21-0.770.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 2.7191 - triplet_loss: 0.1908 - softmax_loss: 1.7650 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.8492 - val_loss: 3.1875 - val_triplet_loss: 0.2263 - val_softmax_loss: 2.0561 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.7697\n",
      "Epoch 22/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.6743 - triplet_loss: 0.1865 - softmax_loss: 1.7419 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.8603\n",
      "Epoch 00022: saving model to /kaggle/working/semi_hard_triplet_model-22-0.767.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 2.6743 - triplet_loss: 0.1865 - softmax_loss: 1.7419 - triplet_accuracy: 5.7982e-04 - softmax_accuracy: 0.8603 - val_loss: 3.1712 - val_triplet_loss: 0.2246 - val_softmax_loss: 2.0483 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7666\n",
      "Epoch 23/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.6032 - triplet_loss: 0.1816 - softmax_loss: 1.6951 - triplet_accuracy: 0.0011 - softmax_accuracy: 0.8774\n",
      "Epoch 00023: saving model to /kaggle/working/semi_hard_triplet_model-23-0.766.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 2.6032 - triplet_loss: 0.1816 - softmax_loss: 1.6951 - triplet_accuracy: 0.0011 - softmax_accuracy: 0.8774 - val_loss: 3.1856 - val_triplet_loss: 0.2250 - val_softmax_loss: 2.0608 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.7662\n",
      "Epoch 24/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.5336 - triplet_loss: 0.1732 - softmax_loss: 1.6677 - triplet_accuracy: 9.6637e-04 - softmax_accuracy: 0.8819\n",
      "Epoch 00024: saving model to /kaggle/working/semi_hard_triplet_model-24-0.794.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 2.5336 - triplet_loss: 0.1732 - softmax_loss: 1.6677 - triplet_accuracy: 9.6637e-04 - softmax_accuracy: 0.8819 - val_loss: 3.0633 - val_triplet_loss: 0.2216 - val_softmax_loss: 1.9553 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.7940\n",
      "Epoch 25/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.5355 - triplet_loss: 0.1721 - softmax_loss: 1.6750 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.8756\n",
      "Epoch 00025: saving model to /kaggle/working/semi_hard_triplet_model-25-0.756.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 2.5355 - triplet_loss: 0.1721 - softmax_loss: 1.6750 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.8756 - val_loss: 3.2392 - val_triplet_loss: 0.2258 - val_softmax_loss: 2.1103 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7558\n",
      "Epoch 26/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.4640 - triplet_loss: 0.1670 - softmax_loss: 1.6291 - triplet_accuracy: 0.0015 - softmax_accuracy: 0.8903\n",
      "Epoch 00026: saving model to /kaggle/working/semi_hard_triplet_model-26-0.706.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 2.4640 - triplet_loss: 0.1670 - softmax_loss: 1.6291 - triplet_accuracy: 0.0015 - softmax_accuracy: 0.8903 - val_loss: 3.4164 - val_triplet_loss: 0.2270 - val_softmax_loss: 2.2816 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.7063\n",
      "Epoch 27/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.4386 - triplet_loss: 0.1645 - softmax_loss: 1.6163 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.8941\n",
      "Epoch 00027: saving model to /kaggle/working/semi_hard_triplet_model-27-0.788.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 2.4386 - triplet_loss: 0.1645 - softmax_loss: 1.6163 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.8941 - val_loss: 3.0817 - val_triplet_loss: 0.2232 - val_softmax_loss: 1.9658 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.7879\n",
      "Epoch 28/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.3944 - triplet_loss: 0.1588 - softmax_loss: 1.6006 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.8977\n",
      "Epoch 00028: saving model to /kaggle/working/semi_hard_triplet_model-28-0.793.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 2.3944 - triplet_loss: 0.1588 - softmax_loss: 1.6006 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.8977 - val_loss: 3.0258 - val_triplet_loss: 0.2180 - val_softmax_loss: 1.9357 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.7929\n",
      "Epoch 29/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.3526 - triplet_loss: 0.1540 - softmax_loss: 1.5826 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9047\n",
      "Epoch 00029: saving model to /kaggle/working/semi_hard_triplet_model-29-0.783.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 2.3526 - triplet_loss: 0.1540 - softmax_loss: 1.5826 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9047 - val_loss: 3.0555 - val_triplet_loss: 0.2211 - val_softmax_loss: 1.9500 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.7832\n",
      "Epoch 30/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.3437 - triplet_loss: 0.1538 - softmax_loss: 1.5746 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9075\n",
      "Epoch 00030: saving model to /kaggle/working/semi_hard_triplet_model-30-0.779.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 2.3437 - triplet_loss: 0.1538 - softmax_loss: 1.5746 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9075 - val_loss: 3.1272 - val_triplet_loss: 0.2220 - val_softmax_loss: 2.0174 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7790\n",
      "Epoch 31/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.2699 - triplet_loss: 0.1440 - softmax_loss: 1.5500 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9141\n",
      "Epoch 00031: saving model to /kaggle/working/semi_hard_triplet_model-31-0.796.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 2.2699 - triplet_loss: 0.1440 - softmax_loss: 1.5500 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9141 - val_loss: 3.0116 - val_triplet_loss: 0.2192 - val_softmax_loss: 1.9156 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.7956\n",
      "Epoch 32/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.2358 - triplet_loss: 0.1400 - softmax_loss: 1.5356 - triplet_accuracy: 0.0015 - softmax_accuracy: 0.9155\n",
      "Epoch 00032: saving model to /kaggle/working/semi_hard_triplet_model-32-0.795.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 2.2358 - triplet_loss: 0.1400 - softmax_loss: 1.5356 - triplet_accuracy: 0.0015 - softmax_accuracy: 0.9155 - val_loss: 3.0289 - val_triplet_loss: 0.2195 - val_softmax_loss: 1.9315 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.7952\n",
      "Epoch 33/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.1995 - triplet_loss: 0.1341 - softmax_loss: 1.5289 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9143\n",
      "Epoch 00033: saving model to /kaggle/working/semi_hard_triplet_model-33-0.787.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 2.1995 - triplet_loss: 0.1341 - softmax_loss: 1.5289 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9143 - val_loss: 3.0408 - val_triplet_loss: 0.2201 - val_softmax_loss: 1.9403 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7875\n",
      "Epoch 34/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.1844 - triplet_loss: 0.1334 - softmax_loss: 1.5176 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9200\n",
      "Epoch 00034: saving model to /kaggle/working/semi_hard_triplet_model-34-0.791.h5\n",
      "162/162 [==============================] - 32s 198ms/step - loss: 2.1844 - triplet_loss: 0.1334 - softmax_loss: 1.5176 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9200 - val_loss: 3.0358 - val_triplet_loss: 0.2180 - val_softmax_loss: 1.9460 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.7910\n",
      "Epoch 35/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.1153 - triplet_loss: 0.1218 - softmax_loss: 1.5062 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9238\n",
      "Epoch 00035: saving model to /kaggle/working/semi_hard_triplet_model-35-0.806.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 2.1153 - triplet_loss: 0.1218 - softmax_loss: 1.5062 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9238 - val_loss: 2.9515 - val_triplet_loss: 0.2162 - val_softmax_loss: 1.8703 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8064\n",
      "Epoch 36/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.0885 - triplet_loss: 0.1210 - softmax_loss: 1.4837 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9300\n",
      "Epoch 00036: saving model to /kaggle/working/semi_hard_triplet_model-36-0.800.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 2.0885 - triplet_loss: 0.1210 - softmax_loss: 1.4837 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9300 - val_loss: 2.9479 - val_triplet_loss: 0.2133 - val_softmax_loss: 1.8813 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.8002\n",
      "Epoch 37/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.0778 - triplet_loss: 0.1185 - softmax_loss: 1.4855 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9285\n",
      "Epoch 00037: saving model to /kaggle/working/semi_hard_triplet_model-37-0.778.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 2.0778 - triplet_loss: 0.1185 - softmax_loss: 1.4855 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9285 - val_loss: 3.0936 - val_triplet_loss: 0.2162 - val_softmax_loss: 2.0127 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.7782\n",
      "Epoch 38/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.0410 - triplet_loss: 0.1140 - softmax_loss: 1.4711 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9328\n",
      "Epoch 00038: saving model to /kaggle/working/semi_hard_triplet_model-38-0.768.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 2.0410 - triplet_loss: 0.1140 - softmax_loss: 1.4711 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9328 - val_loss: 3.1540 - val_triplet_loss: 0.2231 - val_softmax_loss: 2.0384 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.7682\n",
      "Epoch 39/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 2.0154 - triplet_loss: 0.1100 - softmax_loss: 1.4653 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9335\n",
      "Epoch 00039: saving model to /kaggle/working/semi_hard_triplet_model-39-0.820.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 2.0154 - triplet_loss: 0.1100 - softmax_loss: 1.4653 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9335 - val_loss: 2.8907 - val_triplet_loss: 0.2119 - val_softmax_loss: 1.8311 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8199\n",
      "Epoch 40/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.9807 - triplet_loss: 0.1062 - softmax_loss: 1.4498 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9376\n",
      "Epoch 00040: saving model to /kaggle/working/semi_hard_triplet_model-40-0.808.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.9807 - triplet_loss: 0.1062 - softmax_loss: 1.4498 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9376 - val_loss: 2.9223 - val_triplet_loss: 0.2104 - val_softmax_loss: 1.8705 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8080\n",
      "Epoch 41/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.9283 - triplet_loss: 0.0978 - softmax_loss: 1.4391 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9380\n",
      "Epoch 00041: saving model to /kaggle/working/semi_hard_triplet_model-41-0.813.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.9283 - triplet_loss: 0.0978 - softmax_loss: 1.4391 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9380 - val_loss: 2.8889 - val_triplet_loss: 0.2091 - val_softmax_loss: 1.8436 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8130\n",
      "Epoch 42/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.9015 - triplet_loss: 0.0949 - softmax_loss: 1.4272 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9436\n",
      "Epoch 00042: saving model to /kaggle/working/semi_hard_triplet_model-42-0.806.h5\n",
      "162/162 [==============================] - 33s 201ms/step - loss: 1.9015 - triplet_loss: 0.0949 - softmax_loss: 1.4272 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9436 - val_loss: 2.9384 - val_triplet_loss: 0.2108 - val_softmax_loss: 1.8842 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8064\n",
      "Epoch 43/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.8867 - triplet_loss: 0.0913 - softmax_loss: 1.4300 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9413\n",
      "Epoch 00043: saving model to /kaggle/working/semi_hard_triplet_model-43-0.811.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.8867 - triplet_loss: 0.0913 - softmax_loss: 1.4300 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9413 - val_loss: 2.9138 - val_triplet_loss: 0.2109 - val_softmax_loss: 1.8595 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8107\n",
      "Epoch 44/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.8867 - triplet_loss: 0.0919 - softmax_loss: 1.4270 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9423\n",
      "Epoch 00044: saving model to /kaggle/working/semi_hard_triplet_model-44-0.801.h5\n",
      "162/162 [==============================] - 32s 201ms/step - loss: 1.8867 - triplet_loss: 0.0919 - softmax_loss: 1.4270 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9423 - val_loss: 2.9631 - val_triplet_loss: 0.2095 - val_softmax_loss: 1.9155 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8006\n",
      "Epoch 45/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.8447 - triplet_loss: 0.0859 - softmax_loss: 1.4154 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9451\n",
      "Epoch 00045: saving model to /kaggle/working/semi_hard_triplet_model-45-0.825.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.8447 - triplet_loss: 0.0859 - softmax_loss: 1.4154 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9451 - val_loss: 2.8394 - val_triplet_loss: 0.2081 - val_softmax_loss: 1.7991 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8253\n",
      "Epoch 46/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.8038 - triplet_loss: 0.0795 - softmax_loss: 1.4064 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9471\n",
      "Epoch 00046: saving model to /kaggle/working/semi_hard_triplet_model-46-0.800.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.8038 - triplet_loss: 0.0795 - softmax_loss: 1.4064 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9471 - val_loss: 2.9606 - val_triplet_loss: 0.2127 - val_softmax_loss: 1.8974 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8002\n",
      "Epoch 47/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.7749 - triplet_loss: 0.0764 - softmax_loss: 1.3930 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9502\n",
      "Epoch 00047: saving model to /kaggle/working/semi_hard_triplet_model-47-0.839.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.7749 - triplet_loss: 0.0764 - softmax_loss: 1.3930 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9502 - val_loss: 2.8075 - val_triplet_loss: 0.2078 - val_softmax_loss: 1.7686 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8393\n",
      "Epoch 48/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.7756 - triplet_loss: 0.0755 - softmax_loss: 1.3980 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9494\n",
      "Epoch 00048: saving model to /kaggle/working/semi_hard_triplet_model-48-0.827.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.7756 - triplet_loss: 0.0755 - softmax_loss: 1.3980 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9494 - val_loss: 2.8629 - val_triplet_loss: 0.2078 - val_softmax_loss: 1.8239 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.8269\n",
      "Epoch 49/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.7392 - triplet_loss: 0.0711 - softmax_loss: 1.3837 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9521\n",
      "Epoch 00049: saving model to /kaggle/working/semi_hard_triplet_model-49-0.833.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.7392 - triplet_loss: 0.0711 - softmax_loss: 1.3837 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9521 - val_loss: 2.8817 - val_triplet_loss: 0.2129 - val_softmax_loss: 1.8173 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8327\n",
      "Epoch 50/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.7358 - triplet_loss: 0.0713 - softmax_loss: 1.3793 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9535\n",
      "Epoch 00050: saving model to /kaggle/working/semi_hard_triplet_model-50-0.817.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.7358 - triplet_loss: 0.0713 - softmax_loss: 1.3793 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9535 - val_loss: 2.9087 - val_triplet_loss: 0.2097 - val_softmax_loss: 1.8602 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8172\n",
      "Epoch 51/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.7030 - triplet_loss: 0.0652 - softmax_loss: 1.3768 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9526\n",
      "Epoch 00051: saving model to /kaggle/working/semi_hard_triplet_model-51-0.822.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.7030 - triplet_loss: 0.0652 - softmax_loss: 1.3768 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9526 - val_loss: 2.8403 - val_triplet_loss: 0.2062 - val_softmax_loss: 1.8093 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8223\n",
      "Epoch 52/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.6758 - triplet_loss: 0.0605 - softmax_loss: 1.3736 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9532\n",
      "Epoch 00052: saving model to /kaggle/working/semi_hard_triplet_model-52-0.798.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.6758 - triplet_loss: 0.0605 - softmax_loss: 1.3736 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9532 - val_loss: 2.9724 - val_triplet_loss: 0.2132 - val_softmax_loss: 1.9065 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.7975\n",
      "Epoch 53/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.6538 - triplet_loss: 0.0595 - softmax_loss: 1.3564 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9587\n",
      "Epoch 00053: saving model to /kaggle/working/semi_hard_triplet_model-53-0.828.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.6538 - triplet_loss: 0.0595 - softmax_loss: 1.3564 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9587 - val_loss: 2.8786 - val_triplet_loss: 0.2112 - val_softmax_loss: 1.8227 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8281\n",
      "Epoch 54/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.6435 - triplet_loss: 0.0579 - softmax_loss: 1.3540 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9569\n",
      "Epoch 00054: saving model to /kaggle/working/semi_hard_triplet_model-54-0.833.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.6435 - triplet_loss: 0.0579 - softmax_loss: 1.3540 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9569 - val_loss: 2.8423 - val_triplet_loss: 0.2087 - val_softmax_loss: 1.7987 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8335\n",
      "Epoch 55/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.6251 - triplet_loss: 0.0554 - softmax_loss: 1.3483 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9610\n",
      "Epoch 00055: saving model to /kaggle/working/semi_hard_triplet_model-55-0.838.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.6251 - triplet_loss: 0.0554 - softmax_loss: 1.3483 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9610 - val_loss: 2.8441 - val_triplet_loss: 0.2078 - val_softmax_loss: 1.8053 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8377\n",
      "Epoch 56/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.6087 - triplet_loss: 0.0541 - softmax_loss: 1.3381 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9643\n",
      "Epoch 00056: saving model to /kaggle/working/semi_hard_triplet_model-56-0.831.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.6087 - triplet_loss: 0.0541 - softmax_loss: 1.3381 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9643 - val_loss: 2.8320 - val_triplet_loss: 0.2097 - val_softmax_loss: 1.7837 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8308\n",
      "Epoch 57/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.5987 - triplet_loss: 0.0527 - softmax_loss: 1.3350 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9617\n",
      "Epoch 00057: saving model to /kaggle/working/semi_hard_triplet_model-57-0.818.h5\n",
      "162/162 [==============================] - 33s 201ms/step - loss: 1.5987 - triplet_loss: 0.0527 - softmax_loss: 1.3350 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9617 - val_loss: 2.8736 - val_triplet_loss: 0.2080 - val_softmax_loss: 1.8337 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8176\n",
      "Epoch 58/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.5698 - triplet_loss: 0.0490 - softmax_loss: 1.3248 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9661\n",
      "Epoch 00058: saving model to /kaggle/working/semi_hard_triplet_model-58-0.833.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.5698 - triplet_loss: 0.0490 - softmax_loss: 1.3248 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9661 - val_loss: 2.8182 - val_triplet_loss: 0.2061 - val_softmax_loss: 1.7878 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8331\n",
      "Epoch 59/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.5485 - triplet_loss: 0.0460 - softmax_loss: 1.3184 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9667\n",
      "Epoch 00059: saving model to /kaggle/working/semi_hard_triplet_model-59-0.837.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 1.5485 - triplet_loss: 0.0460 - softmax_loss: 1.3184 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9667 - val_loss: 2.7621 - val_triplet_loss: 0.2020 - val_softmax_loss: 1.7523 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8369\n",
      "Epoch 60/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.5208 - triplet_loss: 0.0419 - softmax_loss: 1.3113 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9663\n",
      "Epoch 00060: saving model to /kaggle/working/semi_hard_triplet_model-60-0.835.h5\n",
      "162/162 [==============================] - 30s 188ms/step - loss: 1.5208 - triplet_loss: 0.0419 - softmax_loss: 1.3113 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9663 - val_loss: 2.8382 - val_triplet_loss: 0.2071 - val_softmax_loss: 1.8026 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8354\n",
      "Epoch 61/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4987 - triplet_loss: 0.0379 - softmax_loss: 1.3092 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9666\n",
      "Epoch 00061: saving model to /kaggle/working/semi_hard_triplet_model-61-0.854.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.4987 - triplet_loss: 0.0379 - softmax_loss: 1.3092 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9666 - val_loss: 2.7100 - val_triplet_loss: 0.1988 - val_softmax_loss: 1.7159 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.8543\n",
      "Epoch 62/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4886 - triplet_loss: 0.0369 - softmax_loss: 1.3043 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9699\n",
      "Epoch 00062: saving model to /kaggle/working/semi_hard_triplet_model-62-0.847.h5\n",
      "162/162 [==============================] - 30s 185ms/step - loss: 1.4886 - triplet_loss: 0.0369 - softmax_loss: 1.3043 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9699 - val_loss: 2.7570 - val_triplet_loss: 0.2019 - val_softmax_loss: 1.7474 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8470\n",
      "Epoch 63/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4693 - triplet_loss: 0.0343 - softmax_loss: 1.2976 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9698\n",
      "Epoch 00063: saving model to /kaggle/working/semi_hard_triplet_model-63-0.789.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.4693 - triplet_loss: 0.0343 - softmax_loss: 1.2976 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9698 - val_loss: 3.0081 - val_triplet_loss: 0.2122 - val_softmax_loss: 1.9471 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.7894\n",
      "Epoch 64/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4852 - triplet_loss: 0.0365 - softmax_loss: 1.3026 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9680\n",
      "Epoch 00064: saving model to /kaggle/working/semi_hard_triplet_model-64-0.827.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.4852 - triplet_loss: 0.0365 - softmax_loss: 1.3026 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9680 - val_loss: 2.8351 - val_triplet_loss: 0.2046 - val_softmax_loss: 1.8122 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8273\n",
      "Epoch 65/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4595 - triplet_loss: 0.0329 - softmax_loss: 1.2951 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9698\n",
      "Epoch 00065: saving model to /kaggle/working/semi_hard_triplet_model-65-0.812.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.4595 - triplet_loss: 0.0329 - softmax_loss: 1.2951 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9698 - val_loss: 2.9363 - val_triplet_loss: 0.2103 - val_softmax_loss: 1.8849 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8118\n",
      "Epoch 66/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4695 - triplet_loss: 0.0338 - softmax_loss: 1.3006 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9657\n",
      "Epoch 00066: saving model to /kaggle/working/semi_hard_triplet_model-66-0.853.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.4695 - triplet_loss: 0.0338 - softmax_loss: 1.3006 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9657 - val_loss: 2.7405 - val_triplet_loss: 0.2004 - val_softmax_loss: 1.7386 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8528\n",
      "Epoch 67/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4384 - triplet_loss: 0.0303 - softmax_loss: 1.2867 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9719\n",
      "Epoch 00067: saving model to /kaggle/working/semi_hard_triplet_model-67-0.848.h5\n",
      "162/162 [==============================] - 32s 200ms/step - loss: 1.4384 - triplet_loss: 0.0303 - softmax_loss: 1.2867 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9719 - val_loss: 2.7645 - val_triplet_loss: 0.2021 - val_softmax_loss: 1.7542 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8478\n",
      "Epoch 68/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4159 - triplet_loss: 0.0274 - softmax_loss: 1.2789 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9742\n",
      "Epoch 00068: saving model to /kaggle/working/semi_hard_triplet_model-68-0.844.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.4159 - triplet_loss: 0.0274 - softmax_loss: 1.2789 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9742 - val_loss: 2.7619 - val_triplet_loss: 0.2026 - val_softmax_loss: 1.7491 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8443\n",
      "Epoch 69/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.4225 - triplet_loss: 0.0285 - softmax_loss: 1.2800 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9724\n",
      "Epoch 00069: saving model to /kaggle/working/semi_hard_triplet_model-69-0.840.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.4225 - triplet_loss: 0.0285 - softmax_loss: 1.2800 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9724 - val_loss: 2.7573 - val_triplet_loss: 0.2011 - val_softmax_loss: 1.7517 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8396\n",
      "Epoch 70/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3856 - triplet_loss: 0.0231 - softmax_loss: 1.2703 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9746\n",
      "Epoch 00070: saving model to /kaggle/working/semi_hard_triplet_model-70-0.852.h5\n",
      "162/162 [==============================] - 30s 185ms/step - loss: 1.3856 - triplet_loss: 0.0231 - softmax_loss: 1.2703 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9746 - val_loss: 2.6913 - val_triplet_loss: 0.1949 - val_softmax_loss: 1.7168 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8516\n",
      "Epoch 71/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3930 - triplet_loss: 0.0258 - softmax_loss: 1.2640 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9757\n",
      "Epoch 00071: saving model to /kaggle/working/semi_hard_triplet_model-71-0.848.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.3930 - triplet_loss: 0.0258 - softmax_loss: 1.2640 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9757 - val_loss: 2.6982 - val_triplet_loss: 0.1983 - val_softmax_loss: 1.7066 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8481\n",
      "Epoch 72/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3598 - triplet_loss: 0.0212 - softmax_loss: 1.2537 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9807\n",
      "Epoch 00072: saving model to /kaggle/working/semi_hard_triplet_model-72-0.854.h5\n",
      "162/162 [==============================] - 30s 186ms/step - loss: 1.3598 - triplet_loss: 0.0212 - softmax_loss: 1.2537 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9807 - val_loss: 2.6960 - val_triplet_loss: 0.1998 - val_softmax_loss: 1.6968 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8539\n",
      "Epoch 73/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3729 - triplet_loss: 0.0235 - softmax_loss: 1.2555 - triplet_accuracy: 0.0015 - softmax_accuracy: 0.9777\n",
      "Epoch 00073: saving model to /kaggle/working/semi_hard_triplet_model-73-0.851.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.3729 - triplet_loss: 0.0235 - softmax_loss: 1.2555 - triplet_accuracy: 0.0015 - softmax_accuracy: 0.9777 - val_loss: 2.7245 - val_triplet_loss: 0.1986 - val_softmax_loss: 1.7316 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8512\n",
      "Epoch 74/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3657 - triplet_loss: 0.0223 - softmax_loss: 1.2543 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9783\n",
      "Epoch 00074: saving model to /kaggle/working/semi_hard_triplet_model-74-0.849.h5\n",
      "162/162 [==============================] - 30s 186ms/step - loss: 1.3657 - triplet_loss: 0.0223 - softmax_loss: 1.2543 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9783 - val_loss: 2.7132 - val_triplet_loss: 0.1975 - val_softmax_loss: 1.7256 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8485\n",
      "Epoch 75/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3627 - triplet_loss: 0.0218 - softmax_loss: 1.2538 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9787\n",
      "Epoch 00075: saving model to /kaggle/working/semi_hard_triplet_model-75-0.857.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.3627 - triplet_loss: 0.0218 - softmax_loss: 1.2538 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9787 - val_loss: 2.6697 - val_triplet_loss: 0.1965 - val_softmax_loss: 1.6870 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8570\n",
      "Epoch 76/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3483 - triplet_loss: 0.0196 - softmax_loss: 1.2503 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9775\n",
      "Epoch 00076: saving model to /kaggle/working/semi_hard_triplet_model-76-0.859.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.3483 - triplet_loss: 0.0196 - softmax_loss: 1.2503 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9775 - val_loss: 2.6946 - val_triplet_loss: 0.1980 - val_softmax_loss: 1.7048 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8594\n",
      "Epoch 77/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3333 - triplet_loss: 0.0178 - softmax_loss: 1.2445 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9799\n",
      "Epoch 00077: saving model to /kaggle/working/semi_hard_triplet_model-77-0.847.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.3333 - triplet_loss: 0.0178 - softmax_loss: 1.2445 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9799 - val_loss: 2.7193 - val_triplet_loss: 0.1999 - val_softmax_loss: 1.7198 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8470\n",
      "Epoch 78/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3366 - triplet_loss: 0.0188 - softmax_loss: 1.2425 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9793\n",
      "Epoch 00078: saving model to /kaggle/working/semi_hard_triplet_model-78-0.871.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.3366 - triplet_loss: 0.0188 - softmax_loss: 1.2425 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9793 - val_loss: 2.6607 - val_triplet_loss: 0.1976 - val_softmax_loss: 1.6729 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8709\n",
      "Epoch 79/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3362 - triplet_loss: 0.0192 - softmax_loss: 1.2400 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9804\n",
      "Epoch 00079: saving model to /kaggle/working/semi_hard_triplet_model-79-0.838.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.3362 - triplet_loss: 0.0192 - softmax_loss: 1.2400 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9804 - val_loss: 2.8027 - val_triplet_loss: 0.2046 - val_softmax_loss: 1.7797 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8377\n",
      "Epoch 80/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3362 - triplet_loss: 0.0194 - softmax_loss: 1.2392 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9821\n",
      "Epoch 00080: saving model to /kaggle/working/semi_hard_triplet_model-80-0.855.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.3362 - triplet_loss: 0.0194 - softmax_loss: 1.2392 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9821 - val_loss: 2.6949 - val_triplet_loss: 0.1997 - val_softmax_loss: 1.6965 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.8555\n",
      "Epoch 81/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3174 - triplet_loss: 0.0171 - softmax_loss: 1.2321 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9807\n",
      "Epoch 00081: saving model to /kaggle/working/semi_hard_triplet_model-81-0.852.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.3174 - triplet_loss: 0.0171 - softmax_loss: 1.2321 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9807 - val_loss: 2.6928 - val_triplet_loss: 0.1992 - val_softmax_loss: 1.6967 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8520\n",
      "Epoch 82/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3125 - triplet_loss: 0.0166 - softmax_loss: 1.2293 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9817\n",
      "Epoch 00082: saving model to /kaggle/working/semi_hard_triplet_model-82-0.863.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.3125 - triplet_loss: 0.0166 - softmax_loss: 1.2293 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9817 - val_loss: 2.6506 - val_triplet_loss: 0.1969 - val_softmax_loss: 1.6660 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8632\n",
      "Epoch 83/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3235 - triplet_loss: 0.0174 - softmax_loss: 1.2366 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9794\n",
      "Epoch 00083: saving model to /kaggle/working/semi_hard_triplet_model-83-0.826.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.3235 - triplet_loss: 0.0174 - softmax_loss: 1.2366 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9794 - val_loss: 2.8299 - val_triplet_loss: 0.2037 - val_softmax_loss: 1.8113 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.8261\n",
      "Epoch 84/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.3093 - triplet_loss: 0.0162 - softmax_loss: 1.2281 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9810\n",
      "Epoch 00084: saving model to /kaggle/working/semi_hard_triplet_model-84-0.857.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.3093 - triplet_loss: 0.0162 - softmax_loss: 1.2281 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9810 - val_loss: 2.6294 - val_triplet_loss: 0.1916 - val_softmax_loss: 1.6713 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8574\n",
      "Epoch 85/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2989 - triplet_loss: 0.0154 - softmax_loss: 1.2218 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9832\n",
      "Epoch 00085: saving model to /kaggle/working/semi_hard_triplet_model-85-0.873.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.2989 - triplet_loss: 0.0154 - softmax_loss: 1.2218 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9832 - val_loss: 2.6291 - val_triplet_loss: 0.1958 - val_softmax_loss: 1.6502 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8729\n",
      "Epoch 86/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2944 - triplet_loss: 0.0148 - softmax_loss: 1.2205 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9848\n",
      "Epoch 00086: saving model to /kaggle/working/semi_hard_triplet_model-86-0.857.h5\n",
      "162/162 [==============================] - 32s 198ms/step - loss: 1.2944 - triplet_loss: 0.0148 - softmax_loss: 1.2205 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9848 - val_loss: 2.6879 - val_triplet_loss: 0.1975 - val_softmax_loss: 1.7003 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8574\n",
      "Epoch 87/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2889 - triplet_loss: 0.0151 - softmax_loss: 1.2136 - triplet_accuracy: 0.0032 - softmax_accuracy: 0.9848\n",
      "Epoch 00087: saving model to /kaggle/working/semi_hard_triplet_model-87-0.867.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.2889 - triplet_loss: 0.0151 - softmax_loss: 1.2136 - triplet_accuracy: 0.0032 - softmax_accuracy: 0.9848 - val_loss: 2.6210 - val_triplet_loss: 0.1936 - val_softmax_loss: 1.6529 - val_triplet_accuracy: 0.0023 - val_softmax_accuracy: 0.8671\n",
      "Epoch 88/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2674 - triplet_loss: 0.0122 - softmax_loss: 1.2065 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9865\n",
      "Epoch 00088: saving model to /kaggle/working/semi_hard_triplet_model-88-0.868.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.2674 - triplet_loss: 0.0122 - softmax_loss: 1.2065 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9865 - val_loss: 2.6373 - val_triplet_loss: 0.1957 - val_softmax_loss: 1.6587 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8679\n",
      "Epoch 89/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2602 - triplet_loss: 0.0109 - softmax_loss: 1.2058 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9876\n",
      "Epoch 00089: saving model to /kaggle/working/semi_hard_triplet_model-89-0.859.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.2602 - triplet_loss: 0.0109 - softmax_loss: 1.2058 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9876 - val_loss: 2.6478 - val_triplet_loss: 0.1949 - val_softmax_loss: 1.6733 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8594\n",
      "Epoch 90/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2719 - triplet_loss: 0.0124 - softmax_loss: 1.2099 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9842\n",
      "Epoch 00090: saving model to /kaggle/working/semi_hard_triplet_model-90-0.867.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.2719 - triplet_loss: 0.0124 - softmax_loss: 1.2099 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9842 - val_loss: 2.6247 - val_triplet_loss: 0.1947 - val_softmax_loss: 1.6511 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8671\n",
      "Epoch 91/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2392 - triplet_loss: 0.0092 - softmax_loss: 1.1934 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9889\n",
      "Epoch 00091: saving model to /kaggle/working/semi_hard_triplet_model-91-0.859.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.2392 - triplet_loss: 0.0092 - softmax_loss: 1.1934 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9889 - val_loss: 2.6383 - val_triplet_loss: 0.1940 - val_softmax_loss: 1.6685 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8586\n",
      "Epoch 92/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2510 - triplet_loss: 0.0094 - softmax_loss: 1.2041 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9842\n",
      "Epoch 00092: saving model to /kaggle/working/semi_hard_triplet_model-92-0.860.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.2510 - triplet_loss: 0.0094 - softmax_loss: 1.2041 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9842 - val_loss: 2.6152 - val_triplet_loss: 0.1941 - val_softmax_loss: 1.6447 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8601\n",
      "Epoch 93/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2476 - triplet_loss: 0.0106 - softmax_loss: 1.1944 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9875\n",
      "Epoch 00093: saving model to /kaggle/working/semi_hard_triplet_model-93-0.871.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.2476 - triplet_loss: 0.0106 - softmax_loss: 1.1944 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9875 - val_loss: 2.5892 - val_triplet_loss: 0.1921 - val_softmax_loss: 1.6286 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8709\n",
      "Epoch 94/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2423 - triplet_loss: 0.0096 - softmax_loss: 1.1944 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9875\n",
      "Epoch 00094: saving model to /kaggle/working/semi_hard_triplet_model-94-0.865.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.2423 - triplet_loss: 0.0096 - softmax_loss: 1.1944 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9875 - val_loss: 2.6188 - val_triplet_loss: 0.1934 - val_softmax_loss: 1.6516 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8651\n",
      "Epoch 95/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2444 - triplet_loss: 0.0103 - softmax_loss: 1.1929 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9876\n",
      "Epoch 00095: saving model to /kaggle/working/semi_hard_triplet_model-95-0.874.h5\n",
      "162/162 [==============================] - 30s 185ms/step - loss: 1.2444 - triplet_loss: 0.0103 - softmax_loss: 1.1929 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9876 - val_loss: 2.6200 - val_triplet_loss: 0.1934 - val_softmax_loss: 1.6532 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8736\n",
      "Epoch 96/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2436 - triplet_loss: 0.0100 - softmax_loss: 1.1938 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9859\n",
      "Epoch 00096: saving model to /kaggle/working/semi_hard_triplet_model-96-0.863.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.2436 - triplet_loss: 0.0100 - softmax_loss: 1.1938 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9859 - val_loss: 2.6800 - val_triplet_loss: 0.1968 - val_softmax_loss: 1.6961 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8632\n",
      "Epoch 97/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2539 - triplet_loss: 0.0122 - softmax_loss: 1.1929 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9871\n",
      "Epoch 00097: saving model to /kaggle/working/semi_hard_triplet_model-97-0.866.h5\n",
      "162/162 [==============================] - 33s 201ms/step - loss: 1.2539 - triplet_loss: 0.0122 - softmax_loss: 1.1929 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9871 - val_loss: 2.6589 - val_triplet_loss: 0.1975 - val_softmax_loss: 1.6715 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8663\n",
      "Epoch 98/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2361 - triplet_loss: 0.0093 - softmax_loss: 1.1898 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9877\n",
      "Epoch 00098: saving model to /kaggle/working/semi_hard_triplet_model-98-0.856.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.2361 - triplet_loss: 0.0093 - softmax_loss: 1.1898 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9877 - val_loss: 2.6492 - val_triplet_loss: 0.1955 - val_softmax_loss: 1.6715 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8559\n",
      "Epoch 99/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2318 - triplet_loss: 0.0091 - softmax_loss: 1.1865 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9882\n",
      "Epoch 00099: saving model to /kaggle/working/semi_hard_triplet_model-99-0.862.h5\n",
      "162/162 [==============================] - 30s 185ms/step - loss: 1.2318 - triplet_loss: 0.0091 - softmax_loss: 1.1865 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9882 - val_loss: 2.6381 - val_triplet_loss: 0.1928 - val_softmax_loss: 1.6741 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8617\n",
      "Epoch 100/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2251 - triplet_loss: 0.0081 - softmax_loss: 1.1847 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9869\n",
      "Epoch 00100: saving model to /kaggle/working/semi_hard_triplet_model-100-0.867.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.2251 - triplet_loss: 0.0081 - softmax_loss: 1.1847 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9869 - val_loss: 2.6321 - val_triplet_loss: 0.1919 - val_softmax_loss: 1.6727 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8667\n",
      "Epoch 101/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2340 - triplet_loss: 0.0096 - softmax_loss: 1.1860 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9866\n",
      "Epoch 00101: saving model to /kaggle/working/semi_hard_triplet_model-101-0.875.h5\n",
      "162/162 [==============================] - 29s 181ms/step - loss: 1.2340 - triplet_loss: 0.0096 - softmax_loss: 1.1860 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9866 - val_loss: 2.6027 - val_triplet_loss: 0.1925 - val_softmax_loss: 1.6400 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8752\n",
      "Epoch 102/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2239 - triplet_loss: 0.0078 - softmax_loss: 1.1849 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9882\n",
      "Epoch 00102: saving model to /kaggle/working/semi_hard_triplet_model-102-0.878.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.2239 - triplet_loss: 0.0078 - softmax_loss: 1.1849 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9882 - val_loss: 2.5667 - val_triplet_loss: 0.1886 - val_softmax_loss: 1.6236 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8775\n",
      "Epoch 103/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2122 - triplet_loss: 0.0072 - softmax_loss: 1.1762 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9906\n",
      "Epoch 00103: saving model to /kaggle/working/semi_hard_triplet_model-103-0.869.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.2122 - triplet_loss: 0.0072 - softmax_loss: 1.1762 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9906 - val_loss: 2.6007 - val_triplet_loss: 0.1914 - val_softmax_loss: 1.6437 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8690\n",
      "Epoch 104/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2102 - triplet_loss: 0.0071 - softmax_loss: 1.1745 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9892\n",
      "Epoch 00104: saving model to /kaggle/working/semi_hard_triplet_model-104-0.871.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.2102 - triplet_loss: 0.0071 - softmax_loss: 1.1745 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9892 - val_loss: 2.5717 - val_triplet_loss: 0.1871 - val_softmax_loss: 1.6360 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8709\n",
      "Epoch 105/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2111 - triplet_loss: 0.0083 - softmax_loss: 1.1698 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9917\n",
      "Epoch 00105: saving model to /kaggle/working/semi_hard_triplet_model-105-0.868.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.2111 - triplet_loss: 0.0083 - softmax_loss: 1.1698 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9917 - val_loss: 2.6167 - val_triplet_loss: 0.1897 - val_softmax_loss: 1.6682 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8682\n",
      "Epoch 106/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2106 - triplet_loss: 0.0076 - softmax_loss: 1.1725 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9898\n",
      "Epoch 00106: saving model to /kaggle/working/semi_hard_triplet_model-106-0.834.h5\n",
      "162/162 [==============================] - 30s 186ms/step - loss: 1.2106 - triplet_loss: 0.0076 - softmax_loss: 1.1725 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9898 - val_loss: 2.8044 - val_triplet_loss: 0.1955 - val_softmax_loss: 1.8270 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8338\n",
      "Epoch 107/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2077 - triplet_loss: 0.0073 - softmax_loss: 1.1711 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9907\n",
      "Epoch 00107: saving model to /kaggle/working/semi_hard_triplet_model-107-0.867.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.2077 - triplet_loss: 0.0073 - softmax_loss: 1.1711 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9907 - val_loss: 2.6475 - val_triplet_loss: 0.1926 - val_softmax_loss: 1.6846 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8675\n",
      "Epoch 108/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2170 - triplet_loss: 0.0087 - softmax_loss: 1.1733 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9899\n",
      "Epoch 00108: saving model to /kaggle/working/semi_hard_triplet_model-108-0.868.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.2170 - triplet_loss: 0.0087 - softmax_loss: 1.1733 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9899 - val_loss: 2.6362 - val_triplet_loss: 0.1938 - val_softmax_loss: 1.6674 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8679\n",
      "Epoch 109/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2191 - triplet_loss: 0.0096 - softmax_loss: 1.1708 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9899\n",
      "Epoch 00109: saving model to /kaggle/working/semi_hard_triplet_model-109-0.858.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.2191 - triplet_loss: 0.0096 - softmax_loss: 1.1708 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9899 - val_loss: 2.6980 - val_triplet_loss: 0.2009 - val_softmax_loss: 1.6936 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8578\n",
      "Epoch 110/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2032 - triplet_loss: 0.0073 - softmax_loss: 1.1666 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9905\n",
      "Epoch 00110: saving model to /kaggle/working/semi_hard_triplet_model-110-0.862.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.2032 - triplet_loss: 0.0073 - softmax_loss: 1.1666 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9905 - val_loss: 2.6669 - val_triplet_loss: 0.1961 - val_softmax_loss: 1.6863 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8621\n",
      "Epoch 111/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.2079 - triplet_loss: 0.0082 - softmax_loss: 1.1670 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9903\n",
      "Epoch 00111: saving model to /kaggle/working/semi_hard_triplet_model-111-0.860.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.2079 - triplet_loss: 0.0082 - softmax_loss: 1.1670 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9903 - val_loss: 2.6799 - val_triplet_loss: 0.1975 - val_softmax_loss: 1.6922 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8597\n",
      "Epoch 112/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1849 - triplet_loss: 0.0049 - softmax_loss: 1.1602 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9921\n",
      "Epoch 00112: saving model to /kaggle/working/semi_hard_triplet_model-112-0.876.h5\n",
      "162/162 [==============================] - 30s 185ms/step - loss: 1.1849 - triplet_loss: 0.0049 - softmax_loss: 1.1602 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9921 - val_loss: 2.5780 - val_triplet_loss: 0.1921 - val_softmax_loss: 1.6173 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8764\n",
      "Epoch 113/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1860 - triplet_loss: 0.0055 - softmax_loss: 1.1583 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9922\n",
      "Epoch 00113: saving model to /kaggle/working/semi_hard_triplet_model-113-0.878.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1860 - triplet_loss: 0.0055 - softmax_loss: 1.1583 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9922 - val_loss: 2.5699 - val_triplet_loss: 0.1892 - val_softmax_loss: 1.6240 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8779\n",
      "Epoch 114/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1891 - triplet_loss: 0.0059 - softmax_loss: 1.1596 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9922\n",
      "Epoch 00114: saving model to /kaggle/working/semi_hard_triplet_model-114-0.859.h5\n",
      "162/162 [==============================] - 30s 188ms/step - loss: 1.1891 - triplet_loss: 0.0059 - softmax_loss: 1.1596 - triplet_accuracy: 0.0029 - softmax_accuracy: 0.9922 - val_loss: 2.7076 - val_triplet_loss: 0.1978 - val_softmax_loss: 1.7188 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8586\n",
      "Epoch 115/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1916 - triplet_loss: 0.0063 - softmax_loss: 1.1600 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9909- ETA: 9s - loss: 1.1861 - triplet_loss: 0.0052 - softmax_loss: 1.1601\n",
      "Epoch 00115: saving model to /kaggle/working/semi_hard_triplet_model-115-0.868.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1916 - triplet_loss: 0.0063 - softmax_loss: 1.1600 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9909 - val_loss: 2.6126 - val_triplet_loss: 0.1938 - val_softmax_loss: 1.6434 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8682\n",
      "Epoch 116/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1944 - triplet_loss: 0.0068 - softmax_loss: 1.1604 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9898\n",
      "Epoch 00116: saving model to /kaggle/working/semi_hard_triplet_model-116-0.872.h5\n",
      "162/162 [==============================] - 31s 188ms/step - loss: 1.1944 - triplet_loss: 0.0068 - softmax_loss: 1.1604 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9898 - val_loss: 2.6111 - val_triplet_loss: 0.1939 - val_softmax_loss: 1.6417 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8721\n",
      "Epoch 117/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1866 - triplet_loss: 0.0058 - softmax_loss: 1.1574 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9907\n",
      "Epoch 00117: saving model to /kaggle/working/semi_hard_triplet_model-117-0.868.h5\n",
      "162/162 [==============================] - 32s 198ms/step - loss: 1.1866 - triplet_loss: 0.0058 - softmax_loss: 1.1574 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9907 - val_loss: 2.6334 - val_triplet_loss: 0.1934 - val_softmax_loss: 1.6664 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8679\n",
      "Epoch 118/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1820 - triplet_loss: 0.0057 - softmax_loss: 1.1534 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9924\n",
      "Epoch 00118: saving model to /kaggle/working/semi_hard_triplet_model-118-0.876.h5\n",
      "162/162 [==============================] - 36s 222ms/step - loss: 1.1820 - triplet_loss: 0.0057 - softmax_loss: 1.1534 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9924 - val_loss: 2.5656 - val_triplet_loss: 0.1885 - val_softmax_loss: 1.6231 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8764\n",
      "Epoch 119/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1776 - triplet_loss: 0.0047 - softmax_loss: 1.1539 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9915\n",
      "Epoch 00119: saving model to /kaggle/working/semi_hard_triplet_model-119-0.875.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1776 - triplet_loss: 0.0047 - softmax_loss: 1.1539 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9915 - val_loss: 2.5995 - val_triplet_loss: 0.1925 - val_softmax_loss: 1.6370 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8748\n",
      "Epoch 120/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1815 - triplet_loss: 0.0058 - softmax_loss: 1.1523 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9914\n",
      "Epoch 00120: saving model to /kaggle/working/semi_hard_triplet_model-120-0.878.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1815 - triplet_loss: 0.0058 - softmax_loss: 1.1523 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9914 - val_loss: 2.5829 - val_triplet_loss: 0.1905 - val_softmax_loss: 1.6306 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8783\n",
      "Epoch 121/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1704 - triplet_loss: 0.0043 - softmax_loss: 1.1487 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9930- ETA: 5s - loss: 1.1707 - triplet_loss: 0.0041 - softmax_loss: 1.1505 - triplet_accuracy:\n",
      "Epoch 00121: saving model to /kaggle/working/semi_hard_triplet_model-121-0.881.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1704 - triplet_loss: 0.0043 - softmax_loss: 1.1487 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9930 - val_loss: 2.5490 - val_triplet_loss: 0.1891 - val_softmax_loss: 1.6037 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8814\n",
      "Epoch 122/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1667 - triplet_loss: 0.0044 - softmax_loss: 1.1448 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9923\n",
      "Epoch 00122: saving model to /kaggle/working/semi_hard_triplet_model-122-0.862.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.1667 - triplet_loss: 0.0044 - softmax_loss: 1.1448 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9923 - val_loss: 2.6588 - val_triplet_loss: 0.1945 - val_softmax_loss: 1.6864 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8617\n",
      "Epoch 123/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1806 - triplet_loss: 0.0063 - softmax_loss: 1.1491 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9924\n",
      "Epoch 00123: saving model to /kaggle/working/semi_hard_triplet_model-123-0.873.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1806 - triplet_loss: 0.0063 - softmax_loss: 1.1491 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9924 - val_loss: 2.5886 - val_triplet_loss: 0.1905 - val_softmax_loss: 1.6362 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8729\n",
      "Epoch 124/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1693 - triplet_loss: 0.0050 - softmax_loss: 1.1444 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9926\n",
      "Epoch 00124: saving model to /kaggle/working/semi_hard_triplet_model-124-0.881.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1693 - triplet_loss: 0.0050 - softmax_loss: 1.1444 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9926 - val_loss: 2.6031 - val_triplet_loss: 0.1942 - val_softmax_loss: 1.6324 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8814\n",
      "Epoch 125/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1795 - triplet_loss: 0.0060 - softmax_loss: 1.1495 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9930\n",
      "Epoch 00125: saving model to /kaggle/working/semi_hard_triplet_model-125-0.879.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1795 - triplet_loss: 0.0060 - softmax_loss: 1.1495 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9930 - val_loss: 2.5954 - val_triplet_loss: 0.1935 - val_softmax_loss: 1.6279 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8791\n",
      "Epoch 126/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1681 - triplet_loss: 0.0047 - softmax_loss: 1.1446 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9919\n",
      "Epoch 00126: saving model to /kaggle/working/semi_hard_triplet_model-126-0.873.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1681 - triplet_loss: 0.0047 - softmax_loss: 1.1446 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9919 - val_loss: 2.5955 - val_triplet_loss: 0.1920 - val_softmax_loss: 1.6354 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8733\n",
      "Epoch 127/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1744 - triplet_loss: 0.0054 - softmax_loss: 1.1472 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9923\n",
      "Epoch 00127: saving model to /kaggle/working/semi_hard_triplet_model-127-0.883.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.1744 - triplet_loss: 0.0054 - softmax_loss: 1.1472 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9923 - val_loss: 2.5461 - val_triplet_loss: 0.1884 - val_softmax_loss: 1.6040 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8833\n",
      "Epoch 128/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1569 - triplet_loss: 0.0039 - softmax_loss: 1.1375 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9950\n",
      "Epoch 00128: saving model to /kaggle/working/semi_hard_triplet_model-128-0.882.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1569 - triplet_loss: 0.0039 - softmax_loss: 1.1375 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9950 - val_loss: 2.5734 - val_triplet_loss: 0.1915 - val_softmax_loss: 1.6160 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8818\n",
      "Epoch 129/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1562 - triplet_loss: 0.0039 - softmax_loss: 1.1367 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9938\n",
      "Epoch 00129: saving model to /kaggle/working/semi_hard_triplet_model-129-0.845.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1562 - triplet_loss: 0.0039 - softmax_loss: 1.1367 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9938 - val_loss: 2.7714 - val_triplet_loss: 0.2021 - val_softmax_loss: 1.7607 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8454\n",
      "Epoch 130/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1619 - triplet_loss: 0.0041 - softmax_loss: 1.1414 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9925\n",
      "Epoch 00130: saving model to /kaggle/working/semi_hard_triplet_model-130-0.889.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.1619 - triplet_loss: 0.0041 - softmax_loss: 1.1414 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9925 - val_loss: 2.5839 - val_triplet_loss: 0.1923 - val_softmax_loss: 1.6221 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8887\n",
      "Epoch 131/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1596 - triplet_loss: 0.0044 - softmax_loss: 1.1375 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9941\n",
      "Epoch 00131: saving model to /kaggle/working/semi_hard_triplet_model-131-0.883.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.1596 - triplet_loss: 0.0044 - softmax_loss: 1.1375 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9941 - val_loss: 2.5543 - val_triplet_loss: 0.1882 - val_softmax_loss: 1.6134 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8833\n",
      "Epoch 132/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1647 - triplet_loss: 0.0055 - softmax_loss: 1.1373 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9932\n",
      "Epoch 00132: saving model to /kaggle/working/semi_hard_triplet_model-132-0.880.h5\n",
      "162/162 [==============================] - 32s 200ms/step - loss: 1.1647 - triplet_loss: 0.0055 - softmax_loss: 1.1373 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9932 - val_loss: 2.5519 - val_triplet_loss: 0.1860 - val_softmax_loss: 1.6220 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8802\n",
      "Epoch 133/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1539 - triplet_loss: 0.0035 - softmax_loss: 1.1363 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9932\n",
      "Epoch 00133: saving model to /kaggle/working/semi_hard_triplet_model-133-0.885.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1539 - triplet_loss: 0.0035 - softmax_loss: 1.1363 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9932 - val_loss: 2.5337 - val_triplet_loss: 0.1857 - val_softmax_loss: 1.6050 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8852\n",
      "Epoch 134/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1565 - triplet_loss: 0.0045 - softmax_loss: 1.1341 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9935\n",
      "Epoch 00134: saving model to /kaggle/working/semi_hard_triplet_model-134-0.885.h5\n",
      "162/162 [==============================] - 32s 198ms/step - loss: 1.1565 - triplet_loss: 0.0045 - softmax_loss: 1.1341 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9935 - val_loss: 2.5425 - val_triplet_loss: 0.1890 - val_softmax_loss: 1.5975 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8849\n",
      "Epoch 135/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1511 - triplet_loss: 0.0037 - softmax_loss: 1.1328 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9945\n",
      "Epoch 00135: saving model to /kaggle/working/semi_hard_triplet_model-135-0.880.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1511 - triplet_loss: 0.0037 - softmax_loss: 1.1328 - triplet_accuracy: 0.0014 - softmax_accuracy: 0.9945 - val_loss: 2.5724 - val_triplet_loss: 0.1894 - val_softmax_loss: 1.6256 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8798\n",
      "Epoch 136/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1445 - triplet_loss: 0.0034 - softmax_loss: 1.1274 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9947\n",
      "Epoch 00136: saving model to /kaggle/working/semi_hard_triplet_model-136-0.876.h5\n",
      "162/162 [==============================] - 32s 198ms/step - loss: 1.1445 - triplet_loss: 0.0034 - softmax_loss: 1.1274 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9947 - val_loss: 2.5672 - val_triplet_loss: 0.1889 - val_softmax_loss: 1.6229 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8756\n",
      "Epoch 137/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1434 - triplet_loss: 0.0032 - softmax_loss: 1.1277 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9948\n",
      "Epoch 00137: saving model to /kaggle/working/semi_hard_triplet_model-137-0.869.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1434 - triplet_loss: 0.0032 - softmax_loss: 1.1277 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9948 - val_loss: 2.6010 - val_triplet_loss: 0.1902 - val_softmax_loss: 1.6498 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8686\n",
      "Epoch 138/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1475 - triplet_loss: 0.0038 - softmax_loss: 1.1284 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9954\n",
      "Epoch 00138: saving model to /kaggle/working/semi_hard_triplet_model-138-0.881.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 1.1475 - triplet_loss: 0.0038 - softmax_loss: 1.1284 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9954 - val_loss: 2.5735 - val_triplet_loss: 0.1878 - val_softmax_loss: 1.6343 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8810\n",
      "Epoch 139/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1396 - triplet_loss: 0.0025 - softmax_loss: 1.1271 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9949\n",
      "Epoch 00139: saving model to /kaggle/working/semi_hard_triplet_model-139-0.886.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.1396 - triplet_loss: 0.0025 - softmax_loss: 1.1271 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9949 - val_loss: 2.5206 - val_triplet_loss: 0.1862 - val_softmax_loss: 1.5898 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8860\n",
      "Epoch 140/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1489 - triplet_loss: 0.0043 - softmax_loss: 1.1275 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9950\n",
      "Epoch 00140: saving model to /kaggle/working/semi_hard_triplet_model-140-0.884.h5\n",
      "162/162 [==============================] - 33s 202ms/step - loss: 1.1489 - triplet_loss: 0.0043 - softmax_loss: 1.1275 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9950 - val_loss: 2.5313 - val_triplet_loss: 0.1848 - val_softmax_loss: 1.6072 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8845\n",
      "Epoch 141/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1448 - triplet_loss: 0.0033 - softmax_loss: 1.1283 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9943\n",
      "Epoch 00141: saving model to /kaggle/working/semi_hard_triplet_model-141-0.882.h5\n",
      "162/162 [==============================] - 32s 194ms/step - loss: 1.1448 - triplet_loss: 0.0033 - softmax_loss: 1.1283 - triplet_accuracy: 0.0016 - softmax_accuracy: 0.9943 - val_loss: 2.5368 - val_triplet_loss: 0.1880 - val_softmax_loss: 1.5969 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8818\n",
      "Epoch 142/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1475 - triplet_loss: 0.0043 - softmax_loss: 1.1262 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9946\n",
      "Epoch 00142: saving model to /kaggle/working/semi_hard_triplet_model-142-0.882.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 1.1475 - triplet_loss: 0.0043 - softmax_loss: 1.1262 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9946 - val_loss: 2.5688 - val_triplet_loss: 0.1904 - val_softmax_loss: 1.6170 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8821\n",
      "Epoch 143/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1488 - triplet_loss: 0.0042 - softmax_loss: 1.1278 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9950- ETA: 7s - loss: 1.1496 - triplet_loss: 0.0042 - softmax_loss: 1.1285 - tri\n",
      "Epoch 00143: saving model to /kaggle/working/semi_hard_triplet_model-143-0.876.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1488 - triplet_loss: 0.0042 - softmax_loss: 1.1278 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9950 - val_loss: 2.5617 - val_triplet_loss: 0.1878 - val_softmax_loss: 1.6225 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8760\n",
      "Epoch 144/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1427 - triplet_loss: 0.0038 - softmax_loss: 1.1238 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9953\n",
      "Epoch 00144: saving model to /kaggle/working/semi_hard_triplet_model-144-0.883.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1427 - triplet_loss: 0.0038 - softmax_loss: 1.1238 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9953 - val_loss: 2.5383 - val_triplet_loss: 0.1882 - val_softmax_loss: 1.5971 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8829\n",
      "Epoch 145/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1437 - triplet_loss: 0.0035 - softmax_loss: 1.1260 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9945\n",
      "Epoch 00145: saving model to /kaggle/working/semi_hard_triplet_model-145-0.871.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1437 - triplet_loss: 0.0035 - softmax_loss: 1.1260 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9945 - val_loss: 2.5807 - val_triplet_loss: 0.1881 - val_softmax_loss: 1.6402 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8713\n",
      "Epoch 146/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1381 - triplet_loss: 0.0035 - softmax_loss: 1.1205 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9957\n",
      "Epoch 00146: saving model to /kaggle/working/semi_hard_triplet_model-146-0.883.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1381 - triplet_loss: 0.0035 - softmax_loss: 1.1205 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9957 - val_loss: 2.5255 - val_triplet_loss: 0.1834 - val_softmax_loss: 1.6085 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8825\n",
      "Epoch 147/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1389 - triplet_loss: 0.0032 - softmax_loss: 1.1231 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9947\n",
      "Epoch 00147: saving model to /kaggle/working/semi_hard_triplet_model-147-0.879.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1389 - triplet_loss: 0.0032 - softmax_loss: 1.1231 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9947 - val_loss: 2.5594 - val_triplet_loss: 0.1867 - val_softmax_loss: 1.6259 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8787\n",
      "Epoch 148/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1420 - triplet_loss: 0.0034 - softmax_loss: 1.1249 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9945- ETA: 6s - loss: 1.1400 - triplet_loss: 0.0032 - softmax_loss: 1.1238 - triplet_\n",
      "Epoch 00148: saving model to /kaggle/working/semi_hard_triplet_model-148-0.883.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1420 - triplet_loss: 0.0034 - softmax_loss: 1.1249 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9945 - val_loss: 2.5355 - val_triplet_loss: 0.1863 - val_softmax_loss: 1.6038 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8833\n",
      "Epoch 149/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1325 - triplet_loss: 0.0023 - softmax_loss: 1.1212 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9949\n",
      "Epoch 00149: saving model to /kaggle/working/semi_hard_triplet_model-149-0.889.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.1325 - triplet_loss: 0.0023 - softmax_loss: 1.1212 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9949 - val_loss: 2.4986 - val_triplet_loss: 0.1837 - val_softmax_loss: 1.5802 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8887\n",
      "Epoch 150/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1385 - triplet_loss: 0.0039 - softmax_loss: 1.1189 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9954\n",
      "Epoch 00150: saving model to /kaggle/working/semi_hard_triplet_model-150-0.889.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.1385 - triplet_loss: 0.0039 - softmax_loss: 1.1189 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9954 - val_loss: 2.5093 - val_triplet_loss: 0.1826 - val_softmax_loss: 1.5961 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8887\n",
      "Epoch 151/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1351 - triplet_loss: 0.0034 - softmax_loss: 1.1182 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9960\n",
      "Epoch 00151: saving model to /kaggle/working/semi_hard_triplet_model-151-0.881.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1351 - triplet_loss: 0.0034 - softmax_loss: 1.1182 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9960 - val_loss: 2.5532 - val_triplet_loss: 0.1874 - val_softmax_loss: 1.6160 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8806\n",
      "Epoch 152/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1349 - triplet_loss: 0.0032 - softmax_loss: 1.1190 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9952\n",
      "Epoch 00152: saving model to /kaggle/working/semi_hard_triplet_model-152-0.889.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.1349 - triplet_loss: 0.0032 - softmax_loss: 1.1190 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9952 - val_loss: 2.5130 - val_triplet_loss: 0.1832 - val_softmax_loss: 1.5971 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8891\n",
      "Epoch 153/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1252 - triplet_loss: 0.0019 - softmax_loss: 1.1157 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9952\n",
      "Epoch 00153: saving model to /kaggle/working/semi_hard_triplet_model-153-0.889.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1252 - triplet_loss: 0.0019 - softmax_loss: 1.1157 - triplet_accuracy: 0.0027 - softmax_accuracy: 0.9952 - val_loss: 2.5009 - val_triplet_loss: 0.1830 - val_softmax_loss: 1.5860 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8891\n",
      "Epoch 154/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1272 - triplet_loss: 0.0028 - softmax_loss: 1.1130 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9959\n",
      "Epoch 00154: saving model to /kaggle/working/semi_hard_triplet_model-154-0.891.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.1272 - triplet_loss: 0.0028 - softmax_loss: 1.1130 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9959 - val_loss: 2.4915 - val_triplet_loss: 0.1809 - val_softmax_loss: 1.5869 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8910\n",
      "Epoch 155/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1252 - triplet_loss: 0.0028 - softmax_loss: 1.1114 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9970\n",
      "Epoch 00155: saving model to /kaggle/working/semi_hard_triplet_model-155-0.891.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1252 - triplet_loss: 0.0028 - softmax_loss: 1.1114 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9970 - val_loss: 2.5176 - val_triplet_loss: 0.1847 - val_softmax_loss: 1.5944 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8906\n",
      "Epoch 156/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1366 - triplet_loss: 0.0035 - softmax_loss: 1.1192 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9953\n",
      "Epoch 00156: saving model to /kaggle/working/semi_hard_triplet_model-156-0.882.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1366 - triplet_loss: 0.0035 - softmax_loss: 1.1192 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9953 - val_loss: 2.5308 - val_triplet_loss: 0.1847 - val_softmax_loss: 1.6074 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8818\n",
      "Epoch 157/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1258 - triplet_loss: 0.0026 - softmax_loss: 1.1126 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9957\n",
      "Epoch 00157: saving model to /kaggle/working/semi_hard_triplet_model-157-0.893.h5\n",
      "162/162 [==============================] - 32s 200ms/step - loss: 1.1258 - triplet_loss: 0.0026 - softmax_loss: 1.1126 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9957 - val_loss: 2.5112 - val_triplet_loss: 0.1831 - val_softmax_loss: 1.5958 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8926\n",
      "Epoch 158/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1300 - triplet_loss: 0.0026 - softmax_loss: 1.1173 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9946\n",
      "Epoch 00158: saving model to /kaggle/working/semi_hard_triplet_model-158-0.889.h5\n",
      "162/162 [==============================] - 30s 187ms/step - loss: 1.1300 - triplet_loss: 0.0026 - softmax_loss: 1.1173 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9946 - val_loss: 2.5094 - val_triplet_loss: 0.1848 - val_softmax_loss: 1.5853 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8887\n",
      "Epoch 159/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1280 - triplet_loss: 0.0030 - softmax_loss: 1.1131 - triplet_accuracy: 0.0030 - softmax_accuracy: 0.9952\n",
      "Epoch 00159: saving model to /kaggle/working/semi_hard_triplet_model-159-0.883.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.1280 - triplet_loss: 0.0030 - softmax_loss: 1.1131 - triplet_accuracy: 0.0030 - softmax_accuracy: 0.9952 - val_loss: 2.5242 - val_triplet_loss: 0.1868 - val_softmax_loss: 1.5901 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8829\n",
      "Epoch 160/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1241 - triplet_loss: 0.0025 - softmax_loss: 1.1116 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9959\n",
      "Epoch 00160: saving model to /kaggle/working/semi_hard_triplet_model-160-0.887.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1241 - triplet_loss: 0.0025 - softmax_loss: 1.1116 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9959 - val_loss: 2.5306 - val_triplet_loss: 0.1887 - val_softmax_loss: 1.5868 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8872\n",
      "Epoch 161/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1270 - triplet_loss: 0.0029 - softmax_loss: 1.1123 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9957\n",
      "Epoch 00161: saving model to /kaggle/working/semi_hard_triplet_model-161-0.889.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1270 - triplet_loss: 0.0029 - softmax_loss: 1.1123 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9957 - val_loss: 2.5273 - val_triplet_loss: 0.1865 - val_softmax_loss: 1.5948 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8887\n",
      "Epoch 162/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1206 - triplet_loss: 0.0020 - softmax_loss: 1.1105 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9957\n",
      "Epoch 00162: saving model to /kaggle/working/semi_hard_triplet_model-162-0.883.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1206 - triplet_loss: 0.0020 - softmax_loss: 1.1105 - triplet_accuracy: 0.0028 - softmax_accuracy: 0.9957 - val_loss: 2.5363 - val_triplet_loss: 0.1849 - val_softmax_loss: 1.6117 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8833\n",
      "Epoch 163/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1232 - triplet_loss: 0.0025 - softmax_loss: 1.1108 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9957\n",
      "Epoch 00163: saving model to /kaggle/working/semi_hard_triplet_model-163-0.889.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1232 - triplet_loss: 0.0025 - softmax_loss: 1.1108 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9957 - val_loss: 2.5263 - val_triplet_loss: 0.1852 - val_softmax_loss: 1.6003 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8891\n",
      "Epoch 164/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1271 - triplet_loss: 0.0034 - softmax_loss: 1.1103 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9970\n",
      "Epoch 00164: saving model to /kaggle/working/semi_hard_triplet_model-164-0.889.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.1271 - triplet_loss: 0.0034 - softmax_loss: 1.1103 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9970 - val_loss: 2.5008 - val_triplet_loss: 0.1842 - val_softmax_loss: 1.5796 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8895\n",
      "Epoch 165/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1189 - triplet_loss: 0.0020 - softmax_loss: 1.1091 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9952\n",
      "Epoch 00165: saving model to /kaggle/working/semi_hard_triplet_model-165-0.883.h5\n",
      "162/162 [==============================] - 32s 197ms/step - loss: 1.1189 - triplet_loss: 0.0020 - softmax_loss: 1.1091 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9952 - val_loss: 2.5409 - val_triplet_loss: 0.1858 - val_softmax_loss: 1.6118 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8829\n",
      "Epoch 166/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1159 - triplet_loss: 0.0019 - softmax_loss: 1.1062 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9971\n",
      "Epoch 00166: saving model to /kaggle/working/semi_hard_triplet_model-166-0.892.h5\n",
      "162/162 [==============================] - 31s 188ms/step - loss: 1.1159 - triplet_loss: 0.0019 - softmax_loss: 1.1062 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9971 - val_loss: 2.5316 - val_triplet_loss: 0.1857 - val_softmax_loss: 1.6032 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8918\n",
      "Epoch 167/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1180 - triplet_loss: 0.0026 - softmax_loss: 1.1052 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9970\n",
      "Epoch 00167: saving model to /kaggle/working/semi_hard_triplet_model-167-0.890.h5\n",
      "162/162 [==============================] - 33s 201ms/step - loss: 1.1180 - triplet_loss: 0.0026 - softmax_loss: 1.1052 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9970 - val_loss: 2.5142 - val_triplet_loss: 0.1842 - val_softmax_loss: 1.5932 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8899\n",
      "Epoch 168/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1191 - triplet_loss: 0.0022 - softmax_loss: 1.1081 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9964\n",
      "Epoch 00168: saving model to /kaggle/working/semi_hard_triplet_model-168-0.895.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.1191 - triplet_loss: 0.0022 - softmax_loss: 1.1081 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9964 - val_loss: 2.4713 - val_triplet_loss: 0.1805 - val_softmax_loss: 1.5687 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8945\n",
      "Epoch 169/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1195 - triplet_loss: 0.0026 - softmax_loss: 1.1064 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9954\n",
      "Epoch 00169: saving model to /kaggle/working/semi_hard_triplet_model-169-0.891.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1195 - triplet_loss: 0.0026 - softmax_loss: 1.1064 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9954 - val_loss: 2.4997 - val_triplet_loss: 0.1826 - val_softmax_loss: 1.5865 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8910\n",
      "Epoch 170/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1163 - triplet_loss: 0.0020 - softmax_loss: 1.1062 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9963\n",
      "Epoch 00170: saving model to /kaggle/working/semi_hard_triplet_model-170-0.895.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1163 - triplet_loss: 0.0020 - softmax_loss: 1.1062 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9963 - val_loss: 2.5066 - val_triplet_loss: 0.1833 - val_softmax_loss: 1.5903 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8945\n",
      "Epoch 171/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1138 - triplet_loss: 0.0022 - softmax_loss: 1.1027 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9970\n",
      "Epoch 00171: saving model to /kaggle/working/semi_hard_triplet_model-171-0.886.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1138 - triplet_loss: 0.0022 - softmax_loss: 1.1027 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9970 - val_loss: 2.5206 - val_triplet_loss: 0.1845 - val_softmax_loss: 1.5983 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8856\n",
      "Epoch 172/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1098 - triplet_loss: 0.0017 - softmax_loss: 1.1015 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9974\n",
      "Epoch 00172: saving model to /kaggle/working/semi_hard_triplet_model-172-0.888.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1098 - triplet_loss: 0.0017 - softmax_loss: 1.1015 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9974 - val_loss: 2.4920 - val_triplet_loss: 0.1821 - val_softmax_loss: 1.5813 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8883\n",
      "Epoch 173/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1186 - triplet_loss: 0.0028 - softmax_loss: 1.1046 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9960- ETA: 6s - loss: 1.1214 - triplet_loss: 0.0031 - softmax_loss: 1.1057 - triplet_accu\n",
      "Epoch 00173: saving model to /kaggle/working/semi_hard_triplet_model-173-0.892.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1186 - triplet_loss: 0.0028 - softmax_loss: 1.1046 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9960 - val_loss: 2.4866 - val_triplet_loss: 0.1823 - val_softmax_loss: 1.5753 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8922\n",
      "Epoch 174/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1140 - triplet_loss: 0.0020 - softmax_loss: 1.1042 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9969\n",
      "Epoch 00174: saving model to /kaggle/working/semi_hard_triplet_model-174-0.890.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1140 - triplet_loss: 0.0020 - softmax_loss: 1.1042 - triplet_accuracy: 0.0017 - softmax_accuracy: 0.9969 - val_loss: 2.4722 - val_triplet_loss: 0.1817 - val_softmax_loss: 1.5639 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8899\n",
      "Epoch 175/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1122 - triplet_loss: 0.0020 - softmax_loss: 1.1020 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9973\n",
      "Epoch 00175: saving model to /kaggle/working/semi_hard_triplet_model-175-0.893.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.1122 - triplet_loss: 0.0020 - softmax_loss: 1.1020 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9973 - val_loss: 2.4846 - val_triplet_loss: 0.1821 - val_softmax_loss: 1.5740 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8926\n",
      "Epoch 176/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1138 - triplet_loss: 0.0022 - softmax_loss: 1.1030 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9963\n",
      "Epoch 00176: saving model to /kaggle/working/semi_hard_triplet_model-176-0.881.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.1138 - triplet_loss: 0.0022 - softmax_loss: 1.1030 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9963 - val_loss: 2.5427 - val_triplet_loss: 0.1864 - val_softmax_loss: 1.6107 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8810\n",
      "Epoch 177/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1164 - triplet_loss: 0.0028 - softmax_loss: 1.1023 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9962- ETA: 1s - loss: 1.1174 - triplet_loss: 0.0029 - softmax_loss: 1.1029 - triplet_accuracy: 0.0024 - softmax_accuracy: \n",
      "Epoch 00177: saving model to /kaggle/working/semi_hard_triplet_model-177-0.883.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1164 - triplet_loss: 0.0028 - softmax_loss: 1.1023 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9962 - val_loss: 2.5187 - val_triplet_loss: 0.1860 - val_softmax_loss: 1.5889 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8833\n",
      "Epoch 178/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1151 - triplet_loss: 0.0024 - softmax_loss: 1.1034 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9967\n",
      "Epoch 00178: saving model to /kaggle/working/semi_hard_triplet_model-178-0.887.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1151 - triplet_loss: 0.0024 - softmax_loss: 1.1034 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9967 - val_loss: 2.5129 - val_triplet_loss: 0.1843 - val_softmax_loss: 1.5914 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8868\n",
      "Epoch 179/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1136 - triplet_loss: 0.0025 - softmax_loss: 1.1009 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9961\n",
      "Epoch 00179: saving model to /kaggle/working/semi_hard_triplet_model-179-0.894.h5\n",
      "162/162 [==============================] - 31s 188ms/step - loss: 1.1136 - triplet_loss: 0.0025 - softmax_loss: 1.1009 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9961 - val_loss: 2.4805 - val_triplet_loss: 0.1843 - val_softmax_loss: 1.5592 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8941\n",
      "Epoch 180/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1121 - triplet_loss: 0.0021 - softmax_loss: 1.1018 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9966\n",
      "Epoch 00180: saving model to /kaggle/working/semi_hard_triplet_model-180-0.891.h5\n",
      "162/162 [==============================] - 32s 194ms/step - loss: 1.1121 - triplet_loss: 0.0021 - softmax_loss: 1.1018 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9966 - val_loss: 2.4849 - val_triplet_loss: 0.1826 - val_softmax_loss: 1.5721 - val_triplet_accuracy: 0.0012 - val_softmax_accuracy: 0.8906\n",
      "Epoch 181/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1117 - triplet_loss: 0.0019 - softmax_loss: 1.1024 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9951\n",
      "Epoch 00181: saving model to /kaggle/working/semi_hard_triplet_model-181-0.886.h5\n",
      "162/162 [==============================] - 30s 185ms/step - loss: 1.1117 - triplet_loss: 0.0019 - softmax_loss: 1.1024 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9951 - val_loss: 2.5181 - val_triplet_loss: 0.1845 - val_softmax_loss: 1.5958 - val_triplet_accuracy: 0.0015 - val_softmax_accuracy: 0.8864\n",
      "Epoch 182/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1048 - triplet_loss: 0.0011 - softmax_loss: 1.0992 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9967\n",
      "Epoch 00182: saving model to /kaggle/working/semi_hard_triplet_model-182-0.891.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1048 - triplet_loss: 0.0011 - softmax_loss: 1.0992 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9967 - val_loss: 2.4660 - val_triplet_loss: 0.1795 - val_softmax_loss: 1.5683 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8914\n",
      "Epoch 183/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1077 - triplet_loss: 0.0017 - softmax_loss: 1.0990 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9969\n",
      "Epoch 00183: saving model to /kaggle/working/semi_hard_triplet_model-183-0.897.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1077 - triplet_loss: 0.0017 - softmax_loss: 1.0990 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9969 - val_loss: 2.4636 - val_triplet_loss: 0.1801 - val_softmax_loss: 1.5629 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8968\n",
      "Epoch 184/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1094 - triplet_loss: 0.0021 - softmax_loss: 1.0987 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9972\n",
      "Epoch 00184: saving model to /kaggle/working/semi_hard_triplet_model-184-0.893.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1094 - triplet_loss: 0.0021 - softmax_loss: 1.0987 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9972 - val_loss: 2.4979 - val_triplet_loss: 0.1830 - val_softmax_loss: 1.5830 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8934\n",
      "Epoch 185/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1050 - triplet_loss: 0.0016 - softmax_loss: 1.0969 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9969\n",
      "Epoch 00185: saving model to /kaggle/working/semi_hard_triplet_model-185-0.895.h5\n",
      "162/162 [==============================] - 30s 188ms/step - loss: 1.1050 - triplet_loss: 0.0016 - softmax_loss: 1.0969 - triplet_accuracy: 0.0018 - softmax_accuracy: 0.9969 - val_loss: 2.4673 - val_triplet_loss: 0.1815 - val_softmax_loss: 1.5600 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8953\n",
      "Epoch 186/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1011 - triplet_loss: 0.0013 - softmax_loss: 1.0948 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9967\n",
      "Epoch 00186: saving model to /kaggle/working/semi_hard_triplet_model-186-0.895.h5\n",
      "162/162 [==============================] - 31s 191ms/step - loss: 1.1011 - triplet_loss: 0.0013 - softmax_loss: 1.0948 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9967 - val_loss: 2.4710 - val_triplet_loss: 0.1810 - val_softmax_loss: 1.5660 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8953\n",
      "Epoch 187/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1048 - triplet_loss: 0.0021 - softmax_loss: 1.0944 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9974\n",
      "Epoch 00187: saving model to /kaggle/working/semi_hard_triplet_model-187-0.895.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1048 - triplet_loss: 0.0021 - softmax_loss: 1.0944 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9974 - val_loss: 2.4989 - val_triplet_loss: 0.1835 - val_softmax_loss: 1.5815 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8949\n",
      "Epoch 188/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1042 - triplet_loss: 0.0018 - softmax_loss: 1.0952 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9972\n",
      "Epoch 00188: saving model to /kaggle/working/semi_hard_triplet_model-188-0.896.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1042 - triplet_loss: 0.0018 - softmax_loss: 1.0952 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9972 - val_loss: 2.4796 - val_triplet_loss: 0.1828 - val_softmax_loss: 1.5654 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8957\n",
      "Epoch 189/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1001 - triplet_loss: 0.0014 - softmax_loss: 1.0932 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9970\n",
      "Epoch 00189: saving model to /kaggle/working/semi_hard_triplet_model-189-0.891.h5\n",
      "162/162 [==============================] - 31s 188ms/step - loss: 1.1001 - triplet_loss: 0.0014 - softmax_loss: 1.0932 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9970 - val_loss: 2.4790 - val_triplet_loss: 0.1798 - val_softmax_loss: 1.5801 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8914\n",
      "Epoch 190/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1032 - triplet_loss: 0.0020 - softmax_loss: 1.0931 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9973\n",
      "Epoch 00190: saving model to /kaggle/working/semi_hard_triplet_model-190-0.886.h5\n",
      "162/162 [==============================] - 32s 195ms/step - loss: 1.1032 - triplet_loss: 0.0020 - softmax_loss: 1.0931 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9973 - val_loss: 2.4909 - val_triplet_loss: 0.1797 - val_softmax_loss: 1.5926 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8864\n",
      "Epoch 191/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1037 - triplet_loss: 0.0020 - softmax_loss: 1.0938 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9962\n",
      "Epoch 00191: saving model to /kaggle/working/semi_hard_triplet_model-191-0.894.h5\n",
      "162/162 [==============================] - 30s 188ms/step - loss: 1.1037 - triplet_loss: 0.0020 - softmax_loss: 1.0938 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9962 - val_loss: 2.4721 - val_triplet_loss: 0.1785 - val_softmax_loss: 1.5797 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8937\n",
      "Epoch 192/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1055 - triplet_loss: 0.0023 - softmax_loss: 1.0941 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9964\n",
      "Epoch 00192: saving model to /kaggle/working/semi_hard_triplet_model-192-0.888.h5\n",
      "162/162 [==============================] - 32s 196ms/step - loss: 1.1055 - triplet_loss: 0.0023 - softmax_loss: 1.0941 - triplet_accuracy: 0.0019 - softmax_accuracy: 0.9964 - val_loss: 2.5018 - val_triplet_loss: 0.1785 - val_softmax_loss: 1.6092 - val_triplet_accuracy: 7.7280e-04 - val_softmax_accuracy: 0.8876\n",
      "Epoch 193/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1074 - triplet_loss: 0.0023 - softmax_loss: 1.0961 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9969- ETA: 4s - loss: 1.1066 - triplet_loss: 0.0022 - softmax_loss: 1.0958 - triplet_accuracy: 0.00\n",
      "Epoch 00193: saving model to /kaggle/working/semi_hard_triplet_model-193-0.883.h5\n",
      "162/162 [==============================] - 31s 192ms/step - loss: 1.1074 - triplet_loss: 0.0023 - softmax_loss: 1.0961 - triplet_accuracy: 0.0024 - softmax_accuracy: 0.9969 - val_loss: 2.5308 - val_triplet_loss: 0.1827 - val_softmax_loss: 1.6176 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8829\n",
      "Epoch 194/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1025 - triplet_loss: 0.0016 - softmax_loss: 1.0945 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9976\n",
      "Epoch 00194: saving model to /kaggle/working/semi_hard_triplet_model-194-0.891.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1025 - triplet_loss: 0.0016 - softmax_loss: 1.0945 - triplet_accuracy: 0.0026 - softmax_accuracy: 0.9976 - val_loss: 2.5037 - val_triplet_loss: 0.1809 - val_softmax_loss: 1.5990 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8910\n",
      "Epoch 195/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1043 - triplet_loss: 0.0017 - softmax_loss: 1.0956 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9967\n",
      "Epoch 00195: saving model to /kaggle/working/semi_hard_triplet_model-195-0.891.h5\n",
      "162/162 [==============================] - 31s 194ms/step - loss: 1.1043 - triplet_loss: 0.0017 - softmax_loss: 1.0956 - triplet_accuracy: 0.0023 - softmax_accuracy: 0.9967 - val_loss: 2.4761 - val_triplet_loss: 0.1807 - val_softmax_loss: 1.5726 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8906\n",
      "Epoch 196/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.0974 - triplet_loss: 0.0013 - softmax_loss: 1.0910 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9973\n",
      "Epoch 00196: saving model to /kaggle/working/semi_hard_triplet_model-196-0.895.h5\n",
      "162/162 [==============================] - 31s 193ms/step - loss: 1.0974 - triplet_loss: 0.0013 - softmax_loss: 1.0910 - triplet_accuracy: 0.0022 - softmax_accuracy: 0.9973 - val_loss: 2.4741 - val_triplet_loss: 0.1787 - val_softmax_loss: 1.5805 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8949\n",
      "Epoch 197/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1054 - triplet_loss: 0.0023 - softmax_loss: 1.0940 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9964\n",
      "Epoch 00197: saving model to /kaggle/working/semi_hard_triplet_model-197-0.890.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 1.1054 - triplet_loss: 0.0023 - softmax_loss: 1.0940 - triplet_accuracy: 0.0025 - softmax_accuracy: 0.9964 - val_loss: 2.4887 - val_triplet_loss: 0.1805 - val_softmax_loss: 1.5865 - val_triplet_accuracy: 0.0019 - val_softmax_accuracy: 0.8899\n",
      "Epoch 198/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1017 - triplet_loss: 0.0019 - softmax_loss: 1.0924 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9972\n",
      "Epoch 00198: saving model to /kaggle/working/semi_hard_triplet_model-198-0.892.h5\n",
      "162/162 [==============================] - 31s 190ms/step - loss: 1.1017 - triplet_loss: 0.0019 - softmax_loss: 1.0924 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9972 - val_loss: 2.4699 - val_triplet_loss: 0.1801 - val_softmax_loss: 1.5694 - val_triplet_accuracy: 0.0000e+00 - val_softmax_accuracy: 0.8918\n",
      "Epoch 199/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1003 - triplet_loss: 0.0016 - softmax_loss: 1.0924 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9968\n",
      "Epoch 00199: saving model to /kaggle/working/semi_hard_triplet_model-199-0.884.h5\n",
      "162/162 [==============================] - 32s 199ms/step - loss: 1.1003 - triplet_loss: 0.0016 - softmax_loss: 1.0924 - triplet_accuracy: 0.0020 - softmax_accuracy: 0.9968 - val_loss: 2.5113 - val_triplet_loss: 0.1826 - val_softmax_loss: 1.5982 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8837\n",
      "Epoch 200/200\n",
      "162/162 [==============================] - ETA: 0s - loss: 1.1015 - triplet_loss: 0.0024 - softmax_loss: 1.0896 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9971\n",
      "Epoch 00200: saving model to /kaggle/working/semi_hard_triplet_model-200-0.894.h5\n",
      "162/162 [==============================] - 31s 189ms/step - loss: 1.1015 - triplet_loss: 0.0024 - softmax_loss: 1.0896 - triplet_accuracy: 0.0021 - softmax_accuracy: 0.9971 - val_loss: 2.4820 - val_triplet_loss: 0.1812 - val_softmax_loss: 1.5761 - val_triplet_accuracy: 3.8640e-04 - val_softmax_accuracy: 0.8937\n"
     ]
    }
   ],
   "source": [
    "# Train the joint (triplet + softmax) model for `nbr_epochs` epochs, checkpointing each epoch.\n",
    "# NOTE(review): `len(...) // batch_size + 1` yields one extra step per epoch when the length\n",
    "# divides evenly by batch_size — confirm this overshoot is intended (harmless with an infinite generator).\n",
    "history = semi_hard_triplet_model.fit(train_generator,   # since the TensorFlow upgrade, fit() and fit_generator() are merged into one API\n",
    "                                      steps_per_epoch=len(train_img_path) // batch_size + 1,  # number of batches per epoch\n",
    "                                      validation_data=val_generator,\n",
    "                                      validation_steps=len(val_img_path) // batch_size + 1,\n",
    "                                      verbose=1,\n",
    "                                      shuffle=True,  # presumably a no-op with generator input — TODO confirm against Keras docs\n",
    "                                      epochs=nbr_epochs,\n",
    "                                      callbacks=[checkpoint])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Persist only the trained weights (not the architecture/optimizer state) to an HDF5 file\n",
    "# in the current working directory, alongside the per-epoch checkpoints saved during fit().\n",
    "semi_hard_triplet_model.save_weights('semi_hard_triplet_model.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO2dd3xUZdbHvycBQiD03osiiI0SQUUsrw0bLOoqrN1XAcvu2nXXhm2b7urrWhBXVNBd0BURFURl7eJKpEsTIRTpoSUQSDvvH2cmmYSUSUgyycz5fj7zue25zz33mZnfPfc8TVQVx3Ecp/YTF2kDHMdxnMrBBd1xHCdKcEF3HMeJElzQHcdxogQXdMdxnCjBBd1xHCdKcEGPYkRkpohcXdlpI4mIpIrImVWQr4rI4YH1cSLyQDhpK3Cdy0Xko4ra6TilId4OvWYhIhkhmw2AA0BuYHu0qr5R/VbVHEQkFbheVT+p5HwV6KGqqyorrYh0BdYAdVU1pzLsdJzSqBNpA5zCqGpScL008RKROi4STk3Bf481Aw+51BJE5DQR2SAi94jIZuAVEWkmIu+LyDYR2RlY7xhyzmcicn1g/RoR+UpEngykXSMi51YwbTcR+UJE0kXkExF5TkReL8HucGx8VES+DuT3kYi0DDl+pYisFZE0EbmvlPI5QUQ2i0h8yL7hIrIosD5AROaIyC4R2SQiz4pIvRLyelVEHgvZvitwzkYRua5I2vNFZL6I7BGR9SIyNuTwF4HlLhHJEJETg2Ubcv5JIjJXRHYHlieFWzblLOfmIvJK4B52isi0kGPDRGRB4B5+EpEhgf2FwlsiMjb4PYtI10Do6X9FZB3wn8D+twLfw+7Ab+SokPMTReSvge9zd+A3ligiH4jIr4vczyIR+UVx9+qUjAt67aIt0BzoAozCvr9XAtudgUzg2VLOHwisAFoCfwFeFhGpQNp/At8BLYCxwJWlXDMcG38FXAu0BuoBdwKISG/ghUD+7QPX60gxqOq3wF7gf4rk+8/Aei5wW+B+TgTOAG4qxW4CNgwJ2HMW0AMoGr/fC1wFNAXOB24MEaJTAsumqpqkqnOK5N0c+AB4JnBvfwM+EJEWRe7hoLIphrLKeRIWwjsqkNdTARsGABOBuwL3cAqQWlJ5FMOpwJHAOYHtmVg5tQbmAaEhwieB/sBJ2O/4biAPeA24IphIRI4DOgAzymGHA6Cq/qmhH+yPdWZg/TQgC6hfSvo+wM6Q7c+wkA3ANcCqkGMNAAXalictJhY5QIOQ468Dr4d5T8XZeH/I9k3Ah4H1B4HJIccaBsrgzBLyfgyYEFhvhIltlxLS3gq8E7KtwOGB9VeBxwLrE4A/haQ7IjRtMfk+DTwVWO8aSFsn5Pg1wFeB9SuB74qcPwe4pqyyKU85A+0w4WxWTLoXg/aW9vsLbI8Nfs8h99a9FBuaBtI0wR44mcBxxaRLAHZg9RJgwv98df/fouHjHnrtYpuq7g9uiEgDEXkx8Aq7B3vFbxoadijC5uCKqu4LrCaVM217YEfIPoD1JRkcpo2bQ9b3hdjUPjRvVd0LpJV0Lcwbv0hEEoCLgHmqujZgxxGBMMTmgB1/wLz1sihkA7C2yP0NFJFPA6GO3cCYMPMN5r22yL61mHcapKSyKUQZ5dwJ+852FnNqJ+CnMO0tjvyyEZF4EflTIGyzhwJPv2XgU7+4a6nqAeBN4AoRiQNGYm8UTjlxQa9dFG2SdAfQExioqo0peMUvKYxSGWwCmotIg5B9nUpJfyg2bgrNO3DNFiUlVtWlmCCeS+FwC1joZjnmBTYGfl8RG7A3lFD+CUwHOqlqE2BcSL5lNSHbiIVIQukM/ByGXUUprZzXY99Z02LOWw8cVkKee7G3syBti0kTeo+/AoZhYakmmBcftGE7sL+Ua70GXI6FwvZpkfCUEx4u6LWbRthr7K5APPahqr5gwONNAcaK
SD0RORG4sIps/DdwgYicHKjAfISyf7P/BH6DCdpbRezYA2SISC/gxjBteBO4RkR6Bx4oRe1vhHm/+wPx6F+FHNuGhTq6l5D3DOAIEfmViNQRkcuA3sD7YdpW1I5iy1lVN2Gx7ecDlad1RSQo+C8D14rIGSISJyIdAuUDsAAYEUifDFwShg0HsLeoBthbUNCGPCx89TcRaR/w5k8MvE0REPA84K+4d15hXNBrN08DiZj38y3wYTVd93KsYjENi1tPwf7IxVFhG1X1B+BmTKQ3ATuBDWWc9i+svuE/qro9ZP+dmNimAy8FbA7HhpmBe/gPsCqwDOUm4BERScdi/m+GnLsPeBz4Wqx1zQlF8k4DLsC86zSskvCCInaHS1nlfCWQjb2lbMXqEFDV77BK16eA3cDnFLw1PIB51DuBhyn8xlMcE7E3pJ+BpQE7QrkTWAzMxWLmf6awBk0EjsHqZJwK4B2LnENGRKYAy1W1yt8QnOhFRK4CRqnqyZG2pbbiHrpTbkTkeBE5LPCKPgSLm04r6zzHKYlAOOsmYHykbanNuKA7FaEt1qQuA2tDfaOqzo+oRU6tRUTOweobtlB2WMcpBQ+5OI7jRAnuoTuO40QJERucq2XLltq1a9dIXd5xHKdW8v33329X1VbFHYuYoHft2pWUlJRIXd5xHKdWIiJFexfn4yEXx3GcKMEF3XEcJ0pwQXccx4kSXNAdx3GiBBd0x3GcKKFMQReRCSKyVUSWlHBcROQZEVkVmDaqX+Wb6TiO45RFOB76q8CQUo6fi0051QObFu2FQzfLcRzHKS9ltkNX1S9EpGspSYYBE9XGEPhWRJqKSLvAGMyO49RGVCE7G0Sgbl3IyIA9e6BtW4iLK0iTkQE7d8KuXZCUVHB8+3ZYt84+jRtDv36WX2IitGgBeXmWZntgpOC6de2Tlwc7dsCPP0KXLtC+PcyZA/v3W/4NG0K7dtCmDWzZAj//DJs2QXo6NG0KRxxhNm3bZja1bQstW5pNGRmQmwvNmll+8fFwzDGWx6pVdiwvr2AZH5hUa/t2s71ePUhIOHgZH2/XSkuD3bvt/pKSbF+zZmbz1q2Wrl49u/axx8KJJ1b611YZHYs6UHiKrg2BfQcJuoiMwrx4OncuOvGL49RAVO0P3bKlCc1339mfsWVL2LwZmjeHRo0s3e7dkJlp5y1aZCKRl2fHEhNNbHr1snM3boT58+Gnn0ycmjSxc+fNs2Xr1pCTY+uZmSYCDRrAYYdZnnPnwvvvm4j17g09esDatbBhg4lgvXomQtnZZuemTbaem1vwyckpWA8KbVYW7N1rn9xcu5emTU2cwASscWO7p127LI/y0rAh7NtnecQqd91VYwW9uGm8iv2mVHU8geExk5OTY/jbdCqVrVth4UIT0ZYtYcgQeOIJ8xzHjjWRnDvXhDAvz8Tz+ONN4NauNS8yLc0E7MgjzavLyrJzZs6E1FQ7J+jhFUWkfOJUWvo6dUyM9wWmbI2PN7FNTDQvdH9gStkmTWDoULNz6VL4+GPo0AG6dTPvNienwOtt3do80aA3Wdxn3z4rg4QEE9wGDWyZk2Pl2769CXtqqpUD2HazZvZQa9rU7Nuyxe6tWTPzsDt1sgfiokV2DxkZBeXZpo19XyIFD5+4OHtgHHaYPRA3bzbha9bMHjLp6fbQ2rrVvqcOHcxjb9zYvPLVq229VStbbtpkD+K8PHvwxsXZ7yIx0R6Uixfb+UcdZeUeF2cfkQJPvVUrK5cDB6y8iy6zs+3+W7Sw+9q+3Wxt2tSuvXevfQeqdk5ioqWrAsIabTEQcnlfVY8u5tiLwGeq+q/A9grgtLJCLsnJyepd/2OErCwThgYNyk67fbt5wZs2meeZmWl/iFMCM6bNnQuffWafefMs3+zsg/MJhgqysgq24+MLxCOUOnXsz6hqQhEkKQlOPdWuvWaNCddpp8GSJSYs7dubOOzZY/k2a2Z/1pwcs713b8tbxIRsxQpYvtyu0bGjiWzP
nradkWFpe/c28cjMNIGpE+Jz5eWZwNWta9eqE7GRO5wIIiLfq2pycccq4xcxHbhFRCYDA4HdHj93APP63n8ffvtbC0cMG2ZiFRTBzp0tzaxZBV5R8NW+KPHxBd5xfDwkJ8OoUSagrVvDccdZKGTxYvjgAxgxwjykF180D2/o0AIB3LHDHgaJieZFtmtXEC9NS7OPCHTvXrA/lLPOKn9ZtGhh1zr77IOPtWx58L7iHn5xcfYQcZwSKNNDF5HgHI0tsQHoHwLqAqjqOBER4FmsJcw+4FpVLdP1dg+9lpGXZyInxUTYdu6EceNgxgzzKg8/HL74wrxRgL59oX9/eO89O968ub3Op6Zafueea+Kal2ci2revvaovXWpecny8hRQSE03ITzrJXp8dJwYpzUOP2AQXLug1EFVISYH16y3W17ChhSe+/hpeftm86mbNTFQ3bzYhv+EGmDjRKveSk82LXrkSBg2Ck0+22GSod+w4ziFR1SEXp7ayaRM8+yxMm2aVhOvWwaefHpyuTh245BJrpbFpk4l++/ZWWfTQQ1Y59fXX5jk7jhMxXNBjid274fe/N1Fu3Rpee8088cGDTdTr1IFnnrFKwIQEq6irV8/Eu7g4L1glZZculp/jOBHFBT2a2LvX4s1795pAz5ljzfLatrVY9eefWyeKTp1s/xVXwIMPWsw72ImkvKGR44+vmntxHKfcuKDXdnbutDDJW2/B1KnWTC8uzioYW7SwSsaVK02su3SByZOt1Ud2tjV/CxK67jhOrcQFvbaQk2OdZVJSrG10drZ53DNn2rGmTWH0aAt9ZGdbE8G+fYtvlQIu4I4Thbig12RycmDSJPjqK1iwwNpOt21rnjhYL7lbbzXxHjDA4t2O48QsLug1hby8grE1nn0WvvzSOsmkploX6dat4fXX4Ve/sq7PjRqZV+44jhPABb0msHgxDB9u7b+Tkqwn49FHWzfwp5+2dtyhoZNOnSJnq+M4NRYX9Eiye7d543/8ow3W8+tfWyuUUaPg9NMjbZ3jOLUMF/TqJCfHRpBr0MBGxLviCutxecEFNuaIj9PhOM4h4IJeHajCn/8MjzxSMF422NjY06d7W27HcSoFF/SqJDg2ypNPwptvWmuU4cNN1Pfts2aGDRtG2krHcaIEF/SqIisL/vd/rWVKQoJ55/ffX3K7cMdxnEPEBb0qWLLE2ofPng0PPAC33+5NDB2nkgkOFFsZPlJurnW2btfOhjJat84mQMrJKZhDJTERBg4M/3oZGfYCXjR9ZqblVRW4oFcmKSnw2GPw7rv2Tf7jH+alO04MoWozwS1YAPXrwwkn2CgUb7xhQwjddFPp/o2qCWxwWKE33rAhig4csME9mzWzOUYmTbIBPt98044vWgT//reN4nzggE1qdf/9JtJBvv8eLrvM0tx9t81dvXIlXH01fPutpWnWzEbUKI6BA60lcb9+cOONFjkdN84aqanaKBwZGTZny4sv2nwmt99ubSDOP9+mAhg4EK66Cu65p1KKu2jhaUQ+/fv316hi0SLV+HjVZs1Ux45VTUuLtEVOjJCdrXrhhfazy8oq2LdkieqqVao5OcWft3+/6syZqkuXqubmlu+aCxaoDhqk2qmT6oknqq5dq/rKK6pNm6omJKiavNmnSxfVd9+1vweoNmmiOmmS6rp1qrffrnrCCapDhlie116rWreupbvhBtW//rVwXsnJqr17F6yD6qWXqp5/fuF0wc9556nm5dn9vfOOauPGqu3a2RJUzzlHNTHR/rYTJ6qOG6d61VWqf/+76nvvWfl8/LHqp5+qvviiavv2qiIFNsTHqx5xhOq336r+/vcF142PV73kEtWGDQv2de6sOnSoalyc6n/+U/HvG0jREnTVBf1Q2LpV9YknVNevt39Ukyaq27ZF2iqnlpKTo/rhh6rLlhXsy8pSff118xfy8mzfs8+qnn666h13qG7ZYoIcFI0+fVSfe061f/+Cfccdp/r994WvlZJSIEqg
2rat6pgxqrfdpjphgl3rH/9QfeABW//sM9W77lJdsUL1kUdU69Wzc665xn72HTuaUA0aZOmef96uMX26aoMGBYL2+eeqJ59s23XrWj6DB9uDAEwsR41Svf76AtuGD1f97jvVGTNMmHNzC/yl22+3NB06qD74oOrmzaqzZ5sAP/WUHRs5UrVXL1vv1cseJLt3270lJpro//xzeN/Rnj2qN99sdo8bZ/fTvn2BrTfcYPe9dq2lX7vWHmazZ1s5geof/nBIPxMX9CrhnXcKfoUtWtjy8ccjbZVTCeTmqs6apXrrrar//nfF8ti0yQTjootU58wpPs2uXapvvKH61VeqTz6p2q2b/YyOOqrAq7zqqsLe6T/+YcLZqZPte+wx1SlTbP2hh8xbBNU2bUxUn33WPNK4OBObzZvN52jRwkTwn/80z/rii014ExPt/H79Cq771FOqrVoV9nwvvbTAd/n6a/NEBwxQTU8/+D6nTzcx/+IL287ONluvv141NdX2rV+vOnq0CV+QN94wj33v3pLLOS+v5DeM3FzV004zb/mUU+ytIPgGE+TAgZLzLo3QfPbsUb33XnvAlvQ2pGpC/5e/lP9tqCgu6JXN/v3mnhx3nP1aDz/cHtPF/ZqdsNi+XXXhwvDSPvKI6r/+FX7emzapzp9v63/5i72e79tXfNq8vMLeYXy8ifrs2apz5xZ4yWVx9tkmovXqWTghmPdHH6n+8Y+qq1dbqCFUJAcPLrj2nDmq991n67/7nXndzZrZ9pFH2k+tZ097hb/vPrNz/367xvz5qjt3FtiyY4c9nOrWtQfBBReo1qljXn9x9//44wWectDGhAR7e3jkEQtBFFfG+/eHVzbVyf799uCMJlzQK5sJE6zogr/sfftMkZwKc9VVqklJB3tQRZk3z4q+eXPzjKZMsVfqSy81kdyxw7zWzZst/d69JoAiJn5B8Rw92vJ6//2CvHNyVO++247fdZdF1EJDF0HveetWE9RrrjGxfvzxwkIftPGPfzRPVMS8yDPPLJyXiP2U3n3X4seqdk9Bb1dE9brrCvJOTVW98UYLe6iqXnmleeIXXmjhk7KYN8/Sg+o995SeNvhAWLbMvPO//73s/J3qwQW9MsnLs3/1sceG7645xRIMKxw4UBBf/O9/C6eZNctiqNnZtj1sWEFM9oILTPTat1dt1MhelIIC3LevCe/o0ZpfORY8Jxh3DX6mTjWRDHqjo0YVfLXbtqk+/bTFb59+2o5PmKD61lu2fthhtnzxRdXJk1UffthEvlEjE8W1a81Tb9nSbH3qKdXFi+0BNnFi8eVy3XUFeWdklFx+f/+7pWvUSPWyy8Ir89WrVR99tPQwRlFKCyM41Y8LemXy6qtWbK+9FmlLqoW8vMp/bmVkqP761ybEvXuboAbF9a9/tReehQvNyw7GbufPtw+YaJ57ruZXAu7dq/rNNxb/rVvXQhB16hTkedttdt158+zhkZVl+555xuLSTZrY20Hz5ha3Lel+c3Ot2uSGG+z8+vVVMzNVzzqr8AMCVO+8s+C8YAuMRx4Jr3zmzbOy+eab0tN9913B9bz6JnZwQa8sVq82d2jw4JhxWx57TLVrV2sCVxwPP6z6t78VeNBBZs1SffNNK7Izz7Swx0MPqf7wg3nLcXHmyYKJZFKSxXcvushaEQT3B5u6jR9fECvescO83AsuUF2zpuCaKSkFHv5HH1kzsqlTS/+qVq2y2PSgQVYxVxZDhqgefbSFRAYPtn1bt5qHPHGiefTTpxf2gFeuVP2//zv0yrCi7N9vMXqwazqxgQt6ZZCWZlX/jRoVVpEoJjPTvNZgQ57Q5nSqJlpxcZrfKmLaNBOt5csLt0VOSlI99VQLOQT3vfii5XHFFbY9YoStt2hhIZUTT7SwybhxJrg33GDN3QYMqPz7TE8P/y3k4YftPurUsZYNkWbgQCu/YGsRJ/opTdDjqqCvUvSxa5eNT/7DDzbJcteukbaoWPLyKje/f//b5tp4
8UUrgtdfL3x8yRK75qhRkJYGv/gF9O8PV15pXZsnTYLf/MZ65332mU209NRT8Mordg7AX/9qPedGj4aTT7Z89u2DF16wGfdGj7bBKL/4Ar77zqZTrWySksLvzn3SSfZIysmBQYMq35bycvrp1hOyc+dIW+LUCEpS+qr+1CoP/f77zQ2aNSvSlpRIbq61Y37ggfDSb9liTdnuv79wKCA726oH7rnH6n2POMK81+7drTVJKC++aMWyZo2d98Yb1sEEVF9+ufz3sHixnXvqqYX3B5vvQeFWKZFg9+6CN42a0LBp/37vyxZr4CGXQ2DXLqs1u/jiSFtSKsuXa3676U8/tdYdxcVV8/JUX3rJIkfBcMmll1pl4bZt1rYZCo49/bSdd+aZB4c7xoyxogkNV6SnW5vtilSk5uZant9+W3j/tGkFNtWENsXHHms9Dh0nEpQm6D44V1k895xNFXfffZG2pFiWLoXu3S2sAVC3bsHsde+9Z4MY1a9fkP63v4W//x3+53/g+ectzV13wbHHwp49NlDRv/8NF15o81Mffrid1707TJ1q6488YvkuWAB9+hQOVyQlWd4VIS7OQi1FGTDAln362CBIkWb8eBs8ynFqGi7opfHddzZ64gUXQN++1X75tWuhS5eSj2/eDMcdZ8+ajAwT7kmTLC59ySVw553wf/9nMekTTzShffllGDnS4uFxcdCzJ8yZA3/4gwnzyJFw8cWW/xFHFFzrsMNg+3b46Scb8e6tt2xEvRtuqNoyAIsRJyfb3CA1gYEDI22B45RASa57VX9qfMhl/XrrVtetmwWcq5mFCy3M8MILJacJxrCPOcbGrAgNieTlWbO60AGavv7a1ouOT7JmjbWpFjm4JUuQYEeaP/yhIE+wcUAcx6k+ONRWLiIyRERWiMgqEbm3mONNROQ9EVkoIj+IyLWV/uSpbm67zWIQH3wArVtX++UXLLDl3XfDxInWuuOdd6x1xQcfmEc+bZqlWbzYvOz+/QvOF4G//Q3OPdfGel640FqXwMEtRbp2hQkTrAVKr17F29O9uy3ffNPyDm736VMpt+s4TmVQktIHP0A88BPQHagHLAR6F0nze+DPgfVWwA6gXmn51mgP/ZNPzP189NEqv9TOndbpZM+ewvuDnWiC3dwTEsyD7trVts8+2zqVDBtW4C2/9FLx1/jxx4JKxWOOqbidwev06qX6wQc2hGtFR6tzHKdicIge+gBglaquVtUsYDIwrOhzAWgkIgIkBQQ959AeNREiO9tqDrt1syB0FTN2rF3uhBNg1aqC/StXmhf8+uvwxz9avPzyy6FRIxgzBj76yKYtveMO6N3bzunXr/hrHH44HHWUtRkPVpiWl6ZNbSYXsHj2eefBf/4D9epVLD/HcSqfcCpFOwDrQ7Y3AEWrhZ4FpgMbgUbAZap6UDcXERkFjALoXFN7QrzwgnUgmjatcPOQEsjLs8rFsti1y1qUJCZa5WTz5rBpk3XaOe00mz7ruuusAw2YoB9xROGKwEmTbKlq0aCvvrKOLiNGWLjk6KNLvv4vfmG3VVFBB6sYTUkxQXccpwZSkuuuBeGUXwL/CNm+Evh7kTSXAE8BAhwOrAEal5ZvjQy5bNliDavPPjushtRr19pQp6GD8qvaWNann24ViUGCo/6Bao8eVhF53XUWVvnpJxtcKdiFOzfXQi3BQaWKIze3YEzv7OyyO7msXWvXK230vrK49FKz8auvKp6H4ziHBocYctkAdArZ7oh54qFcC0wNXG9VQNBLqF6rwbz0krU5f/rpsPqCv/8+7N0LH39csG/KFPOaP//cusDPnw9btsCrr9rEsB98YJ559+5WETl6tK2PGGHnT54MGzdaU8PQZoNFiYsrmDm8Th2bhLc0One2JosNG5Z5WyVy5JGQkOAVoY5TUwlH0OcCPUSkm4jUA0Zg4ZVQ1gFnAIhIG6AnsLoyDa0Wpk61YPaRR4aVfNYsW6akFOybMcMaxaxYAS1b2kzf119v
8e777rPY80cfWXvvmTPh2WftvO7d7dL/+peFW6B0QY8Ed9wBc+ce2kPBcZyqo0xBV9Uc4BZgFrAMeFNVfxCRMSIyJpDsUeAkEVkMzAbuUdXtVWV0lZCaaqNBXXRRWMmzsqxSEKyXpkWerPKySxeriPzgA2jTxjz54cMLBPrEE+GNN2DIkMIvAiNHWvPCV1+17Zom6I0awTHHRNoKx3FKIqyeoqo6A5hRZN+4kPWNwNmVa1o1E2zUXUp3xLQ0OHAA2re3dt8ZGdbOe+ZMWLPGvOzNmwsGYzz2WBP7Dz+0EQPL4qqrrO34pEnQoIFdx3EcJ1x8+Nwgb79t7mdw8JIAH35oLU9U4ZxzoGNHW957r8Wu77rL0gXHUtm8Gdq2LTg/Ls7CLK1alW1C06Y2jkq9etCjR3itZxzHcYL4WC5gavzVVzagSRFuvtk88YkTLdkFF1h78bQ0C5GcdJINiJWSYs79tm2FBb28JCfDu+/aw8JxHKc8uGwAPPyw9Zq5+eZCuzMzLZSiCpdeah705MkHVwoGQytbt1radu0OzZwhQw7tfMdxYhN/qf/+e+vxc8cd0LhxoUMrV5pAN2tmHXmuu674Fh59+9rYK5s32/aheOiO4zgVxQV9yhQLWv/61wcdWrbMluPG2fRov/1t8VkceaSFYJYssW0XdMdxIoGHXL75xoYpLOKdAyxfbhWTQ4dayKUkgiMUfv65LV3QHceJBLHtoWdlWW3miScWe3jZMhujq6whXYKC/tlntmzTpvJMdBzHCZfYFvQFC6xheSmCXtL44KF06WJd4levtinSgl3yHcdxqpPYFvRvvrFlMYKem2uVouGMAhAfX9Cr81BbuDiO41SU2Bb0OXNs1KoOHQDrQNSli/XYfOklc97DHNYl35P3+LnjOJEititF58yxnkEBPvwQNmywsVeCY4+7oDuOU1uIXUHfsAHWry8UblmyxGb/mTcPPv3UWrmEO8O7C7rjOJEmdgV9zhxbFhH0gQOtK//ZZ9snXHr2tKULuuM4kSJ2Y+hz5lh7xMBsDRkZ1s2/tGncSuPoo2HYsPI9BBzHcSqT2PXQv/nGRsIKzHK8dKntrqigJyQUjMDrOI4TCWLTQ9+/3wLlRcItAEcdFSGbHMdxDg8eVpQAABn+SURBVJHY9NDnzYPsbDjxRPLybM7PJUusQ1C3bpE2znEcp2LEpoce0qHoz3+2ZugTJ1oLl/j4yJrmOI5TUWJT0D/9FHr0IKdlW557zsblSkuzcc0dx3FqK7EXcsnOti6hV17JBx/Azz/D1Kkm6uF2InIcx6mJxJ6gf/edtVE84wzGjbOJmC+80Kd8cxyn9hN7IZfZs0GEtOP+h1mz4NprXcwdx4kOYlPQ+/Zl9rxmqML550faIMdxnMohtgQ9M9N6iJ5xBh99ZGOXH398pI1yHMepHGJL0H/4AbKz0QED+egjOOMMD7c4jhM9xJagL1zIDpqxsN7xrF/v4644jhNdxJR/mj53OV1JJX2YTQjtgu44TjQRU4I+5+s80mnMFVfYlHHezd9xnGgidgRdlS9/bEu85PLCC/EkJUXaIMdxnMoldmLoa9fy5YHj6ds5zcXccZyoJGYEPStlEf9lIINPyIm0KY7jOFVCWIIuIkNEZIWIrBKRe0tIc5qILBCRH0Tk88o189D5ftZ29pPI4KHNIm2K4zhOlVBmDF1E4oHngLOADcBcEZmuqktD0jQFngeGqOo6EWldVQZXlC+/bwDAoDMTI2yJ4zhO1RCOhz4AWKWqq1U1C5gMDCuS5lfAVFVdB6CqWyvXzEPny9RO9ExcR+sa96hxHMepHMIR9A7A+pDtDYF9oRwBNBORz0TkexG5qriMRGSUiKSISMq2bdsqZnEFyMuDr3cdxeCOa6rtmo7jONVNOIIuxezTItt1gP7A+cA5wAMicsRBJ6mOV9VkVU1u1apVuY2tKEsXZLFTm3Jy7x3Vdk3HcZzqJpx26BuATiHbHYGNxaTZrqp7gb0i
8gVwHLCyUqw8RL58bxfQmsGD8iJtiuM4TpURjoc+F+ghIt1EpB4wApheJM27wGARqSMiDYCBwLLKNbXifPl5Hu35mW7Ht4y0KY7jOFVGmR66quaIyC3ALCAemKCqP4jImMDxcaq6TEQ+BBYBecA/VHVJVRpeHr5amMTJfIB0OyHSpjiO41QZYXX9V9UZwIwi+8YV2X4CeKLyTKscNm+G9TuSuEO+hQ4XR9ocx3GcKiPqe4qmptry8Fa7ffBzx3GimqgX9HXrbNm5c2TtcBzHqWqiXtDXB1rQd+7pPUQdx4luoj4GsW5NDo3YR5Mj2kTaFMdxnCol6j30dSv205l10LVrpE1xHMepUqJe0NevzXVBdxwnJoh6QV+3uR6dWO+C7jhO1BPVgp6ZCdvSE+kctwE6FB1PzHEcJ7qIakHfsMGWnVpkQnx8ZI1xHMepYqJa0PPboHf0Qbkcx4l+olrQ89ugH1Y3soY4juNUA1Et6Ot+ygagQ+8mEbbEcRyn6onqjkXrlu+lLZkkHN6p7MSO4zi1nKj20NevzrEmi926RdoUx3GcKieqBX3dxnjvVOQ4TswQtYKuCuvTGlgb9HbtIm2O4zhOlRO1gr5zJ+zNTqBT0wxvg+44TkwQtYKe3wa9VWZkDXEcx6kmolbQ89ugd8iNrCGO4zjVRNQKetBD79TVwy2O48QGUdsOfd1P2dQjj9bdkyJtiuM4TrUQtYK+ftV+OrKVuA7ewsVxnNggekMuqXnWBt2bLDqOEyNEraCv31THBd1xnJgiKgU9Jwd+3pFo3f5d0B3HiRGiUtA3bYLcvDg6x/0MLVpE2hzHcZxqISoFPdgGvVOzDIiLylt0HMc5iKhUu/xeou2yI2uI4zhONRLVgt7Jh0F3HCeGiEpBX78emshuGnduGmlTHMdxqo2oFPR1qXl01rXQtm2kTXEcx6k2whJ0ERkiIitEZJWI3FtKuuNFJFdELqk8E8vP+tQcb7LoOE7MUaagi0g88BxwLtAbGCkivUtI92dgVmUbWV7WrRfvVOQ4TswRjoc+AFilqqtVNQuYDAwrJt2vgbeBrZVoX7nZtw/Sdtc1QW/TJpKmOI7jVCvhCHoHYH3I9obAvnxEpAMwHBhXWkYiMkpEUkQkZdu2beW1NSzy26Cz3gXdcZyYIhxBl2L2aZHtp4F7VLXU2SRUdbyqJqtqcqtWrcK1sVzkt0F3D91xnBgjnOFzNwChLbo7AhuLpEkGJosIQEvgPBHJUdVplWJlOcgX9KSdkJhY3Zd3HMeJGOEI+lygh4h0A34GRgC/Ck2gqt2C6yLyKvB+JMQcLOQi5NGhrU895zhObFGmoKtqjojcgrVeiQcmqOoPIjImcLzUuHl1s24dtKuXRt22PiiX4zixRVgzFqnqDGBGkX3FCrmqXnPoZlWcrVuhrWz1TkWO48QcUddTNCMDknJ3eYWo4zgxR/QJenoeSTku6I7jxB7RJ+i7c0kiwwXdcZyYI+oEfW+6mqB7DN1xnBgj6gQ9Y6/QkL3uoTuOE3NElaCrQsb+eA+5OI4Tk0SVoGdlQU5unAu64zgxSVQJ+t69tkxKyPFu/47jxBxRJegZGbZs2CSs/lKO4zhRRVQKelKzupE1xHEcJwJEp6C3SIisIY7jOBEgqgQ9P4beukFkDXEcx4kAUSXoGTuzAWjYJinCljiO41Q/0SXoG/cAkNS+UYQtcRzHqX6iS9A3WxA9qUPTCFviOI5T/USVoO/dGhD0zs0jbInjOE71E1WCnrFtPwANu1bNBNSO4zg1megS9B1Z1OMAdTt6t3/HcWKP6BL0XTkksde7/TuOE5NElaDv3ZNLUp3MSJvhOI4TEaJK0DP2QsO62ZE2w3EcJyJEl6Dviyepvgu64zixSXQJ+oE6JDXIi7QZjuM4ESF6BD0ri4yc+iR5r3/HcWKU6Bk4fOtW9tKQho2j5xnlOI5THqJH/bZsIYMkkppGzzPKcRynPESfoDf3sdAdx4lNosad1U2b
TdBb5UbaFMdxnIgQNR76/p/TUOJo2KphpE1xHMeJCFEj6BkbdgGQ1LxehC1xHMeJDGEJuogMEZEVIrJKRO4t5vjlIrIo8PlGRI6rfFNLJ2NTOoA3W3QcJ2YpU9BFJB54DjgX6A2MFJHeRZKtAU5V1WOBR4HxlW1oWeRPbuGC7jhOjBKOhz4AWKWqq1U1C5gMDAtNoKrfqOrOwOa3QMfKNbNsMrbZoFwNPYTuOE6MEo6gdwDWh2xvCOwrif8FZh6KURUhfUcWAI0bV/eVHcdxagbhNFuUYvZpsQlFTscE/eQSjo8CRgF07tw5TBPDICuLPelmZiOfH9pxnBglHA99A9ApZLsjsLFoIhE5FvgHMExV04rLSFXHq2qyqia3alWJ08Rt3Uo6puQu6I7jxCrhCPpcoIeIdBOResAIYHpoAhHpDEwFrlTVlZVvZhls2ZIv6B5ycRwnVikz5KKqOSJyCzALiAcmqOoPIjImcHwc8CDQAnheRAByVDW56swuwpYt7MGU3D10x3FilbC6/qvqDGBGkX3jQtavB66vXNPKQcBDT6iXR716UdNXynEcp1xEh/pt3sweGtOocXH1t47jOLFBdAj6li2k12lOYxd0x3FimKgR9D31Wnr83HGcmCY6BH3zZtLrNPMWLo7jxDTRIejr15Nep6l76I7jxDS1X9Dz8mD9evZoIxd0x3Fimto/Y9HWrZCVRXr9Bh5ycZwwyc7OZsOGDezfvz/SpjglUL9+fTp27EjdunXDPqf2C/q6dQDsOZDgHrrjhMmGDRto1KgRXbt2JdAZ0KlBqCppaWls2LCBbt26hX1e7Q+5rFtHLnHsO1DHPXTHCZP9+/fTokULF/MaiojQokWLcr9BRYWg+8BcjlN+XMxrNhX5fqJD0Bu2A3xgLsdxYpvoEPS2PQD30B2ntpCWlkafPn3o06cPbdu2pUOHDvnbWVlZpZ6bkpLCb37zmzKvcdJJJ1WWubWGqKgU3dNqMPzkgu44tYUWLVqwYMECAMaOHUtSUhJ33nln/vGcnBzq1ClenpKTk0lOLnsw12+++aZyjK1FRIWgpw/oCnjIxXEqxK23QkBcK40+feDpp8t1yjXXXEPz5s2ZP38+/fr147LLLuPWW28lMzOTxMREXnnlFXr27Mlnn33Gk08+yfvvv8/YsWNZt24dq1evZt26ddx666353ntSUhIZGRl89tlnjB07lpYtW7JkyRL69+/P66+/jogwY8YMbr/9dlq2bEm/fv1YvXo177//fiG7UlNTufLKK9m7dy8Azz77bL73/5e//IVJkyYRFxfHueeey5/+9CdWrVrFmDFj2LZtG/Hx8bz11lscdthhlVCoZVO7BT0zE7ZtY09jm5PaPXTHqd2sXLmSTz75hPj4ePbs2cMXX3xBnTp1+OSTT/j973/P22+/fdA5y5cv59NPPyU9PZ2ePXty4403HtR2e/78+fzwww+0b9+eQYMG8fXXX5OcnMzo0aP54osv6NatGyNHjizWptatW/Pxxx9Tv359fvzxR0aOHElKSgozZ85k2rRp/Pe//6VBgwbs2LEDgMsvv5x7772X4cOHs3//fvLy8iq/oEqgdgv6epu7Oj3JK0Udp8KU05OuSn75y18SHx8PwO7du7n66qv58ccfERGys7OLPef8888nISGBhIQEWrduzZYtW+jYsWOhNAMGDMjf16dPH1JTU0lKSqJ79+757bxHjhzJ+PHjD8o/OzubW265hQULFhAfH8/KlTYp2yeffMK1115LgwYNAGjevDnp6en8/PPPDB8+HLDOQdVJ7a4UXbsWgPREm5/UPXTHqd00bNgwf/2BBx7g9NNPZ8mSJbz33nsltslOSEjIX4+PjycnJyesNKrFznV/EE899RRt2rRh4cKFpKSk5FfaqupBTQvDzbOqqN2CHnhS7klsC7igO040sXv3bjp06ADAq6++Wun59+rVi9WrV5OamgrAlClTSrSjXbt2xMXFMWnSJHJzcwE4
++yzmTBhAvv27QNgx44dNG7cmI4dOzJt2jQADhw4kH+8Oqjdgr58OTRuTLomUa8ehDyEHcep5dx999387ne/Y9CgQfkiWpkkJiby/PPPM2TIEE4++WTatGlDkyZNDkp300038dprr3HCCSewcuXK/LeIIUOGMHToUJKTk+nTpw9PPvkkAJMmTeKZZ57h2GOP5aSTTmLz5s2VbntJSKReEZKTkzUlJeXQMjnrLNi9m5uSv+Ott2DbtsqxzXGinWXLlnHkkUdG2oyIk5GRQVJSEqrKzTffTI8ePbjtttsibVY+xX1PIvK9qhbbbrN2e+grVkCvXqSne7jFcZzy89JLL9GnTx+OOuoodu/ezejRoyNt0iFRe1u57N1rrVx69mTPdy7ojuOUn9tuu61GeeSHSu310AMVovTqxa5d3mTRcRyn9gr68uW27NWLVauge/fImuM4jhNpaq+gr1gBcXHsaX04GzeC1+84jhPr1F5BX74cunVj+Rprq9irV4TtcRzHiTC1V9AXLYJevfIjL+6hO07t4bTTTmPWrFmF9j399NPcdNNNpZ4TbOp83nnnsWvXroPSjB07Nr89eElMmzaNpUuX5m8/+OCDfPLJJ+Uxv8ZSOwV9wwZYtgxOO43ly6FOHY+hO05tYuTIkUyePLnQvsmTJ5c4QFZRZsyYQdOmTSt07aKC/sgjj3DmmWdWKK+aRu1stvjRR7Y85xyWPQg9ekA5JsZ2HCeESIyee8kll3D//fdz4MABEhISSE1NZePGjZx88snceOONzJ07l8zMTC655BIefvjhg87v2rUrKSkptGzZkscff5yJEyfSqVMnWrVqRf/+/QFrYz5+/HiysrI4/PDDmTRpEgsWLGD69Ol8/vnnPPbYY7z99ts8+uijXHDBBVxyySXMnj2bO++8k5ycHI4//nheeOEFEhIS6Nq1K1dffTXvvfce2dnZvPXWW/QqEuetCcPs1k4PfdYsaN8ejj6a5cs9fu44tY0WLVowYMAAPvzwQ8C888suuwwR4fHHHyclJYVFixbx+eefs2jRohLz+f7775k8eTLz589n6tSpzJ07N//YRRddxNy5c1m4cCFHHnkkL7/8MieddBJDhw7liSeeYMGCBYUEdP/+/VxzzTVMmTKFxYsXk5OTwwsvvJB/vGXLlsybN48bb7yx2LBOcJjdefPmMWXKlPxx2UOH2V24cCF33303YMPs3nzzzSxcuJBvvvmGdu3aHVqhUhs99Nxc+PhjGDaM7Bxh1Sq46KJIG+U4tZdIjZ4bDLsMGzaMyZMnM2HCBADefPNNxo8fT05ODps2bWLp0qUce+yxxebx5ZdfMnz48PwhbIcOHZp/bMmSJdx///3s2rWLjIwMzjnnnFLtWbFiBd26deOII44A4Oqrr+a5557j1ltvBewBAdC/f3+mTp160Pk1YZjdsDx0ERkiIitEZJWI3FvMcRGRZwLHF4lIv0qxrjhSUmDnTjjnHFatgpwc99Adpzbyi1/8gtmzZzNv3jwyMzPp168fa9as4cknn2T27NksWrSI888/v8Rhc4MUHcI2yDXXXMOzzz7L4sWLeeihh8rMp6xxrYJD8JY0RG9NGGa3TEEXkXjgOeBcoDcwUkR6F0l2LtAj8BkFvEBVsXs3HHUUuwecxahREBcHAwZU2dUcx6kikpKSOO2007juuuvyK0P37NlDw4YNadKkCVu2bGHmzJml5nHKKafwzjvvkJmZSXp6Ou+9917+sfT0dNq1a0d2djZvvPFG/v5GjRqRnp5+UF69evUiNTWVVatWATZq4qmnnhr2/dSEYXbD8dAHAKtUdbWqZgGTgWFF0gwDJqrxLdBURA49IFQMM3PPpmf2Etof04Jvv4XJk6Fnz6q4kuM4Vc3IkSNZuHAhI0aMAOC4446jb9++HHXUUVx33XUMGjSo1PODc4/26dOHiy++mMGDB+cfe/TRRxk4cCBnnXVWoQrMESNG8MQTT9C3b19++umn/P3169fn
lVde4Ze//CXHHHMMcXFxjBkzJux7qQnD7JY5fK6IXAIMUdXrA9tXAgNV9ZaQNO8Df1LVrwLbs4F7VDWlSF6jMA+ezp07918bmHGoPPz3v/C3v1md6PDhcMop5c7CcWIeHz63dlDe4XPDqRQtLkBV9CkQThpUdTwwHmw89DCufRADB0IJE4s4juPENOGEXDYAnUK2OwIbK5DGcRzHqULCEfS5QA8R6SYi9YARwPQiaaYDVwVau5wA7FbVTZVsq+M4lUikJzR2Sqci30+ZIRdVzRGRW4BZQDwwQVV/EJExgePjgBnAecAqYB9wbbktcRyn2qhfvz5paWm0aNGixGZ/TuRQVdLS0srdPr12zynqOE6FyM7OZsOGDWW2zXYiR/369enYsSN1i4xrcqiVoo7jRBl169alW7dukTbDqWRq51gujuM4zkG4oDuO40QJLuiO4zhRQsQqRUVkG1D+rqJGS2B7JZpTmdRU29yu8lFT7YKaa5vbVT4qalcXVW1V3IGICfqhICIpJdXyRpqaapvbVT5qql1Qc21zu8pHVdjlIRfHcZwowQXdcRwnSqitgj4+0gaUQk21ze0qHzXVLqi5trld5aPS7aqVMXTHcRznYGqrh+44juMUwQXdcRwnSqh1gl7WhNXVaEcnEflURJaJyA8i8tvA/rEi8rOILAh8zouAbakisjhw/ZTAvuYi8rGI/BhYNouAXT1DymWBiOwRkVsjUWYiMkFEtorIkpB9JZaRiPwu8JtbISKlTx9f+XY9ISLLAxOwvyMiTQP7u4pIZki5jatmu0r83qqrvEqxbUqIXakisiCwv1rKrBR9qNrfmKrWmg82fO9PQHegHrAQ6B0hW9oB/QLrjYCV2CTaY4E7I1xOqUDLIvv+AtwbWL8X+HMN+C43A10iUWbAKUA/YElZZRT4XhcCCUC3wG8wvhrtOhuoE1j/c4hdXUPTRaC8iv3eqrO8SrKtyPG/Ag9WZ5mVog9V+hurbR56OBNWVwuquklV5wXW04FlQIdI2BImw4DXAuuvAb+IoC0AZwA/qWpFewsfEqr6BbCjyO6SymgYMFlVD6jqGmzc/wHVZZeqfqSqOYHNb7EZwaqVEsqrJKqtvMqyTWyw90uBf1XV9UuwqSR9qNLfWG0T9A7A+pDtDdQAERWRrkBf4L+BXbcEXo8nRCK0gc3n+pGIfC82MTdAGw3MIhVYto6AXaGMoPCfLNJlBiWXUU363V0HzAzZ7iYi80XkcxEZXNJJVUhx31tNKq/BwBZV/TFkX7WWWRF9qNLfWG0T9LAmo65ORCQJeBu4VVX3AC8AhwF9gE3Y6151M0hV+wHnAjeLyCkRsKFExKYyHAq8FdhVE8qsNGrE705E7gNygDcCuzYBnVW1L3A78E8RaVyNJpX0vdWI8gowksKOQ7WWWTH6UGLSYvaVu8xqm6DXqMmoRaQu9mW9oapTAVR1i6rmqmoe8BJV+KpZEqq6MbDcCrwTsGGLiLQL2N0O2FrddoVwLjBPVbdAzSizACWVUcR/dyJyNXABcLkGgq6B1/O0wPr3WNz1iOqyqZTvLeLlBSAidYCLgCnBfdVZZsXpA1X8G6ttgh7OhNXVQiA29zKwTFX/FrK/XUiy4cCSoudWsV0NRaRRcB2rUFuCldPVgWRXA+9Wp11FKOQ1RbrMQiipjKYDI0QkQUS6AT2A76rLKBEZAtwDDFXVfSH7W4lIfGC9e8Cu1dVoV0nfW0TLK4QzgeWquiG4o7rKrCR9oKp/Y1Vd21sFtcfnYTXGPwH3RdCOk7FXokXAgsDnPGASsDiwfzrQrprt6o7Vli8EfgiWEdACmA38GFg2j1C5NQDSgCYh+6q9zLAHyiYgG/OO/re0MgLuC/zmVgDnVrNdq7D4avB3Ni6Q9uLAd7wQmAdcWM12lfi9VVd5lWRbYP+rwJgiaaulzErRhyr9jXnXf8dxnCihtoVcHMdxnBJwQXccx4kSXNAdx3GiBBd0x3GcKMEF3XEcJ0pw
QXccx4kSXNAdx3GihP8HLhNJn3j6jdYAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWoAAAEICAYAAAB25L6yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXxU5fX48c8hCYQs7EEiUZYK4gIEDKigiGLrRhFxpVahuGu1al361apU29pWfv36tS4takUtirt1r+KGS13CIoKA7BIWCYGQQAhkOb8/zgwJMctkmSXJeb9eec3MnXufe+bO5NznPve5zxVVxTnnXOxqE+0AnHPO1c4TtXPOxThP1M45F+M8UTvnXIzzRO2cczHOE7VzzsU4T9StjIi8KSKTmnreaBKRNSJyYhjKVRE5KPD87yJyWyjzNmA954vI2w2Ns5ZyR4tITlOX6yIvPtoBuLqJyI5KL5OA3UBZ4PVlqjoz1LJU9ZRwzNvSqerlTVGOiPQGVgMJqloaKHsmEPJ36FofT9TNgKqmBJ+LyBrgYlWdXXU+EYkP/vM751oOb/poxoKHtiJys4hsAh4Tkc4i8pqI5IrItsDzjErLfCAiFweeTxaRj0VkWmDe1SJySgPn7SMic0SkUERmi8gDIvKvGuIOJca7ROSTQHlvi0i3Su9fICJrRSRPRG6tZfscJSKbRCSu0rQzRGRh4PlwEfmviOSLyEYRuV9E2tZQ1gwR+X2l1zcGltkgIlOqzHuaiMwXkQIRWSciUyu9PSfwmC8iO0Tk6OC2rbT8CBH5UkS2Bx5HhLptaiMihwSWzxeRxSIyrtJ7p4rIN4Ey14vIDYHp3QLfT76IbBWRj0TE80aE+QZv/noAXYBewKXYd/pY4PWBwC7g/lqWPxJYBnQD/gI8KiLSgHmfAr4AugJTgQtqWWcoMf4M+AXQHWgLBBPHocBDgfL3D6wvg2qo6mfATuCEKuU+FXheBlwX+DxHA2OAK2uJm0AMJwfi+THQD6jaPr4TuBDoBJwGXCEi4wPvjQo8dlLVFFX9b5WyuwCvA/cFPttfgddFpGuVz/CDbVNHzAnAq8DbgeWuBmaKyMGBWR7FmtFSgcOB9wLTfw3kAGnAfsAtgI87EWGeqJu/cuAOVd2tqrtUNU9VX1DVIlUtBP4AHFfL8mtV9WFVLQMeB9Kxf8iQ5xWRA4FhwO2qukdVPwZeqWmFIcb4mKp+q6q7gGeBzMD0s4DXVHWOqu4Gbgtsg5o8DUwEEJFU4NTANFR1rqp+pqqlqroG+Ec1cVTnnEB8i1R1J7Zjqvz5PlDVr1W1XFUXBtYXSrlgiX25qj4ZiOtpYCnw00rz1LRtanMUkAL8KfAdvQe8RmDbACXAoSLSQVW3qeq8StPTgV6qWqKqH6kPEBRxnqibv1xVLQ6+EJEkEflHoGmgADvU7lT58L+KTcEnqloUeJpSz3n3B7ZWmgawrqaAQ4xxU6XnRZVi2r9y2YFEmVfTurDa8wQRaQdMAOap6tpAHP0Dh/WbAnH8Eatd12WfGIC1VT7fkSLyfqBpZztweYjlBsteW2XaWqBnpdc1bZs6Y1bVyju1yuWeie3E1orIhyJydGD6PcAK4G0RWSUivwntY7im5Im6+atau/k1cDBwpKp2oOJQu6bmjKawEegiIkmVph1Qy/yNiXFj5bID6+xa08yq+g2WkE5h32YPsCaUpUC/QBy3NCQGrPmmsqewI4oDVLUj8PdK5dZVG92ANQlVdiCwPoS46ir3gCrty3vLVdUvVfV0rFnkZaymjqoWquqvVbUvVqu/XkTGNDIWV0+eqFueVKzNNz/Q3nlHuFcYqKFmA1NFpG2gNvbTWhZpTIzPA2NF5JjAib87qft3/BRwDbZDeK5KHAXADhEZAFwRYgzPApNF5NDAjqJq/KnYEUaxiAzHdhBBuVhTTd8ayn4D6C8iPxOReBE5FzgUa6Zo
jM+xtvObRCRBREZj39GswHd2voh0VNUSbJuUAYjIWBE5KHAuIji9rPpVuHDxRN3y3Au0B7YAnwFvRWi952Mn5PKA3wPPYP29q9PgGFV1MXAVlnw3Atuwk121eRoYDbynqlsqTb8BS6KFwMOBmEOJ4c3AZ3gPaxZ4r8osVwJ3ikghcDuB2mlg2SKsTf6TQE+Ko6qUnQeMxY468oCbgLFV4q43Vd0DjMOOLLYADwIXqurSwCwXAGsCTUCXAz8PTO8HzAZ2AP8FHlTVDxoTi6s/8fMCLhxE5BlgqaqGvUbvXEvnNWrXJERkmIj8SETaBLqvnY61dTrnGsmvTHRNpQfwInZiLwe4QlXnRzck51oGb/pwzrkY500fzjkX48LS9NGtWzft3bt3OIp2zrkWae7cuVtUNa2698KSqHv37k12dnY4inbOuRZJRKpekbqXN30451yM80TtnHMxzhO1c87FOO9H7VwLUFJSQk5ODsXFxXXP7KIqMTGRjIwMEhISQl7GE7VzLUBOTg6pqan07t2bmu/74KJNVcnLyyMnJ4c+ffqEvJw3fTjXAhQXF9O1a1dP0jFOROjatWu9j3w8UTvXQniSbh4a8j3FVqK+6y74z3+iHYVzzsWU2ErUf/mLJ2rnmqG8vDwyMzPJzMykR48e9OzZc+/rPXv21LpsdnY211xzTZ3rGDFiRJ3zhOKDDz5g7NixTVJWpMTWycSkJNi5M9pROOfqqWvXrixYsACAqVOnkpKSwg03VNwcvbS0lPj46tNNVlYWWVlZda7j008/bZpgm6HYqlEnJ3uidq6FmDx5Mtdffz3HH388N998M1988QUjRoxgyJAhjBgxgmXLlgH71nCnTp3KlClTGD16NH379uW+++7bW15KSsre+UePHs1ZZ53FgAEDOP/88wmOAvrGG28wYMAAjjnmGK655po6a85bt25l/PjxDBo0iKOOOoqFCxcC8OGHH+49IhgyZAiFhYVs3LiRUaNGkZmZyeGHH85HH33U5NusJiHVqEXkV8Al2A06H1bVe8MSjSdq5xrv2mshULttMpmZcG/9/+2//fZbZs+eTVxcHAUFBcyZM4f4+Hhmz57NLbfcwgsvvPCDZZYuXcr7779PYWEhBx98MFdcccUP+hzPnz+fxYsXs//++zNy5Eg++eQTsrKyuOyyy5gzZw59+vRh4sSJdcZ3xx13MGTIEF5++WXee+89LrzwQhYsWMC0adN44IEHGDlyJDt27CAxMZHp06dz0kknceutt1JWVkZRUVG9t0dD1ZmoReRwLEkPB/YAb4nI66q6vMmj8UTtXIty9tlnExcXB8D27duZNGkSy5cvR0QoKSmpdpnTTjuNdu3a0a5dO7p37873339PRkbGPvMMHz5877TMzEzWrFlDSkoKffv23ds/eeLEiUyfPr3W+D7++OO9O4sTTjiBvLw8tm/fzsiRI7n++us5//zzmTBhAhkZGQwbNowpU6ZQUlLC+PHjyczMbNS2qY9QatSHAJ8FbsqJiHwInAH8pcmjSU6GCO6lnGuRGlDzDZfk5OS9z2+77TaOP/54XnrpJdasWcPo0aOrXaZdu3Z7n8fFxVFaWhrSPA25CUp1y4gIv/nNbzjttNN44403OOqoo5g9ezajRo1izpw5vP7661xwwQXceOONXHjhhfVeZ0OE0ka9CBglIl1FJAk4FTig6kwicqmIZItIdm5ubsOi8Rq1cy3W9u3b6dmzJwAzZsxo8vIHDBjAqlWrWLNmDQDPPFP3TeVHjRrFzJkzAWv77tatGx06dGDlypUMHDiQm2++maysLJYuXcratWvp3r07l1xyCRdddBHz5s1r8s9Qkzpr1Kq6RET+DLyD3TL+K+AHuzhVnQ5MB8jKymrY/b08UTvXYt10001MmjSJv/71r5xwwglNXn779u158MEHOfnkk+nWrRvDhw+vc5mpU6fyi1/8gkGDBpGUlMTjjz8OwL333sv7779PXFwchx56KKeccgqz
Zs3innvuISEhgZSUFJ544okm/ww1qfc9E0Xkj0COqj5Y0zxZWVnaoBsHXHQRvPUWrF9f/2Wda8WWLFnCIYccEu0wom7Hjh2kpKSgqlx11VX069eP6667Ltph/UB135eIzFXVavsphtQ9T0S6Bx4PBCYATzcyzup5jdo51wgPP/wwmZmZHHbYYWzfvp3LLrss2iE1iVAveHlBRLoCJcBVqrotLNF4onbONcJ1110XkzXoxgopUavqseEOBLBEXVoKJSVQj7FanXOuJYu9KxPBa9XOOVeJJ2rnnItxnqidcy7GxVaiTkqyR0/UzjUro0eP5j9Vhii+9957ufLKK2tdJtiN99RTTyU/P/8H80ydOpVp06bVuu6XX36Zb775Zu/r22+/ndmzZ9cn/GrF0nCosZWovUbtXLM0ceJEZs2atc+0WbNmhTQwEtiod506dWrQuqsm6jvvvJMTTzyxQWXFqthM1D7eh3PNyllnncVrr73G7t27AVizZg0bNmzgmGOO4YorriArK4vDDjuMO+64o9rle/fuzZYtWwD4wx/+wMEHH8yJJ564dyhUsD7Sw4YNY/DgwZx55pkUFRXx6aef8sorr3DjjTeSmZnJypUrmTx5Ms8//zwA7777LkOGDGHgwIFMmTJlb3y9e/fmjjvuYOjQoQwcOJClS5fW+vmiPRxqbN04wGvUzjVaNEY57dq1K8OHD+ett97i9NNPZ9asWZx77rmICH/4wx/o0qULZWVljBkzhoULFzJo0KBqy5k7dy6zZs1i/vz5lJaWMnToUI444ggAJkyYwCWXXALAb3/7Wx599FGuvvpqxo0bx9ixYznrrLP2Kau4uJjJkyfz7rvv0r9/fy688EIeeughrr32WgC6devGvHnzePDBB5k2bRqPPPJIjZ8v2sOhxmaN2hO1c81O5eaPys0ezz77LEOHDmXIkCEsXrx4n2aKqj766CPOOOMMkpKS6NChA+PGjdv73qJFizj22GMZOHAgM2fOZPHixbXGs2zZMvr06UP//v0BmDRpEnPmzNn7/oQJEwA44ogj9g7kVJOPP/6YCy64AKh+ONT77ruP/Px84uPjGTZsGI899hhTp07l66+/JjU1tdayQ+E1audamGiNcjp+/Hiuv/565s2bx65duxg6dCirV69m2rRpfPnll3Tu3JnJkydTXFxcazk13aV78uTJvPzyywwePJgZM2bwwQcf1FpOXeMYBYdKrWko1brKiuRwqF6jds41iZSUFEaPHs2UKVP21qYLCgpITk6mY8eOfP/997z55pu1ljFq1Cheeukldu3aRWFhIa+++ure9woLC0lPT6ekpGTv0KQAqampFBYW/qCsAQMGsGbNGlasWAHAk08+yXHHHdegzxbt4VBjq0bt3fOca9YmTpzIhAkT9jaBDB48mCFDhnDYYYfRt29fRo4cWevyQ4cO5dxzzyUzM5NevXpx7LEVo1fcddddHHnkkfTq1YuBAwfuTc7nnXcel1xyCffdd9/ek4gAiYmJPPbYY5x99tmUlpYybNgwLr/88gZ9rmgPh1rvYU5D0eBhTgHatoVf/xruvrtpg3KuBfNhTpuXsAxzGlE+gp5zzu3DE7VzzsW4mErUqniidq6BwtGM6ZpeQ76nmEnU5eVw3HEwteB6dmwvi3Y4zjUriYmJ5OXlebKOcapKXl4eiYmJ9VoupF4fInIdcDGgwNfAL1S19s6Q9VRYCD16wO8+uowZH5zOwgLo0KEp1+Bcy5WRkUFOTg65ubnRDsXVITExkYyMjHotU2eiFpGewDXAoaq6S0SeBc4DZjQkyJp07AjPPguvDb2dn86/k1mz4NJLm3INzrVcCQkJ9OnTJ9phuDAJtekjHmgvIvFAErAhXAGd1msRh7dbzsMPh2sNzjnXvNSZqFV1PTAN+A7YCGxX1berzicil4pItohkN+bwS1KSuST5KbKzm35gGeeca47qTNQi0hk4HegD7A8ki8jPq86nqtNVNUtVs9LS
0hoeUXIyP2/zFO3awb/+1fBinHOupQil6eNEYLWq5qpqCfAiMCJsESUn02XXevr0gToGtHLOuVYhlET9HXCUiCSJDWs1BlgStoiSk6GoiLQ0JTCOuHPOtWqhtFF/DjwPzMO65rUBpoctoqQkUCWtSzne08g550LsR62qdwDV30OnqQU6T3dL3U1ublJEVumcc7EsZq5M3KtHDwDSEgvIy7MrFp1zrjWLvUSdng5AWputlJfDtm1Rjsc556Is9hJ1oEbdTTcDeDu1c67Vi71EHaxR77GLH73nh3OutYu9RJ2YCJ06kVa8DvAatXPOxV6iBkhPJ61gJeCJ2jnnYjZRd9u2HPCmD+eci81E3aMHid+vJSXFa9TOORebiTo9HTZuJC1NPVE751q92E3UxcV061zmTR/OuVYvdhM1kJZS7DVq51yrF9uJun2hJ2rnXKsXm4k6ON5H/DZv+nDOtXqxmagDNepuuoVdu2DnzijH45xzURSbibpjR0hMJK10I+Bd9JxzrVso90w8WEQWVPorEJFrwxqVCKSn03HXJgAKCsK6Nueci2l13jhAVZcBmQAiEgesB14Kc1yQnk6HwvWAJ2rnXOtW36aPMcBKVV0bjmD2kZ5Oh/zvAE/UzrnWrb6J+jzg6ereEJFLRSRbRLJzm6JRuUcPOuStBjxRO+dat5ATtYi0BcYBz1X3vqpOV9UsVc1KS0trfGTp6XQozAGgsLDxxTnnXHNVnxr1KcA8Vf0+XMHsIz2dDlhV2mvUzrnWrD6JeiI1NHuERXo6yexERD1RO+datZAStYgkAT8GXgxvOJWkp9MGJbV9qSdq51yrVmf3PABVLQK6hjmWfQUuI+/QtpiCgoSIrto552JJbF6ZCJCWBm3a0CG+yGvUzrlWLXYTdVwc7LcfHWSHJ2rnXKsWu4karOeH5nuids61arGfqEu3eqJ2zrVqMZ+oU3dv8UTtnGvVYjtR9+hBh12bKSjQaEfinHNRE9uJOj2dDmynsBDKy6MdjHPORUczSNQFqIrf5cU512o1i0QNPt6Hc6718kTtnHMxLrYT9X777U3UPtSpc661iu1EnZhIh1Tr8eE1audcaxXbiRrokJYIeKJ2zrVesZ+oeyQBnqidc61X7CfqnqmAJ2rnXOsV6o0DOonI8yKyVESWiMjR4Q4sKDWjIwAF2/3qROdc6xTSjQOA/wPeUtWzAje5TQpjTPtIyNiP9hRRkNsGSIzUap1zLmbUmahFpAMwCpgMoKp7gD3hDauSQF/qgu/b4onaOdcahdL00RfIBR4Tkfki8oiIJFedSUQuFZFsEcnOzc1tuggDiXp7buT2Dc45F0tCSdTxwFDgIVUdAuwEflN1JlWdrqpZqpqVlpbWdBGmp9OR7WzPK2u6Mp1zrhkJJVHnADmq+nng9fNY4o6M9HQ6kU/+9oit0TnnYkqdiVpVNwHrROTgwKQxwDdhjaqy1FQ6xxWwrTDU857OOdeyhJr9rgZmBnp8rAJ+Eb6QqhChU1IJ+UVtI7ZK55yLJSElalVdAGSFOZYadeqo5G+IWI9A55yLKTF/ZSJA57R4isvbUVwc7Uiccy7ymkWi7tSjHQD5uSVRjsQ55yKveSTqnikA5H+7OcqROOdc5DWLRN25t433sW2ZJ2rnXOvTLBJ1p4O6AZC/eluUI3HOuchrHom6f3cAtq31sU6dc61Ps0jUnfdvD0D+hqIoR+Kcc5HXLBJ1R2uiJn/z7ugG4pxzUdAsEnViIiS22c22PL95gHOu9WkWiRqgc/ti8rcLqCdr51zr0mwSdaeUUvLLUmDr1miH4pxzEdV8EnUn2EZnWLMm2qE451xENZtE3TktgXw6wcqV0Q7FOeciqtkk6k7p7T1RO+dapeaTqNMS2CZdPFE751qdkMajFpE1QCFQBpSqasTHpu7cGfK1I7piJRLplTvnXBTV5/5Wx6vqlrBFUodOnaCcOHYs30hqtIJwzrkoaD5NH53scduGXfgdBJxzrUmo
iVqBt0VkrohcWt0MInKpiGSLSHZubm7TRRjQpYs95tEFVq9u8vKdcy5WhZqoR6rqUOAU4CoRGVV1BlWdrqpZqpqVlpbWpEEC9Otnj0s4BFasaPLynXMuVoWUqFV1Q+BxM/ASMDycQVVnwABISFAWMsh7fjjnWpU6E7WIJItIavA58BNgUbgDqyohAQ49FL6KO8ITtXOuVQmlRr0f8LGIfAV8Abyuqm+FN6zqDRokLGwz2BO1c65VqbN7nqquAgZHIJY6DR4MTz7ZnS3L8ugW7WCccy5Cmk33PIBBg+xx4dqOUFYW3WCccy5CmlWiHhyo139VdhisWxfdYJxzLkKaVaLu3h3267zbe34451qVZpWoAQYPLOcrBntfaudcq9HsEvWgYYks5jBKl/vVic651qHZJerBmcIe2rHsKx/vwznXOjS7RL2358eydtENxDnnIqTZJeoBAyChTSlfberudyR3zrUKzS5Rt20Lh/TIZ2HJIbB5c7TDcc65sGt2iRpg8IBi6/nx7bfRDsU558KuWSbqQcd0ZAM92fL2vGiH4pxzYdcsE3XmsXYzruw3m/4GBc45F2uaZaIeMQLaxZXwn4U9oLQ02uE451xYNctEnZQEow/bwpslJ8L8+dEOxznnwqpZJmqAU85KYhkDWP2iJ2rnXMsWcqIWkTgRmS8ir4UzoFCdcm5HAN581Zs+nHMtW31q1L8CloQrkPrq1w/6puYyfckxrP/Ox6Z2zrVcISVqEckATgMeCW84oROBuycv49vygzj8cOWbb6IdkXPOhUeoNep7gZuA8jDGUm/n3NyHrxjMnt3K//5vtKNxzrnwCOUu5GOBzao6t475LhWRbBHJzs2NUP/mnj3p9yNlYo8PePpp2L49Mqt1zrlICqVGPRIYJyJrgFnACSLyr6ozqep0Vc1S1ay0tLQmDrMWxx3HZdv+xM6dMHNm5FbrnHORUmeiVtX/UdUMVe0NnAe8p6o/D3tkoRo9mqzC9xh6SBEPPeQD6jnnWp5m2496rzFjEODqAbNZtAjefjvaATnnXNMSDUMVNCsrS7Ozs5u83BqNHMmegmL6bJ3LgAFwww1QXAxnnBG5EJxzrjFEZK6qZlX3XvOvUQOcfTZtF83jVz/L5b334NRT4dxzIT8/2oE551zjtYxEfdZZAFzW/gkmTIDrroOSEvj3v2HJEnjqqSjH55xzjdAymj7AhtQrLISFC1GE3r3ttl05ObB0qdWuU1MjG5JzzoWq5Td9AEyZAosWwccfIwLnnGMnFr/5BsrL4Ysvoh2gc841TMtJ1D/7GXTuDH/7G2CJGuC44+zxv/+NUlzOOddILSdRJyXBRRfBiy9CTg5ZWfDIIzBrFhx6KHz6abQDdM65hmk5iRrgyiutneORRxCxvN2jBxx9NHz2mb3lnHPNTctK1H36wI9/DI89BmUVQ5+OGAHbtvlNy51zzVPLStQAF18M330Hs2fvnXT00fZ4+unWZv3KK/W71Pyyy2DSpCaO0znnQtTyEvW4cdCtmzVQBxx8MJx9NmRkwPr1lrDvvju04rZuhRkz6p/cnXOuqbS8RN2unXXVe+EFmGsjs7ZpA88+C+++a32qx4yBf/wjtDbr55+HPXusH/batWGO3TnnqtHyEjXALbdA9+5wxRX7tFUDxMfbScbvvoM5c+ouauZM61ACsGBBGGJ1zrk6tMxE3bEj/PWv8OWXdmKxitNPt6sU77vPLjd/8snqi1m71pL5tddardwTtXMuGuKjHUDYTJwIDz4It90G550HKSl730pKgjPPtLbnoDlz7KThnj2wbp21aT/9tL138cXWPXv+/Mh+BOecg5Zaowa7++0998CmTVa7ruK22+Cmm2D5crjxRjv3OGwYjBxpef2JJ+Bf/7LXffrAkCGNr1EvXgy/+50NGOWcc6FquYkarF/emWfCn//8g07Uffva5IMOgr/8BTZutKsYX3wRhg6FX//aEuvPA/eyycy0du2tW+21qjWBjxwZ
WuLNzoZRo2DqVPjkk6b9mM65li2Um9smisgXIvKViCwWkd9FIrAmc++91hPk/PNrzag9etgY1mecAb//vSXk+HhrAgGrUYP1AlGFW2+Fv//dLk2v1BOwWsXF8NOfWru4SGgnMZ1zLiiUNurdwAmqukNEEoCPReRNVf0szLE1jYwMePhhG7P6iitg+nQ7M1iLk0+2Cxx79ICuXW3aqFFWe778cpg2zZpMLrnEKupTp9oIqx9/bAP4xcVZYp8xw9rDZ82yFpjZs+H66+Gjj8L+qett1y5o3z7aUTjnqqWqIf8BScA84Mja5jviiCM05tx2myqoXnSRanl5nbNXN0tRkerEiaqZmaqPP65aWqr6xRdWLKgefLDqeeepnn22vb74YisnM1P18MPt+S9/qZqcrLpnT/3WXZsNG2ovry45Oart26s+91zDy3DONQ6QrTXl3pre0H0TdBywANgB/LmGeS4FsoHsAw88MLKfMBTl5aq//a195Pvvb9Ki589XXb9+32m33GKrOuYYe5w+3aY/84y9/vzzinl37lR9/nnVdetUH3pItWtX1XffVf3uO9Uf/Uh19uya1715s2pqqurYsfVP8EEzZlhMY8bY6/feUy0sbFhZzrmGaXSi1opk3Al4Hzi8tvliskatqlpWpnrqqapt26rOmxfWVZWUqF5xhWpWliXAnTtt+oYNttX797cEe/nlqoMHV9TKQbVNG9UjjrDKP6j++McV5ZaXq77/vuqkSZbAgzsEUH3ssYbFOmmSLS+i+o9/2PPbb2/c53fO1U9tibret+ISkTuAnao6raZ5onIrrlDl5lq3jj174P33bbDqCBs0CFavtkvZX3/dTjL+7W/W7t21q7VrX3yxzdu9O2zebF0Dn3jCTmZ+95291769nfD88Y9hyxa7Yv7uu224ExFrnv/nP+3vr3+Fo46qPp7evW14lMAV94BtlsWLw7oZnHOV1HYrrlBq0WlAp8Dz9sBHwNjalonZGnXQ0qWqPXqo7ref6pIlEV/9li2q+fn2fO1a1Y0b932/pES1Xz/VpCRrVomPtzZkEdVx46ypYs0amwfs4GDdOqu5V66ZJyZWPLZvr/ryy1b2mWdaO7qq6qpVNs/f/qY6YoQ9P+88e2zKTZyJyNAAABS3SURBVLN7t+qOHT+c/uqrFW350VZerrppU7SjcK0VjWn6AAYB84GFwCLg9rqWiflErWpZaL/9LGFHIVnX5euvrYlDVfX88y3ZvvTSvvNs3Kj69tsVr8vLVd95R/Wf/1T9+99Vr75a9dFHLfkMG2ZNKqNGVSTyOXNsXlBdtEj1yy/tJOm6dTbtD39Q/fZb1YULbcewbVvDP88559iOZdeufacPH16x/mj7179sp7h4cbQjse/yrbdsx+pah0Yl6ob8NYtErar6zTeq3burdupkGS5GFRU1vqa3Y4edcATVq66yfdSIEdYjpXv3H9ZojzxSNSFh3xo6qE6ZYk39la1da/u6qkmluNgS8/LldjQAqr//fcX7y5dXlHvXXY37fE3hJz+xWK6+OtqR2PkHsHMGrnXwRF2bVaus71ybNqpTp7boKkxJidWiS0tV77nHvv2UlB/W1FVVn31W9dhjVf/v/6xHyqOP2slRUD3jDOtmePnllvyDSTg1VfWBB6wp5qqrVDt2VO3bV/WssyzpjxljzTm33ab69NOqv/udLXvQQXbyNOjzz1Vzcxv2GYuKrGmpvnJzVePiLM6OHStO/j7xhHUS2rnTyi0oqFhm61bV8eNVP/20YbHWJritTzih6ct2sckTdV0KClQvuMA2x2mnNa5TcjNRVGS12G+/DX2ZYA/HNm3sIKR7d9WMDJs2Y4bqiSdW1JDbtVM991zVLl3s9aRJqqtXq/bpUzFPXJzqccep/ulP9nrtWtsxBHvFbNpkyfHdd61b49at9tXk5FQf38aNFfvc0aNVb7jBmoBmz7amm9LSmj/b9Om23nvvtcepU1XvvLMi1vbt7fHQQ+1I
QdWahkC1d++K7ozl5T884qjsvvtU33yz9u1cVqaanm7bp00bbzdvLTxRh+r++22TTJ4cG2e3YlRNBx3l5apPPmm16rw8m7Z4sdXAly+vmK+szPqLd+liCXjZMtvsSUm6tz93UpLVbCs3u7RtW5EwMzPtBOiyZbajGDtW9YAD7GKia6+1Lo/Bk6nBv27drBYfbGtfs8ZO6u7apXr00VazLy+39vzgMhMn2rmCyy+3coPNNLt2WfPRIYfYUcFJJ9mOYcAA1Z497fPu2mXxBRP3Bx/Y8gkJqm+8ofrJJzZtxQqLJVg/+PRTm+9//sceH3wwtO9l1SrbmbnmqbZEXe/ueaGI6e55dfnd7+ya8FtvtUE/XNioWjdCgPvvh5Ur4YAD4Jpr7DL7Bx+0S/GHD7cuiy++CKWlsP/+dll+cNhZEevy2KmTdU8M3iOzvNxuvbZihXV9fOUV6w7ZrZvN8+qr1jWyRw97/4EH7Eb2u3bBvHl2Q+RTTrEhAYLOOcfKGTMG3njD7hr08ce23pISG2Zg8WJITrbPt26drW/SJBtCYNs26NDBhhqo6qCD4PHHLY7nnrOepEcfbZ/5/vth4EDo0sWGrgEbluDRR2H3bliyxLpuJiXZjTHuvttiCPruO3jpJejc2UaJ7NUL7rzT7qvxxz/aEAiFhTa96ne0ezckJjbNd+5qVlv3PE/UVanagB7Tp1uH5LQ0S949e0Y7MlfF/Pnw3ns24FX//qEtM2+ejbcyb54l5e++s6T5pz/BqafWvfzGjTa+14IF1h3/nXcqdjZB2dlw4onQrx9MngwfflgxmNesWTZuzMyZ9n5SEmzYAEVFcNddVj7YT/Chh2zHcsklFdMTE+HYY+0ygM8+s0ewcn71K5tvxgzbwV1wge2kcnJsp1J5TLKOHWH7dnt+xBGwbBns2GE7hjvugJNOsr7+kybZdr77bosjuJPYtcu2/dy5tqM76STbCSxfbp9//XpbX7t2sGaNLXPooTBhgu1g7rrLYh440K4DGDly3yF4CgpsG/fvb+VX9c47cPvt8JOf2LoPPNCuGwjKy7MYK09riLlz4T//sd/YwIE1z1e50tFQnqjrq7TUBnB6/337xfXvb1W8Dh2iHZlrJoqLLUkF/3kXLbIENmlSzf/QmzZZjfr44+0oonJZr79uNeylS+1nmZpqyfiaa2zI3vJySEiw+V97ze6bsWOHHWX06AGjR9vQvWVl8OabdhRw5ZWwahVcdZUloqwsG79s1So7slm/3u63MXiw/fzj4mzn0r+/LR8c8rc6IjZ/aanVcVRthxTUs6f9Oy1bZrGPGmVHAk88YUcHGzfaMm3b2oiWffvCIYfYjao/+QRuvtk+25YtFTedPuUUG9V42TLbyZWX292bJkywUS4feMC22eTJtiP45z9tJ3TCCfCjH9n2WrQI0tPtIrD777ejoKATTrC00Lu3HSl9/bXtrOfNs6OS4cMtrlNPbVjS9kTdGO+8Y1u+Xz/7r5gyxX49zsWw/Hxrsujeve6kUVJSkeT37LGrZD//3BLjlClWW33zTfjvfy2RLVlitcuLL7aa8OrVlriLiqxZ6uijrSYbH287hmDT0fr1VtsvL7db4KWkWJyzZlmCKyiwm3SMHm2PmZlWm33pJfj++31vfzpqFPz735Zcv/7aar7/+78VO49zzrFb6X3+udXci4psfTt2VJQhYjuAlSsrpsXH284FLGFfe60Ndfzss5a4c3Kq34annQYLF9qyq1dXHHnUhyfqxnrtNfjtb+Grr6wx9IknrJrhnGsS69dbTfi44/Y9JxBUVmZt/ytWVNSsq45WvHOnJfRu3ay2XlxsNesNGyzpXnaZJfXgwfGYMXZeYN06q5m3a2f1sXXr7MhlzJh9E25pqdXMt2+3oR4OP9x2BMHmqJISa/pp6KgUnqibgqqdfbr0Uttt33GHNVYecED1vyznnKuH2hJ1y74VV1MSsZOLixdbo9lvf2vHZwMHWuOhc86FiSfq
+uraFZ55xk65/+1v1iA1frydYnbOuTDwRN1QRx4Jv/yltVd/+qk1WD33XMUpaOecayKeqBvr7LOtZ0hysp1qPuYY62O1fn20I3POtRCeqJvCiSdah8yHH7be/ZMn2+lkv924c64JeKJuKnFx1rF03Tq7pKpXLzv5+J//eHOIc65R6kzUInKAiLwvIktEZLGI/CoSgTVbbdpYH+u337ZLp04+2TpnnnuuDVbhSds5V0+h1KhLgV+r6iHAUcBVIhL5Gw02NwceaL3mZ8ywHvqffGLXtx5/vPXHrnyZlXPO1aLORK2qG1V1XuB5IbAE8BGKQpGYaIM7vPqqtV0/+KBdfjVunPUSeeWVaEfonGsG6tVGLSK9gSHA59W8d6mIZItIdq5fAPJD8fE2ost331k/bFU4/XQbEafysGbOOVdFyIlaRFKAF4BrVbWg6vuqOl1Vs1Q1Ky0trSljbFkSEqwb36JFcMMNVsvOyrKBhSsPL+accwEhJWoRScCS9ExVfTG8IbUS8fFwzz1Wuy4rsx4jPXtaf+yuXe2mBV7Tds4RwqBMIiLA48BWVb02lEJb5KBM4aRq4zR+9JFdKPPtt9aunZlpg+YOGRLtCJ1zYVbboEzxISw/ErgA+FpEFgSm3aKqbzRVgK2eiDV/ZFX6jl56yUZ2HzbMbtUxcaKNg33kkdC+ffRidc5FnA9zGsu2brX7Nz7ySMWgT716wU03WTPJqFF2/yPnXLPnw5w2V126wH332dWO779vF8x07Gg9RcaPt9tT3HWXjXrunGuxvEbd3JSXW5/snByYNs3astu1sxvDnXWWXRXZp88Pb3/hnItpjW2jdrGkTRurSffta00fixdbF79Zs+Dpp22e7t3t1s4nnWQ3sOvTx+9C41wz5jXqlmLPHrvN9aJF8OGHNvRq8MKj5GT4xS/g6qvtFtLOuZjj90xsjcrL7Wa88+fDBx9YjbukxLr6XXEF/Pzn3nvEuRjiidrZVY/PPGN3pFmwwG7DPH48DB9u3QKHDYPNm2H3butZ4pyLKE/UroKqXVjz+OPWV3vbNpuemgqFhfZ81Cjr292mDVx3HYwda6+dc2HjidpVT9Vq2nPmWPe/fv2seeTppy1xb9xoPUz69IETToD8fHu89FK7BN4512Q8UbuGKSmxG/Y+/jjMm2cnJdeuhbQ0KCqyJpOrr7Z+3qqW0Pv2tcfU1GhH71yz4onaNQ1Vay558UW7e80LL8CmTdXPm5Zml7v37WsJv6TEBp+Ki7Ppxx5rF/SsXQsFBTBoEHTrFtnP41wM8UTtwmPnTvjySxgwwMYhWbUKVq+2x2XL4NNPrfmkbVv7i4uzS+FrupLywAPthgrdu1vSPuAAu8lC7972fmmpleHt5a4F8kTtYoeq9fVesMDGMsnIsMviFyyw5pWlSy2R5+ZCcbEtk5FhvVSWL7ca+rXXQl4ebN9ud9Fp377iLz7eyl21ymrpxxwDPXpY7R1sx9GlCyQlRW8bOFcNT9SueVq1yppXFi+2E5n9+sGbb9prsMQcHKyqqsq9WMB6sMTFWROMCBx0kNXeu3a1mznEx1c8xsfbjmLNGjtqSE+Ho46yi4p69LB7X3boYGXt2mV/RUX216kT7LefleVcPXiidi1HWRl88401h6SmWg199+6KhFlaatM7dbJxvRcssP7hmzdbou3Vy54vWmQJf/t2W6akpOKxpMRq3T/6kZ1AXbnS2tKDguOolJdXH6OINd2kpVmNv7DQkveAARbXqlVWZv/+tq71623HkZxsO6Thw+1oAOzIobjYxnPJz7eyBwywnUl5uY2e2KWLPSYn2/YoKbH5y8ut/T8lxWLevdvKCh6FuJjiidq5xsrLs+aSRYtsIKzycntdudklKcn6pW/caH9bttjOIzXVkvHy5ZZse/a0JL1ihSXcjAzbqezebWWsXt2wGBMSbGejajuJnTutll+dlBR7bNfO5i0rsyReUmLxdOpkRxXx8RZTcrL9VX5eWmo7zaQk
a5LascPKSUiwcxIJCfs+j4+3nVZ8vO1oO3e294I7x+DOMjHRtllqqh25iNg2Ce6EU1OtjNLSir/giergkVHlo6TangfHwAnmwcqPqhXld+jww/FymvicSaMStYj8ExgLbFbVw0NZoSdq5xohJ8dq8KrWNJOUZEm8Y0dLDkuX2nwitmPYts3a5bdtq0hEOTmWjDMyLIGWl1sCTEy05J2ba8vv2mU7ofh4ey8uzm7AnJ9vJ3VVLeFX/isqskeAQw6xMtassQSakGAJd88e+ws+D4qLszJrOhqJVSK2jcrK7OhI1Xo8iVTssMvL7fv69tsGrqJxo+fNAO4HnmjQ2p1z9ZORYX816dkzcrE0hco108REe1y/3nYGe/ZU1G6DO5niYqt5B/9KS60G3rZtxbTy8orzCfHxtgMoK9u3Zh7K89LSihpxdY/Bsrdts2XatKnoknrAARZHcOcVF2dHImFQZ6JW1Tki0jssa3fOtXwiFYkY7DHY5dKFpMlGlxeRS0UkW0Syc4PDazrnnGu0JkvUqjpdVbNUNSstLa2pinXOuVbP79fknHMxzhO1c87FuDoTtYg8DfwXOFhEckTkovCH5ZxzLiiUXh8TIxGIc8656nnTh3POxThP1M45F+PCMtaHiOQCa+ucsXrdgBoGLI4qj6v+YjU2j6t+PK76a0hsvVS12r7NYUnUjSEi2TVd7x5NHlf9xWpsHlf9eFz119SxedOHc87FOE/UzjkX42IxUU+PdgA18LjqL1Zj87jqx+OqvyaNLebaqJ1zzu0rFmvUzjnnKvFE7ZxzMS5mErWInCwiy0RkhYj8JopxHCAi74vIEhFZLCK/CkyfKiLrRWRB4O/UKMW3RkS+DsSQHZjWRUTeEZHlgcfOEY7p4ErbZYGIFIjItdHYZiLyTxHZLCKLKk2rcfuIyP8EfnPLROSkKMR2j4gsFZGFIvKSiHQKTO8tIrsqbbu/RziuGr+7SG2zGuJ6plJMa0RkQWB6JLdXTTkifL8zVY36HxAHrAT6Am2Br4BDoxRLOjA08DwV+BY4FJgK3BAD22oN0K3KtL8Avwk8/w3w5yh/l5uAXtHYZsAoYCiwqK7tE/hevwLaAX0Cv8G4CMf2EyA+8PzPlWLrXXm+KGyzar+7SG6z6uKq8v7/A26PwvaqKUeE7XcWKzXq4cAKVV2lqnuAWcDp0QhEVTeq6rzA80JgCRDrN6k7HXg88PxxYHwUYxkDrFTVhl6Z2iiqOgfYWmVyTdvndGCWqu5W1dXACuy3GLHYVPVtVS0NvPwMqOVmiZGLqxYR22a1xSUiApwDPB2OddemlhwRtt9ZrCTqnsC6Sq9ziIHkGLhX5BDg88CkXwYOUf8Z6eaFShR4W0TmisilgWn7qepGsB8R0D1KsQGcx77/PLGwzWraPrH2u5sCvFnpdR8RmS8iH4rIsVGIp7rvLla22bHA96q6vNK0iG+vKjkibL+zWEnUUs20qPYbFJEU4AXgWlUtAB4CfgRkAhuxw65oGKmqQ4FTgKtEZFSU4vgBEWkLjAOeC0yKlW1Wk5j53YnIrUApMDMwaSNwoKoOAa4HnhKRDhEMqabvLla22UT2rRBEfHtVkyNqnLWaafXaZrGSqHOAAyq9zgA2RCkWRCQB+wJmquqLAKr6vaqWqWo58DBhPESujapuCDxuBl4KxPG9iKQHYk8HNkcjNmznMU9Vvw/EGBPbjJq3T0z87kRkEjAWOF8DjZqBw+S8wPO5WLtm/0jFVMt3F/VtJiLxwATgmeC0SG+v6nIEYfydxUqi/hLoJyJ9ArWy84BXohFIoO3rUWCJqv610vT0SrOdASyqumwEYksWkdTgc+xE1CJsW00KzDYJ+HekYwvYp5YTC9ssoKbt8wpwnoi0E5E+QD/gi0gGJiInAzcD41S1qNL0NBGJCzzvG4htVQTjqum7i/o2A04ElqpqTnBCJLdXTTmCcP7OInGWNMQzqadiZ09XArdGMY5jsMOShcCCwN+pwJPA
14HprwDpUYitL3b2+CtgcXA7AV2Bd4HlgccuUYgtCcgDOlaaFvFthu0oNgIlWE3motq2D3Br4De3DDglCrGtwNovg7+1vwfmPTPwHX8FzAN+GuG4avzuIrXNqosrMH0GcHmVeSO5vWrKEWH7nfkl5M45F+NipenDOedcDTxRO+dcjPNE7ZxzMc4TtXPOxThP1M45F+M8UTvnXIzzRO2cczHu/wNrhd1IdWYbKwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWoAAAEICAYAAAB25L6yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXwV9b3/8dcnISGEfYnsq60LuFAbcGkLWFt3q95al+tCq5XrVevyK7Vyba23m4htf9brVm9vqyJFW4tt3e2PK1JcARsExQUEBEEJIGEJS5bP74/vHBJCTnIScs6ZJO/n4zGPmcyZ5TNzTj7zne8sX3N3REQkvnKyHYCIiDRMiVpEJOaUqEVEYk6JWkQk5pSoRURiTolaRCTmlKhbITO7z8x+2NLTpoOZXWhmz6c47TfNbF66Y2oOM3vAzH7awsvsa2ZzzWyrmf2yJZe9v8xsjpl9O9txSNAh2wG0N2a2Evi2u/+/5i7D3a9Ix7RNYWbDgBVAnrtXNrD+GcCMNKz/AWCNu/+gpZedQZOADUA3d3czmwM87O6/zW5YEjcqUceMmbWZg2db2pY0GQq87XrqTBqhRJ1BZjYdGAI8YWbbzOwGMxtmZm5ml5nZh8D/RtP+ycw+NrOy6PR4VK3l7DkNN7MJZrbGzL5rZuvNbJ2ZfauZ0/Y2syfMbIuZzTeznzZQFTE36m+OtuXYqOriJTP7v2a2CbilbnVGtK3XmNkHZrbBzG43s3p/h2Z2iJn93cw2mdm7ZnZuNH4ScCFwQ7TuJ5LM/2szWx1tz0Iz+1Ktz24xsz+a2UNR1cNbZlZc6/PPmdkb0WePAgVJ9gNm9hkzezH6rjZE0yc+Oy7al2VR/7jE9wJMrLUNLwFfAu6K/r6r1v660szej2L5iZkdaGavRNv1RzPLj6btaWZPmlmpmX0aDQ+KPusVffdnRH93MbNlZnZJsu2qtQ05ZvYDM1sV/W4eMrPu0WcFZvawmW00s83RNvaNPvtm9D1vNbMVZnZhY+uSJNxdXQY7YCXwlVp/DwMceAjoDHSKxl8KdAU6AncAJbXmeQD4aTQ8AagEfgzkAacC5UDPZkz7SNQVAiOB1cC8JNuRiLtDrXHfjJb/HUK1Wqdo3Lxa0zjwAtCLcNB6j1AVRO1po32xGvhWtKyjCNUEo+puVwP7+iKgdzT/d4GPgYLos1uAndE+yAVuBV6NPssHVgHXR/vpHKAi2fqAmcBNhIJPAfDFaHwv4FPg4iiGC6K/e9e3DcCcxL6os7/+BnQDRgG7gNnACKA78DYwMZq2N/D16PvrCvwJ+EutZZ0Y7YMDgP8GHmtg3+2JhfBbXBatswswC5geffZvwBPROnOBz0exdga2AAdH0/VPfHfqmt6pRB0ft7j7dnffAeDuv3P3re6+i5BUjkyUYupRAfzY3Svc/WlgG3BwU6Y1s1zCP/mP3L3c3d8GHmzGdqx19/9y98rEttTjNnff5O4fEg5CF9QzzenASnf/fbSsN4A/E5JmStz9YXffGM3/S8JBr/Z+mefuT7t7FTAdODIafwwhQd8R7afHgPkNrKqCUI0xwN13unviDOI04H13nx7FMBN4Bzgj1W2I3ObuW9z9LWAJ8Ly7f+DuZcAzwOei7d3o7n+Ovr+twM+A8bX2x/OE5D07iu3fUlz/hcCvonVuA6YA51uo2qogHCA+4+5V7r7Q3bdE81UDh5lZJ3dfF8UvzaBEHR+rEwNmlmtmU81suZltIZTCAfokmXej731Br5xQ8mnKtEWEUt/qWp/VHk5VKvPUnmYVMKCeaYYCR0en05vNbDMhYfRLNZCoimdpVO2wmVACrb0PP641XA4URMlnAPCRu9euO17VwKpuAAx4PapCuTQaP6Ce+VYBA1PdhsgntYZ31PN3FwAzKzSz30RVFFsI1VM9ooNwwv3AYcDv3X1jiuuvux2rCL+VvoQD3HPAI2a21sym
mVmeu28HzgOuANaZ2VNmdkiqGyx7U6LOvGQXjmqP/1fgTOArhOQyLBpv6QuLUkK1xaBa4wY3MH0q25FM7eUOAdbWM81q4EV371Gr6+Lu/57KeqL66O8D5xKqdnoAZaS2D9cBA82s9rRDkk3s7h+7++XuPoBQSr3HzD4TbdfQOpMPAT5KtqgUYmvIdwlnDEe7ezdgXDTeIBQAgN8Qqtn+PYoxFXW3Ywjht/JJdMbxn+4+EjiOcCZ0CYC7P+fuXyVUe7xDqG6RZlCizrxPCHV9DelKqIvcSKj7+3m6g4pO/2cRLgAWRqWfhi40lRJObRvblvp8L7rwNRi4Fni0nmmeBA4ys4vNLC/qxpjZodHnje3HroRkUgp0MLObCXWnqXglmvcaM+tgZv8CjE02sZl9I3HRjlAH7UAV8HS0Df8aLec8Qt3/k0kWlcpvoyFdCSXszWbWC/hRnc//I+pfCvwCeKhOaTuZmcD1ZjbczLoQfo+PunulmR1vZodHy9lCqAqpsnCP+NfMrDPht7yNsE+kGZSoM+9W4AfR6fzkJNM8RDi9/IhwsejVDMV2NaEE/zHhlHYm4Z9sH+5eTqgDfSnalmOasJ6/AguBEuAp4H/qWf5WwsWv8wkluo+B2wj1zETzjIzW/Zd61vEcof72PcK+3EmKVTnuvhv4F8LFzU8Jp/CzGphlDPCamW0jXPi71t1XRFULpxNKuhsJVSSnu/uGJMv5NXBOdMfGnanEWscdhAu4Gwi/mWcTH5jZ54H/A1wSHZRvIxxQbkxhub8j/B7mEu6d30m4YAyhKuoxQpJeCrwIPEzILd8lfHebCHXlVzZjmwSwvavhRGqY2W1AP3ef2ILLdOCz7r6spZYp0tapRC17WLhv+QgLxgKXAY9nOy6R9k5PjkltXQnVHQOA9cAvCdUUIpJFqvoQEYk5VX2IiMRcWqo++vTp48OGDUvHokVE2qSFCxducPei+j5LS6IeNmwYCxYsSMeiRUTaJDNL+vSrqj5ERGIupURtZtdH7zBYYmYzzSzpKx9FRKRlNZqozWwgcA1Q7O6HEV5leH66AxMRkSDVOuoOQCczqyC8e6K+l+iISExVVFSwZs0adu7cme1Q2r2CggIGDRpEXl5eyvM0mqjd/SMz+wXwIeGFL89H77Xdi4VWNyYBDBmS9EVjIpIFa9asoWvXrgwbNoy9XwoomeTubNy4kTVr1jB8+PCU50ul6qMn4ZWbwwlPrHU2s4vqCeB+dy929+KionrvMBGRLNm5cye9e/dWks4yM6N3795NPrNJ5WLiV4AV7l7q7hWEt4gd14wYRSSLlKTjoTnfQyqJ+kPgmOgdxQacQHidYYv7yU/guefSsWQRkdar0UTt7q8R3jf7BrA4muf+dAQzbZoStYhIXSndR+3uP3L3Q9z9MHe/OGpwtcUVFsL27elYsohk0+bNm7nnnnuSfn7ccY3Xpg4bNowNG5K1uRD8/OeNN4bUpUuy5kTjK1ZPJnburEQt0hYlS9RVVaF1rpdffrlF1pNKom6NYvU+aiVqkQy47jooKWnZZY4eDXfckfTjG2+8keXLlzN69Gjy8vLo0qUL/fv3p6SkhLfffpsuXbqwbds25syZw80330zv3r159913GTduHPfccw85OXuXKR9++GHuvPNOdu/ezdFHH80999zDTTfdxI4dOxg9ejSjRo1ixowZDYbs7txwww0888wzmBk/+MEPOO+881i3bh3nnXceW7ZsobKyknvvvZfjjjuOyy67jAULFmBmXHrppVx//fUtsutSEbtEXV6e7ShEpKVNnTqVJUuWUFJSwpw5czjttNNYsmRJvfcSv/7667z99tsMHTqUk08+mVmzZnHOOefs+Xzp0qU8+uijvPTSS+Tl5XHllVcyY8YMpk6dyl133UVJigehWbNmUVJSwqJFi9iwYQNjxoxh3Lhx/OEPf+Ckk07ipptuoqqqivLyckpKSvjoo49YsmQJEM4QMil2iVolapE0a6Dk
myljx45N+sDH2LFjGTEiNMZ+wQUXMG/evL0S9ezZs1m4cCFjxowBYMeOHRxwwAFNjmHevHlccMEF5Obm0rdvX8aPH8/8+fMZM2YMl156KRUVFZx11lmMHj2aESNG8MEHH/Cd73yH0047jRNPPLEZW918qqMWkYzr3Llz0s/q3mdc9293Z+LEiZSUlFBSUsK7777LLbfc0uQYkrVuNW7cOObOncvAgQO5+OKLeeihh+jZsyeLFi1iwoQJ3H333Xz7299u8vr2R6wSte76EGmbunbtytatW1Oa9vXXX2fFihVUV1fz6KOP8sUvfnGvz0844QQee+wx1q9fD8CmTZtYtSq8yjkvL4+KioqU1jNu3DgeffRRqqqqKC0tZe7cuYwdO5ZVq1ZxwAEHcPnll3PZZZfxxhtvsGHDBqqrq/n617/OT37yE954440mbP3+U9WHiKRd7969+cIXvsBhhx1Gp06d6Nu3b9Jpjz32WG688UYWL17MuHHjOPvss/f6fOTIkfz0pz/lxBNPpLq6mry8PO6++26GDh3KpEmTOOKIIzjqqKMavZh49tln88orr3DkkUdiZkybNo1+/frx4IMPcvvtt++56PnQQw/x0Ucf8a1vfYvq6moAbr311v3fKU2QlsZti4uLvTktvFxzDUyfDp9+2uIhibRrS5cu5dBDD812GI2aM2cOv/jFL3jyySezHUpa1fd9mNlCdy+ub/pYVX2oRC0isq/YVX1UVISuCa9qFZE2YsKECUyYMGG/l7Nx40ZOOOGEfcbPnj2b3r177/fyMy1WibqwMPS3b4cePbIbi4i0Xr179075furWIHZVH6DqDxGR2mKZqPV0oohIjVgmapWoRURqKFGLiMScErWIxMo//vEPRo0axejRo3nllVd4+umnsxLHhAkTaM7zIOmQSuO2B5tZSa1ui5ldl45gat/1ISLt04wZM5g8efKe93hkK1HHSaO357n7u8BoADPLBT4CHk9HMLqYKJJ+WXgdNdu3b+fcc89lzZo1VFVV8cMf/pA+ffowefJkKisrGTNmDPfeey/Tp0/nj3/8I8899xzPP/88L730Ejt27GDevHlMmTKFpUuXsmLFCtatW8d7773Hr371K1599VWeeeYZBg4cyBNPPEFeXh4//vGPeeKJJ9ixYwfHHXccv/nNb6iqquLYY4/l9ttvZ8KECUyZMoWcnBx+9rOfNbp9M2fO5Oc//znuzmmnncZtt91GVVVVve+ovvPOO7nvvvvo0KEDI0eO5JFHHtnv/dvU+6hPAJa7+6r9XnM9VPUh0jY9++yzDBgwgKeeegqAsrIyDjvsMGbPns1BBx3EJZdcwr333st1113HvHnzOP300znnnHN44IEHWLBgAXfddRcAt9xyC8uXL+eFF17g7bff5thjj+XPf/4z06ZN4+yzz+app57irLPO4uqrr+bmm28G4OKLL+bJJ5/kjDPO4IEHHuCcc87hzjvv5Nlnn+W1115rNPa1a9fy/e9/n4ULF9KzZ09OPPFE/vKXvzB48OB631E9depUVqxYQceOHVvsvdVNTdTnAzPr+8DMJgGTAIYMGdKsYJSoRdIvG6+jPvzww5k8eTLf//73Of300+nWrRvDhw/noIMOAmDixIncfffdXHdd47Wqp5xyCnl5eRx++OFUVVVx8skn71nHypUrAXjhhReYNm0a5eXlbNq0iVGjRnHGGWcwatQoLr74Ys444wxeeeUV8vPzG13f/PnzmTBhAkVFRQBceOGFzJ07lx/+8If1vqP6iCOO4MILL+Sss87irLPOas7u2kfKFxPNLB/4GvCn+j539/vdvdjdixMb1FRK1CJt00EHHcTChQs5/PDDmTJlCn/961+bvayOHTsCkJOTQ15e3p73Vefk5FBZWcnOnTu58soreeyxx1i8eDGXX345O3fu3DP/4sWL6dGjB5988klK60v24rpk76h+6qmnuOqqq1i4cCGf//znqaysbPa2JjTlro9TgDfc
PbWta4a8vNApUYu0LWvXrqWwsJCLLrqIyZMn8/LLL7Ny5UqWLVsGwPTp0xk/fvw+8zXlPdYJiaTcp08ftm3bxmOPPbbns1mzZrFx40bmzp3LNddck1LVxNFHH82LL77Ihg0bqKqqYubMmYwfP77ed1RXV1ezevVqjj/+eKZNm8bmzZvZtm1bk+KvT1OqPi4gSbVHSyos1MVEkbZm8eLFfO9739tTCr733nspKyvjG9/4xp6LiVdcccU+8x1//PFMnTqV0aNHM2XKlJTW1aNHDy6//HIOP/xwhg0btqfJrg0bNnDjjTcye/ZsBg8ezNVXX821117Lgw8+2ODy+vfvz6233srxxx+Pu3Pqqady5plnsmjRon3eUV1VVcVFF11EWVkZ7s71119PjxZ4cVFK76M2s0JgNTDC3csam76576MGGDgQTjkFfvvbZs0uIvVoLe+jbi+a+j7qlErU7l4OZOTdgHontYjI3mL1mlNQohaRzDr77LNZsWLFXuNuu+02TjrppCxFtC8lapF2wt33adFb4PHH0/L8XlLNaf4wVu/6AF1MFEmHgoICNm7c2KwkIS3H3dm4cSMFBQVNmi+WJeqPP852FCJty6BBg1izZg2lpaXZDqXdKygoYNCgQU2aJ5aJWlUfIi0rLy+P4cOHZzsMaabYVX0oUYuI7E2JWkQk5mKbqHXNQ0QkiF2iLiwMSXrXrmxHIiISD7FL1F27hv6WLdmNQ0QkLmKXqPv2Df0U30AoItLmxS5R9+8f+uvWZTcOEZG4UKIWEYm52CXqfv1CX4laRCSIXaLu3DlcUFSiFhEJYpeoIVR/6H0fIiJBSonazHqY2WNm9o6ZLTWzY9MZVP/+KlGLiCSkWqL+NfCsux8CHAksTV9IStQiIrU1+vY8M+sGjAO+CeDuu4Hd6QyqXz8lahGRhFRK1COAUuD3ZvZPM/utmXWuO5GZTTKzBWa2YH/fedu/f3jfRxNbiRcRaZNSSdQdgKOAe939c8B24Ma6E7n7/e5e7O7FRUVF+xVU4l5qXVAUEUktUa8B1rj7a9HfjxESd9rooRcRkRqNJmp3/xhYbWYHR6NOAN5OZ1B66EVEpEaqTXF9B5hhZvnAB8C30heSStQiIrWllKjdvQQoTnMse/TqBfn5qqMWEYGYPploplv0REQSYpmoQQ+9iIgkxDZRq0QtIhLENlGrRC0iEsQ6UW/cCLvT+rC6iEj8xTpRg9pOFBGJbaLWQy8iIkFsE7UeehERCZSoRURiLraJum/f8OCLnk4UkfYutom6QwcoKlKJWkQktoka9NCLiAjEPFHroRcRkVaQqFVHLSLtXatI1NXV2Y5ERCR7Yp2o+/WDysrwKLmISHsV60Ste6lFRFJM1Ga20swWm1mJmS1Id1AJStQiIqm3mQhwvLtvSFsk9Ugkal1QFJH2LNZVH3oxk4hI6onagefNbKGZTapvAjObZGYLzGxBaWlpiwTXuTN07apELSLtW6qJ+gvufhRwCnCVmY2rO4G73+/uxe5eXFRU1GIB6qEXEWnvUkrU7r426q8HHgfGpjOo2pSoRaS9azRRm1lnM+uaGAZOBJakO7AEPZ0oIu1dKnd99AUeN7PE9H9w92fTGlUtejGTiLR3jSZqd/8AODIDsdSrf3/Yvh22bg0XFkVE2ptY354HeuhFRKTVJGrVU4tIexX7RK2HXkSkvYt9olbVh4i0d7FP1L16QX6+ErWItF+xT9RmukVPRNq32CdqgEGDYPXqbEchIpIdrSJRDx0KH36Y7ShERLKjVSTqIUNCiVptJ4pIe9QqEvXQoVBRoXupRaR9ahWJesiQ0F+1KrtxiIhkQ6tI1EOHhr7qqUWkPWoViVolahFpz1pFou7WDXr0UIlaRNqnVpGoIVR/qEQtIu1Rq0nUQ4aoRC0i7VPKidrMcs3sn2b2ZDoDSkYlahFpr5pSor4WWJquQBozZAiUlYVORKQ9SSlRm9kg4DTgt+kNJ7nELXoqVYtIe5Nq
ifoO4AYg6UPcZjbJzBaY2YLS0tIWCa624cNDf8WKFl+0iEisNZqozex0YL27L2xoOne/392L3b24qKioxQJMOPDA0P/ggxZftIhIrKVSov4C8DUzWwk8AnzZzB5Oa1T16NkTuneH5cszvWYRkexqNFG7+xR3H+Tuw4Dzgf9194vSHlkdZjBihErUItL+tJr7qCFUfyhRi0h706RE7e5z3P30dAXTmBEjwsXEqqpsRSAiknmtqkQ9YgTs3g1r12Y7EhGRzGlViVp3fohIe9SqEvWIEaGvOz9EpD1pVYl68GDIzVWJWkTal1aVqPPywqPkKlGLSHvSqhI1wGc+A8uWZTsKEZHMaXWJ+qCD4L33wD3bkYiIZEarTNRbtsD69dmOREQkM1plooZQqhYRaQ+UqEVEYq7VJeohQyA/X4laRNqPVpeoc3PDnR9K1CLSXrS6RA01d36IiLQHrTZRL1umt+iJSPvQahP17t3w4YfZjkREJP1aZaI+9NDQX7Qou3GIiGRCKo3bFpjZ62a2yMzeMrP/zERgDTnqKOjYEf7xj2xHIiKSfh1SmGYX8GV332ZmecA8M3vG3V9Nc2xJFRTAMcfAiy9mKwIRkcxJpXFbd/dt0Z95UZf1N22MHw///CeUlWU7EhGR9EqpjtrMcs2sBFgP/N3dX6tnmklmtsDMFpSWlrZ0nPsYPx6qq+Gll9K+KhGRrEopUbt7lbuPBgYBY83ssHqmud/di929uKioqKXj3Mcxx4T3U6v6Q0Tauqa2Qr4ZmAOcnJZomqCwEMaMgblzsx2JiEh6pXLXR5GZ9YiGOwFfAd5Jd2CpGD8eFiyA7duzHYmISPqkUqLuD7xgZm8C8wl11E+mN6zUjB8PlZXw8svZjkREJH0avT3P3d8EPpeBWJrsuOPCS5pefBG++tVsRyMikh6t8snEhK5dw8MvuqAoIm1Zq07UEKo/Xn8dduzIdiQiIunRJhL17t2qpxaRtqvVJ+ovfzlUgUyfnu1IRETSo9Un6sJCuOAC+NOfQuvkIiJtTatP1ACXXQbl5fDII9mORESk5bWJRD1mDIwaBfffD57110WJiLSsNpGozeCaa2DhQpg9O9vRiIi0rDaRqAEmToSBA+FnP8t2JCIiLavNJOqOHWHyZJgzR7fqiUjb0mYSNcDll0OvXvCrX2U7EhGRltOmEnXnzjBpEjz+OKxcme1oRERaRptK1ABXXQU5OfBf/5XtSEREWkabS9SDBsG558J998Hy5dmORkRk/7W5RA1w222hma5LLw3tKoqItGZtMlEPGgR33BGa6VIViIi0dqk0xTXYzF4ws6Vm9paZXZuJwPbXxIlw+ukwZQq89162oxERab5UStSVwHfd/VDgGOAqMxuZ3rD2nxn85jdQUACXXBJehSoi0ho1mqjdfZ27vxENbwWWAgPTHVhLGDAgJOvXXoMrrtB7QESkdWpSHbWZDSO0n/haPZ9NMrMFZragtLS0ZaJrAd/4Btx8M/z+93DddVBRke2IRESaJuVEbWZdgD8D17n7Pm9+dvf73b3Y3YuLiopaMsb99qMfwbXXwp13hoYGNm7MdkQiIqlLKVGbWR4hSc9w91npDanl5eSEu0BmzoT582HcOPjww2xHJSKSmlTu+jDgf4Cl7t6q36Jx/vnw7LOwejUceijceitUVWU7KhGRhqVSov4CcDHwZTMribpT0xxX2kyYAIsWwUknwX/8B5x9Nmzdmu2oRESSS+Wuj3nubu5+hLuPjrqnMxFcugwfDrNmhYdhnnoKDj44DKvNRRGJozb5ZGKqrr4a5s2Dz3wmtBDTv394QKayMtuRiYjUaNeJGuDYY+HFF+GVV+Css2Dq1HBnyOuvh4StUraIZFu7T9QQnmI85hiYMQOmT4c334Sjjw5PNXbvDjfcoPuvRSR7zNPwuF5xcbEvWLCgxZebKVu3woMPwrp1sHYtPPAADB0aLkR+8Ytw/PFw4IHZjlJE2hIzW+juxfV91iHTwbQG
XbuG+uuEr30NHnooXHh88MEw7qCD4Igj4KijwgugBgzITqwi0vapRN0E7vDuu/D3v8Pzz4fh99+H3FwoLoaxY2HIEBg5EkaPDhcnzbIdtYi0Bg2VqJWo99OyZaFq5MUXoaQEtm2r+ayoCE4+Gc48MyTzoUNDKTw3N2vhikhMKVFniDts3gxLloSkPX8+/O1vUFZWM02nTjBwIIwaFeq8P//5kLy7d89a2CISA6qjzhAz6NkTvvSl0AHs2AGLF4dS9DvvwMKF4QLl/Pnw17/WzDtoUM3j7N26hUfcR4+u6fr1CweBoqLw7hIRaT9Uos6itWtDyXvRolDfnZcXxm/aBG+9FVqmqfv19OgBRx4ZDgjdu4ek3r17SObjx0Pv3qoXF2mNVKKOqQEDQndqkjenbN8eSuMlJVBaGhLykiUhiS9bFh7GSXSJRnxzc0MyLyqCQw4JJfUePcJj8wceGLr+/VVPLtKaKFHHWOfO4UGcY45peLrKSnj11dCSzaZN8Omn8PHHsHRpuMhZVrZ3a+wdOoRE3r176Hr3Dsl7+/ZQ/TJ4cBiG8ODPqFGh6qW6Gvr0gS5d0rfNIrIvVX20A5WV4f3by5fDBx/AqlWwfn0oiZeVheGPPw4J2AzWrAnDFRWhXryu/v3hs58N3bBhoc7cPczbsWO4YNqpUzgI9OgRukQ1TbduYZoE93AAUAlf2jtVfbRzHTrAiBGha4rq6lBP/v778MknISGvWxf+XrYMnngiJPmmys8PCbtzZ9iwIVxwHTAgJP3+/UMiLyoK96QPHhymra4OB5WBA8M966tXh2UNH64kL22fErUklZMT6rkPOST5NLt315Sm3cPfO3ZAeXlIrJs3h6qYrVv3rlMvKwv3nPfpA4WFIfGuWhXes7JrVzgAlJc3HmNubrgIm5MTYkj0c3NDqb5Ll3BA6NIlvLslcQJZUBDq63v2DPPn5+/dr6oKceTnh2kb6zp2rJm3U6eaC8MiLUGJWvZLfv7ef3fsGB7B31/uob79ww9rEnb37uHumH/+M5wdmIWSfWVlKHEnqlHcQ6LdsSMcDLZtC3XuW7eGeczCxdk5c/Z+QKklFRaGfZGTEw4anTvXVAFVVIT19uxZ84bGHj3Cmc/u3eEsYeDAEAGoKYoAAAqrSURBVOenn4YDRn0Hhfz8MNyzJ/Tqte/dPslqNQsKwne0e3c4oBQVhTjKy2vW1bVrzcGtY8cQW313E+3cWXOwrK6u2b/SslRHLe1adXVIUrt3hwS6e3focnNDgtq9OySjhrpdu0I/MX95eTiTqKgIB4yqqnCg2Lw5dPn5IZFv3hwSYLduYbiyMvy9fHk4kFRXhwReUFCzrh07std8XOLAkDhIVFWFMx+zEGdZWTg4JM5g8vP3PsupfbaTlxe2NS+v8eHc3Jp9W1FRc/Cr3c9EV11d853k5YXvNDd37/3SpQucdlrz9u9+1VGb2e+A04H17n5Y80IQiaecnJrE01pUV9ccUHbuDGce9V30hfpLtzt2hFJ8x47hILNhQ9j+Tp3CuJ07w9nH1q3h88S66g6bhesIlZVhGT16hMS1bVuYN1EtljjLSQxXVYV5KipClxjevr3+8VVVNd9Rhw41CbOqqma4pbr91a9fuI7T0lKp+ngAuAt4qOVXLyJNlZNTUw3SrRsccEC2I2o7EgeTul1VVTgIJaqjKivDmUN19d4HsDRUUAApJGp3n2tmw9KzehGR+EhUzTR0J1FhYebiSWixt0aY2SQzW2BmC0pLS1tqsSIi7V6LJWp3v9/di929uKioqKUWKyLS7uk9bCIiMadELSISc40majObCbwCHGxma8zssvSHJSIiCanc9XFBJgIREZH6qepDRCTmlKhFRGJOiVpEJOaUqEVEYk6JWkQk5pSoRURiTolaRCTmlKhFRGJOiVpEJOaUqEVEYi5eiTpZe0IiIu1YfFohdw9NS+fnw8iRoWnk3r1D88p1+4MGweDBau5YRNqF+CTqykr40Y+g
pATeeSf0N20KXX2tThYUhCZ/O3YMrXL26RMaj+vbN/SLikJrm927h36PHjXJvrBQSV5EWo34JOq8PLj22n3HV1eHJpMTSXvjRli5EpYtg/Ly0GTyjh2hGeSVK+G116C0tOEmhQsKQtLu1Qt69qxJ4ImuqAiGD4f+/aFr19CCaOfOoVVREZEMi0+iTiYnp6ZEPGJEavNUV4emgsvKQrd5c/g7keg3bAjDiXEffADz54fPdu1qOI6ePWuqYPLyQhI/5JBQiu/evaY74ICQ7JXcRWQ/xT9RN0dOTk3puCncQyl9/fqQvNevh61bQ4m+rGzfZF9VFf6eMaP+5XXpEkrnHTuGrnv3vUvu9XWdO4c4evUK86uKRqTdSylRm9nJwK+BXOC37j41rVFli1lIlMOHhy5V5eUhYSdK8GVl8NFH8OabIbnv2hWqaMrK4L33QqLfuBEqKhpebkFBSPQHHBBK8gUFNUk/UX2TqJfv2jVciO3Ycd9+x44h6XftGurzlfxFWpVGE7WZ5QJ3A18F1gDzzexv7v52uoNrNQoLQzdoUOrzuMO2bTVJO9GVl4fPN20KJfrS0tD/9NOaqpldu0K9fENVNcnk5tYk7dpdly7hwOEeSv4FBaFqJz8/JPfENhYWhvHl5WGaHj3CcqurQ2cWzmgS/YaGU50um8N1u9xcHegk41IpUY8Flrn7BwBm9ghwJqBEvT/MapLksGHNW4Z7qJr55BPYvh127w6Ju25/585wUNi6NXlXWhoSMIQLtYn5d+8OB4UdO1ps01u93NxwAMvNrRnn3rRh2PuAlTgwJKZJ1k+sPycn9JMdOFId15Rp9+cAlZi37oEvIZXtTra8psbX0tNBiNM93H322mupz5eiVBL1QGB1rb/XAEfXncjMJgGTAIYMGdIiwUkjzMLFzG7d0r+u6uqQ8MvLQ/IuLKypzqldik5MW10dfriNDac6XTaHEx2E20grKkJXWZk8UTQ2nFhmYj1VVftOU18/MU9VVU1XV33JLVnCS3XaZONSSWa1k27d/Vl7GY3161teQ/E1FEtLTZeYNvE/0L176vM1QSqJur5vYp+tcPf7gfsBiouLm7CV0irk5NRUfdTWr1924hFpR1K5d2wNMLjW34OAtekJR0RE6kolUc8HPmtmw80sHzgf+Ft6wxIRkYRGqz7cvdLMrgaeI9ye9zt3fyvtkYmICJDifdTu/jTwdJpjERGReuj5ZhGRmFOiFhGJOSVqEZGYU6IWEYk586Y8gZPqQs1KgVXNnL0PsKEFw2kpiqvp4hqb4moaxdV0zYltqLsX1fdBWhL1/jCzBe5enO046lJcTRfX2BRX0yiupmvp2FT1ISISc0rUIiIxF8dEfX+2A0hCcTVdXGNTXE2juJquRWOLXR21iIjsLY4lahERqUWJWkQk5mKTqM3sZDN718yWmdmNWYxjsJm9YGZLzewtM7s2Gn+LmX1kZiVRd2qW4ltpZoujGBZE43qZ2d/N7P2o3zPDMR1ca7+UmNkWM7suG/vMzH5nZuvNbEmtcUn3j5lNiX5z75rZSVmI7XYze8fM3jSzx82sRzR+mJntqLXv7stwXEm/u0ztsyRxPVorppVmVhKNz+T+SpYj0vc7c/esd4TXpy4HRgD5wCJgZJZi6Q8cFQ13Bd4DRgK3AJNjsK9WAn3qjJsG3BgN3wjcluXv8mNgaDb2GTAOOApY0tj+ib7XRUBHYHj0G8zNcGwnAh2i4dtqxTas9nRZ2Gf1fneZ3Gf1xVXn818CN2dhfyXLEWn7ncWlRL2nAV133w0kGtDNOHdf5+5vRMNbgaWEdiPj7EzgwWj4QeCsLMZyArDc3Zv7ZOp+cfe5wKY6o5PtnzOBR9x9l7uvAJYRfosZi83dn3f3yujPVwktKGVUkn2WTMb2WUNxmZkB5wIz07HuhjSQI9L2O4tLoq6vAd2sJ0czGwZ8Dkg0K3x1dIr6u0xXL9TiwPNmtjBqUBigr7uvg/AjAg7IUmwQWgCq
/c8Th32WbP/E7Xd3KfBMrb+Hm9k/zexFM/tSFuKp77uLyz77EvCJu79fa1zG91edHJG231lcEnVKDehmkpl1Af4MXOfuW4B7gQOB0cA6wmlXNnzB3Y8CTgGuMrNxWYpjHxaaavsa8KdoVFz2WTKx+d2Z2U1AJTAjGrUOGOLunwP+D/AHM8tAc/N7JPvu4rLPLmDvAkHG91c9OSLppPWMa9I+i0uijlUDumaWR/gCZrj7LAB3/8Tdq9y9Gvhv0niK3BB3Xxv11wOPR3F8Ymb9o9j7A+uzERvh4PGGu38SxRiLfUby/ROL352ZTQROBy70qFIzOk3eGA0vJNRrHpSpmBr47rK+z8ysA/AvwKOJcZneX/XlCNL4O4tLoo5NA7pR3df/AEvd/Ve1xvevNdnZwJK682Ygts5m1jUxTLgQtYSwryZGk00E/prp2CJ7lXLisM8iyfbP34DzzayjmQ0HPgu8nsnAzOxk4PvA19y9vNb4IjPLjYZHRLF9kMG4kn13Wd9nwFeAd9x9TWJEJvdXshxBOn9nmbhKmuKV1FMJV0+XAzdlMY4vEk5L3gRKou5UYDqwOBr/N6B/FmIbQbh6vAh4K7GfgN7AbOD9qN8rC7EVAhuB7rXGZXyfEQ4U64AKQknmsob2D3BT9Jt7FzglC7EtI9RfJn5r90XTfj36jhcBbwBnZDiupN9dpvZZfXFF4x8ArqgzbSb3V7IckbbfmR4hFxGJubhUfYiISBJK1CIiMadELSISc0rUIiIxp0QtIhJzStQiIjGnRC0iEnP/HxMNTYqVPTUfAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWoAAAEICAYAAAB25L6yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXwUVbbA8d/JAiEQZIvIjiwiCggMIo4C4oiKIIqDgtuA+zaO+xNnHLeHo/N0XMYZ9TnKQwcEEUV5oiM+QRFlhICAIAjIoixC2MMSQpLz/jjVpAlZOpCkO/T5fj796e6q6qpTt6tP3bpVdVtUFeecc7ErIdoBOOecK5knaueci3GeqJ1zLsZ5onbOuRjnido552KcJ2rnnItxnqhjjIicJSJrw94vFpGzIpn2MJb1soj88XA/f6RE5CMRGRbhtJ+JyPUVHdPhEBEVkTblPM9BIvKTiOwSkS7lOe8jISItg/VNinYs8cQLO8ap6snlMR8RGQ5cr6pnhs375vKYdxHLegRoo6pXlTSdqvaroOUr0FZVV1TE/CvJ08BvVfX9UHIEklU1N9qBucrnNWpX6cT4tleyFsDiaAfhYoP/WCqAiIwQkYmFhj0vIn8NXl8jIktEJEtEVorITSXMa7WInBO8riEio0Vkm4h8B5xaxHJ/COb7nYgMCoa3B14GTg8OpbcHw0eLyMiwz98gIitEZKuITBaRxmHjVERuFpHlwfL/LiJSRLznA78HhgTLWhAM/0xEHheRL4E9QKvw5gwRGS4iX4rICyKyQ0SWisivSiiXa4My3CYiH4tIi2D4jGCSBcHyhxTx2dYiMk1EtojIZhEZKyJ1CpX5vSKyMIjlLRFJCRt/n4hsEJH1InJtcTGGrdfK4DtZJSJXBsMTRORBEVkjIptE5A0ROUZEqovILiAxWIcfgNA6bQ/W6fSw8npWRLYHy/hlMPynYJ7DwuLoLyLfiMjOYPwjYeOGBJ+vHbzvJyI/i0h6SesWTNs42Fa2BtvODWHjuotIRrDMjSLyTDA8RUTGBOW/XUTmiEjD0pYV11TVH+X8wGpDe4DawftEYAPQI3jfH2gNCNA7mLZrMO4sYG3YvFYD5wSvnwS+AOoBzYBFhaa9FGiM7YCHALuBRsG44cDMQnGOBkYGr88GNgNdgerAC8CMsGkV+ACoAzQHMoHzi1n/R4AxhYZ9BvwInIw1uSUHw64Piy8XuCsYNwTYAdQL+3xo2ouBFUD7YF4PAl8VirVNCd9PG6BvsJ7pWCJ8rlCZzw7Ksh6wBLg5GHc+sBHoANQE3ixuecH4nUC74H0j4OTg9bXBOrQCagHvAv8sah2AlsH7pLDxofK6Btu+Rgbl+/dgvc4FsoBaYdtVx2Db6BSsw8Vh8xsbbA/1gfXAgGLK7qBYgM+BF4EUoHOwXfwqGDcLuDp4XYuC7f8m4H+B1CD2XxD8VvxRzDYb7QCO1gcwE/hN8Lov8EMJ074H3BG8PoviE/VKwpIjcGP4tEXMdz5wUfB6OCUn6teA/wobVwvYD7QM3itwZtj4CcCIYpb7CEUn6seKGBaeqNcDEjZ+dtgPPXzaj4DrwqZLwHZ2LcJiLTZRFxHvxcA3hcr8qrD3/wW8HLweBTwZNu6E4paHJertwK+BGoXGfQrcGva+XVDeSYXXgeIT9fKw9x2DaRqGDdsCdC5mnZ8Dng17XwdL9N8C/11CWR2IBass5AFpYeOfAEYHr2cAjwINCs3jWuAroFNl/R6r+sObPirOm8DlwesrgvfAgUPLfweHi9uBC4AGEcyzMfBT2Ps14SNF5DciMj84nNyO1foimW9o3gfmp6q7sB96k7Bpfg57vQdL5mXxUynj12nwSw6sCeIqrAXwfNh6bsWOTpoUMe0hRORYERkvIutEZCcwhkPLqbh1LfE7CKequ7Ejg5uBDSIyRURODJtP+GfXYMmvLE0AG8Ne7w2WWXhY
LQAROU1EpotIpojsCGI6sM6quh14G9tm/hLh8hsDW1U1q9B6hL6H67Ad2dKgeWNAMPyfwMfA+KD56L9EJDnCZcYlT9QV523gLBFpCgwiSNQiUh14Bzur31BV6wAfYommNBuwWkxI89CLoI32H8BvgfrBfBeFzbe0bhLXYwkwNL+a2GHwugjiKqy4ZZUWQ5NC7d7Ng7gK+wm4SVXrhD1qqOpXEcb3RBBLJ1WtDVxFZOUPJXwHRVHVj1W1L9bssRT7jqBQeQfzyeXg5HtgNhHGVpI3gclAM1U9BjtncWCdRaQzVtMdB/w1wnmuB+qJSFrYsOYE24yqLlfVy4FjgT8DE0WkpqruV9VHVfUk4JfAAOA3R7R2RzlP1BVEVTOxw/X/AVap6pJgVDWsDTETyBWRflh7YiQmAA+ISN1gB3B72Lia2A86E+yEJVY7CtkINBWRasXM+03gGhHpHOxM/gR8raqrI4wt3EagpZT9yo5jgd+JSLKIXIq1QX9YxHQvY+VwMkBwEu7SQstvVcJy0oBd2Mm5JsB9ZYhxAjBcRE4SkVTg4eImFJGGIjIw2OntC5aZF4weB9wlIseLSC2svN/Soi+/ywTyS1mn0qRhtd9sEemOHeWF4kzBjip+j7V5NxGRW0uboar+hDVhPBGcIOyE1aLHBvO9SkTSVTUfawICyBORPiLSUUQSsTb8/RSUiyuCJ+qK9SZwDmHNHsFh4u+wH/w27AczOcL5PYodWq4CpmKHkKH5focdss7CElVH4Muwz07DLvf6WUQ2F56xqn4K/BGr7W/ATnYOjTCuwt4OnreIyLwyfO5roC12UvNxYLCqbiki1klYDW180HSxCAi/JvsR4PWgaeSyIpbzKHbSdAcwBTuRFxFV/Qhr352GnQycVsLkCcA9WM1zK3biOJQAR2Hf3wzs+8zm4B1v+DL3YOXxZbBOPSKNN8ytwGMikgU8hG1/IU9g5zpeUtV92BHGSBFpG8F8L8fardcDk4CHVfWTYNz5wOLgKpbngaGqmg0cB0zEkvQS7ITkmMNYp7ghBzcJOhcdUsQNOc454zVq55yLcZ6onXMuxnnTh3POxTivUTvnXIyrkN7zGjRooC1btqyIWTvn3FFp7ty5m1W1yP5VKiRRt2zZkoyMjIqYtXPOHZVEpNi7XL3pwznnYpwnaueci3GeqJ1zLsb5X3E5F4P279/P2rVryc7OjnYorpylpKTQtGlTkpMj7zDQE7VzMWjt2rWkpaXRsmVL5NA/0nFVlKqyZcsW1q5dy/HHHx/x57zpw7kYlJ2dTf369T1JH2VEhPr165f5SMkTtXMxypP00elwvteYStT/+Z8wdixsLKrrdOeci1Mx00adnQ3PPw9btkCtWrBgAbQ6km7SnXPuKBEzNeqUFKtJz5oF+fnwhz9EOyLnXKRq1Sr+7zNXr17Nm2++Wez49evXM3jw4CNaBsD27dt58cUXS5xm9erVdOjQocRpYlHMJGqAxETo0QPuuQfGj4c5c6IdkXPuSJWUqHNzc2ncuDETJ0484uVEkqirqphp+gh3333WDPLqq3DqqdGOxrkou/NOmD+/fOfZuTM891yxo++//35atGjBrbfaP4c98sgjiAgzZsxg27Zt7N+/n5EjR3LRRReVuqgRI0awZMkSOnfuzLBhw6hbty5TpkwhOzub3bt3M2rUKAYMGMCiRYsYPXo0kyZNYt++faxatYorrriChx8+9G8pn3rqKSZMmMC+ffsYNGgQjz76KCNGjOCHH36gc+fO9O3bl6eeeqrEuLKzs7nlllvIyMggKSmJZ555hj59+rB48WKuueYacnJyyM/P55133qFx48ZcdtllrF27lry8PP74xz8yZMiQUte9vMRkok5Lg+bNITMz2pE4F5+GDh3KnXfeeSBRT5gwgX/961/cdddd1K5dm82bN9OjRw8GDhxY6lUMTz75JE8//TQffPABAKNHj2bWrFksXLiQevXqsXr16oOmnz17NosWLSI1
NZVTTz2V/v37061btwPjp06dyvLly5k9ezaqysCBA5kxYwZPPvkkixYtYn6EO7W///3vAHz77bcsXbqUc889l2XLlvHyyy9zxx13cOWVV5KTk0NeXh4ffvghjRs3ZsqUKQDs2LEjomWUl5hM1AB168K2bdGOwrkYUELNt6J06dKFTZs2sX79ejIzM6lbty6NGjXirrvuYsaMGSQkJLBu3To2btzIcccdV+b59+3bl3r16hU7rn79+gBccsklzJw585BEPXXqVLp06QLArl27WL58Oc2bNy9TDDNnzuT22+3/hE888URatGjBsmXLOP3003n88cdZu3Ytl1xyCW3btqVjx47ce++93H///QwYMICePXuWeZ2PREy1UYerVw+2bo12FM7Fr8GDBzNx4kTeeusthg4dytixY8nMzGTu3LnMnz+fhg0bHvYt7jVr1ix2XOEaeuH3qsoDDzzA/PnzmT9/PitWrOC6664rcwzF/bvVFVdcweTJk6lRowbnnXce06ZN44QTTmDu3Ll07NiRBx54gMcee6zMyzsSnqidc0UaOnQo48ePZ+LEiQwePJgdO3Zw7LHHkpyczPTp01mzptjukw+SlpZGVlZWxMv95JNP2Lp1K3v37uW9997jjDPOOGj8eeedx6hRo9i1axcA69atY9OmTWVeTq9evRg7diwAy5Yt48cff6Rdu3asXLmSVq1a8bvf/Y6BAweycOFC1q9fT2pqKldddRX33nsv8+bNi3g55SFmmz48UTsXXSeffDJZWVk0adKERo0aceWVV3LhhRfSrVs3OnfuzIknnhjRfDp16kRSUhKnnHIKw4cPp27duiVOf+aZZ3L11VezYsUKrrjiioOaPQDOPfdclixZwumnnw7YZXtjxoyhdevWnHHGGXTo0IF+/fqVejLx1ltv5eabb6Zjx44kJSUxevRoqlevzltvvcWYMWNITk7muOOO46GHHmLOnDncd999JCQkkJyczEsvvRTRupeXCvlz227duumR/sPL44/Dgw/ajTDVq5dTYM5VEUuWLKF9+/bRDqPSjR49moyMDP72t79FO5QKVdT3KyJzVbVbUdPHdNMH+AlF55yL6aYPsOaPwzip7JyrZN9++y1XX331QcOqV6/O119/HfE8hg8fzvDhw2MillgSs4k61IzlNWrnqoaOHTtGfA1zRYulWMpDzDd9+AlF51y880TtnHMxzhO1c87FuJhN1LVrg4i3UTvnXESJWkTqiMhEEVkqIktE5PQKDyzBTih6jdq52FdaX9Fl8fbbb9O+fXv69OnDZ599xldffVVu8y6Lli1bsnnz5qgsu7BIa9TPA/9S1ROBU4AlFRdSAb870bn489prr/Hiiy8yffr0qCbqWFLq5XkiUhvoBQwHUNUcIKdiwzLeg55zUemOulz7o96wYQNDhgxh586d5Obm8tJLL9GzZ0/GjRvHn/70J1SV/v378+c//5nHHnuMmTNnsmrVKjp16sQXX3xBYmIiY8aM4YUXXuC1116jRo0aLF26lDVr1vA///M/vP7668yaNYvTTjuN0aNHA3DLLbcwZ84c9u7dy+DBg3n00UfZsWMH3bt3Z/LkybRr147LL7+cs88+mxtuuKHUdXjmmWcYNWoUANdffz133nknu3fvLrKP6hEjRjB58mSSkpI499xzefrpp0v/QkqjqiU+gM7AbGA08A3wKlCziOluBDKAjObNm2t5OO881e7dy2VWzlUp33333YHXd9yh2rt3+T7uuKPk5c+bN0979ep14H379u11zZo1umPHDlVVzczM1NatW2t+fr6qqtasWbPYeT399NM6cuRIVVXNzc3VnTt36rp167RZs2a6adMm3b9/v/bp00cnTZqkqqq9e/fWOXPmqKrqww8/rE899dSBeQ0bNkyHDBmi+fn5+t5772laWpouXLhQ8/LytGvXrvrNN9+oquqWLVsOLK937966YMECVVWdOnWq9ujRQ8eNG6fnnXdeiWXQokUL
zczM1IyMDO3QoYPu2rVLs7Ky9KSTTtJ58+bpxIkT9frrrz8w/fbt23XLli16wgknHCiXbdu2FTnv8O83BMjQYvJwJDe8JAFdgdtV9WsReR4YAfyxUMJ/BXgFrK+PI9+FWNPHDz+Ux5ycq7qi0B11ufZHfeqpp3Lttdeyf/9+Lr74Yjp37sy0adM466yzSE9PB+DKK69kxowZXHzxxaXGduGFFyIidOzYkYYNG9KxY0fAOpFavXo1nTt3ZsKECbzyyivk5uayYcMGvvvuOzp16kTfvn15++23ue2221iwYEFEZTFz5kwGDRp0oGvWSy65hC+++ILzzz//kD6qc3NzSUlJ4frrr6d///4MGDAgomWUJpI26rXAWlUN3Xs5EUvcFa5ePW/6cC5ayqs/6l69ejFjxgyaNGnC1VdfzRtvvFFsX9CRqB700paQkHDgdeh9bm4uq1at4umnn+bTTz9l4cKF9O/f/0Cc+fn5LFmyhBo1arA1whNgxcVaVB/VSUlJzJ49m1//+te89957nH/++Ye9nuFKTdSq+jPwk4i0Cwb9CviuXJZeilAbdX5+ZSzNOReuvPqjXrNmDcceeyw33HAD1113HfPmzeO0007j888/Z/PmzeTl5TFu3Dh69+59yGfL2sc0wM6dO6lZsybHHHMMGzdu5KOPPjow7tlnn6V9+/aMGzfuQC2/NL169eK9995jz5497N69m0mTJtGzZ88i+6jetWsXO3bs4IILLuC5554rt9vYI+3r43ZgrIhUA1YC15TL0ktRr54l6Z07oU6dyliicy6kvPqj/uyzz3jqqadITk6mVq1avPHGGzRq1IgnnniCPn36oKpccMEFRZ6YvPDCCxk8eDDvv/8+L7zwQkTLO+WUU+jSpQsnn3wyrVq1OvDHA8uWLePVV19l9uzZpKWl0atXL0aOHMmjjz5a4vy6du3K8OHD6d69O2AnE7t06cLHH398SB/VWVlZXHTRRWRnZ6OqPPvssxHFXJqY7Y8aYPRouOYaWLkSjj/+yONyrqqI1/6o48VR0x81QOga+uAfd5xzLi7FbDenAKH/v9y9O7pxOOdKV1X7gD7ttNPYt2/fQcP++c9/HriaJBbEdKIO1ag9Ubt4pKqH/AN3LKuqfUBX9o7kcJqbY7rpI1Sj9qYPF29SUlLYsmXLEV3G5mKPqrJlyxZSUlLK9LmYrlF704eLV02bNmXt2rVkZmZGOxRXzlJSUmjatGmZPhPTidqbPly8Sk5O5ni/1MkFvOnDOediXJVI1F6jds7Fs5hO1MnJUK2aJ2rnXHyL6UQNVqv2pg/nXDyrEonaa9TOuXgW84m6Vi2vUTvn4lvMJ2qvUTvn4p0naueci3Exn6i96cM5F+9iPlF7jdo5F+88UTvnXIyrEonamz6cc/Es5hN1rVpeo3bOxbeYT9Q1a0JODkTwZ8HOOXdUqhKJGrxW7ZyLXxH1Ry0iq4EsIA/ILe6fcitCeJ/UdepU1lKdcy52lOWPA/qo6uYKi6QYXqN2zsW7KtP04Vd+OOfiVaSJWoGpIjJXRG6syIAK87/jcs7Fu0ibPs5Q1fUicizwiYgsVdUZ4RMECfxGgObNm5dbgN704ZyLdxHVqFV1ffC8CZgEdC9imldUtZuqdktPTy+3AL3pwzkX70pN1CJSU0TSQq+Bc4FFFR1YiDd9OOfiXSRNHw2BSSISmv5NVf1XhUYVxps+nHPxrtREraorgVMqIZYiedOHcy7exfzleamp9uw1audcvIr5RJ2QYMnaE7VzLl7FfKIG7+rUORffqkyi9hq1cy5eVYlEnZYGWVnRjsI556LDE7VzzsW4KpGoa9f2RO2ci19VIlF7jdo5F8+qTKLeuTPaUTjnXHRUiUTtTR/OuXhWJRJ1qOkjPz/akTjnXOWrEom6dm179mupnXPxqEok6rQ0e/bmD+dcPKpSidpPKDrn4lGVSNShpg+vUTvn4lGVSNReo3bOxbMqkai9Ru2ci2dVIlH7yUTn
XDyrUonamz6cc/GoSiRqb/pwzsWzKpGoU1IgMdFr1M65+FQlErWI9/fhnItfESdqEUkUkW9E5IOKDKg43tWpcy5elaVGfQewpKICKY13deqci1cRJWoRaQr0B16t2HCK500fzrl4FWmN+jngP4BiOxoVkRtFJENEMjIzM8sluHBeo3bOxatSE7WIDAA2qerckqZT1VdUtZuqdktPTy+3AEO8Ru2ci1eR1KjPAAaKyGpgPHC2iIyp0KiK4CcTnXPxqtREraoPqGpTVW0JDAWmqepVFR5ZIbVre9OHcy4+VYnrqKGgRq0a7Uicc65ylSlRq+pnqjqgooIpSVqaJWn/Oy7nXLypMjVq7+/DORevqkyi9q5OnXPxqsok6vr17XnjxujG4Zxzla3KJOp27ex56dLoxuGcc5WtyiTqFi2gRg1YErXeRpxzLjqqTKJOSLBatSdq51y8qTKJGqB9e0/Uzrn4U6US9Yknwpo1fi21cy6+VKlE3b69PS9bFt04nHOuMlXJRO3NH865eFKlEnXbtnZS0RO1cy6eVKlEXb06tG7tido5F1+qVKIGu0TP26idc/GkyiXqNm1gxQrv7tQ5Fz+qXKJu2xb27oUNG6IdiXPOVY4ql6jbtLHn5cujG4dzzlWWKpeo27a15xUrohuHc85VliqXqJs1g+Rkr1E75+JHlUvUSUnQqpXXqJ1z8aPKJWqwdmqvUTvn4kWVTNRt2/oles65+FElE3WbNrBnj1+i55yLD6UmahFJEZHZIrJARBaLyKOVEVhJQld++K3kzrl4EEmNeh9wtqqeAnQGzheRHhUbVsm6d7fOmb74IppROOdc5Sg1UavZFbxNDh5RbR2uUwe6dIHp06MZhXPOVY6I2qhFJFFE5gObgE9U9esiprlRRDJEJCMzM7O84zzEWWfBv/9tt5M759zRLKJErap5qtoZaAp0F5EORUzziqp2U9Vu6enp5R3nIfr0gZwcmDWrwhflnHNRVaarPlR1O/AZcH6FRFMGPXtaO7U3fzjnjnaRXPWRLiJ1gtc1gHOApRUdWGlq14ZTT4XXX/f+qZ1zR7dIatSNgOkishCYg7VRf1CxYUXmr3+1NurTT/dbyp1zR69IrvpYqKpdVLWTqnZQ1ccqI7BIdO9ubdQ7d8I//hHtaJxzrmJUyTsTw7VpA337wvjxfku5c+7oVOUTNcDQofDjj3a5nnPOHW2OikR90UX2D+Xjx0c7EuecK39HRaI+5hi44AKYMAHy8qIdjXPOla+jIlGDNX/8/DPMmBHtSJxzrnwdNYm6f3+oWbPgpKLfWu6cO1ocNYm6Zk0YOBAmTrQbYbp29atAnHNHh6MmUYM1f2zdCnPnwtKlsGhRtCNyzrkjd1Ql6n794IknYOpUe/+vf1myfv316MblnHNHQrQC2ge6deumGRkZ5T7fsujYERo0gO3bYeFCyMqC1NSohuScc8USkbmq2q2ocUdVjTrc+efDZ5/B/PmQnw/ffhvtiJxz7vAc1YkaoFkze54/P3qxOOfckThqE3XPnnDJJfDmm/bXXZ6onXNVVVK0A6go1arBO+/Y686d4ZtvohuPc84drqO2Rh2uc2c7oXi4t5fv3w+5ueUbk3PORSpuEvXevbB8+eF9/sorYdCg8o3JOeciFTeJGqzN+he/gM8/j/yzu3fD5Mnw1Vd+p6NzLjriIlGfdBI0aWJNGNu22T+YjxkT2WenTYN9++yOx8zMio3TOeeKEheJOjnZ/lhg2TK7nrpjR3jmmcg+O2VKweulUf9LX+dcPIqLRA2QkAAi1nnT8OF2Fcj335f8GVX48EPr4AlgyZIKD9M55w4RN4k63JAhlrTHjIH//m+YObPo6RYtgp9+gptvttvPvUbtnIuGUhO1iDQTkekiskREFovIHZURWEVq3Bh694aRIy0J9+ljCXvrVqs1f/ih1aYnTbKEfuGFcOKJXqN2zkVHJDXqXOAeVW0P9ABuE5GTKjasinfbbZCebgn6rLMs
Ydevbyce+/e3m2XeeQfOOAOOO674RB3plSD5+fD223DXXZCTU66r4pw7ypWaqFV1g6rOC15nAUuAJhUdWEUbPBg2bYIbb7Qa9JQp8PTT8Oqr0LYt3Hef3STz61/b9O3b2wnJ3bvtvSrccgt0725Xk5RE1ZL/ZZfBc88V39TinHNFKVMbtYi0BLoAXxcx7kYRyRCRjMwqdh1bcrL9Oe4998B118H998Pq1TbukkvsuX17ew71wjd6NLz8MmRkWH8iJZk3z/rGvuceO6np/+vonCuLiPujFpFawOfA46r6bknTxkJ/1EciJwdat7a27K+DXdLatXDCCZbUf/lLu776zDOtXXvXLhg1Cr74wu5+XL4cqleH99+HWrWsmWXUKNiwAX71K/vX9GnToruO4XbssN4Gn30WevSIdjTOxacj7o9aRJKBd4CxpSXpo0G1ajB9OkyYUDCsaVO7pO/MM2HlSrjpJvsj3YceghUroFcv+MMf4OOPbfpp0+D3v4fsbKtxDxpkvfj16gWzZhXfTj17tt09OW+eNZn83//ZDTcleeYZmDPn8Nd32jT4978tUTvnYpCqlvgABHgDeK60aUOPX/ziFxov8vJU//IX1fHjVbdtKxh+++2qoHr66fb8ySc2/J137P2ECaojR6quXq06apRqo0aqt9yi2qCBjR88WHXKFHv9xBOHLnfVKtV9+1Q//dSmad5cdffuw1uH3/3O5lG9uuqCBaotW1qczrnKA2RocXm4uBFakKjPBBRYCMwPHheU9Jl4StTF2bVLtVs31VNOUX3wQUvoqqqbNlmphx5JSfbcvr29btBAdcgQ1cRE1XbtbFybNqr5+apZWZaMX3hBNSFBtU8f1dNOU61Tx6Z78MGi43j7bdWcnOJj7dRJtVkzm0doXueeWzHlEsvy823H6Vw0HFGiPpyHJ+qS9eiheuKJVhu+6SbV3/9edf9+1Z9+ssfq1ZaIQfW88+z54YdVa9QoSPCnnaYqYq9ffln1iissuV9zjepTT9n0c+eq9upl0/TrZ0lbVXX2bNWvvrLXmzfb+JEjVU86yV537Gjz2rix4svi//5P9Z57Kn45kXj9dSv3RYuiHYmLR56oY8yePaq5uSVPc/nlljh37lQ95hj7pjLQHYQAABHjSURBVE4+WfU//9Nq1Lm5qm++aQl63z5rdrn9dtWUFJs2lMQTElRvuMGeO3dWfekl1WrVVGvXVv35Z9V337XpvvxSdfJk1QcesOYPUH3xxfJb57FjbedRWM+etqwFC8pvWYerXz+L5YEHoh2JWb8+2hG4yuSJugrKyVHdu9deP/igNYOsXVv653bssKS9caPqk0+qfvCBDf/f/1WtX9++8Q4drJnlqqtUBwxQTU21ZB+Sn29NMccea00hdeuqtmqletxxqjffbE0w4b7/XnX6dNV164qPKbSz+fDDguE//VRwhHDffREXTYXYscN2YKDaooWVQTRlZNjO1s8VxA9P1FVcfn75JI516+zE5JYtqnffXVDzfvTRQ6d98UVrt77mGtVbb1W98krVyy6z6Zs0sffPPGM7kcTEgnk9+KA1HTz2mGqXLqoXXWTDwBJ969aqn39uJ0OfftqGd+pk8wwdZcyZo5qZeXjruGNH8TuMkowfb7HccEPBEYaqNRdt2lRwjiB8h7Zhg+oZZ6h+/PHhxVqSO++0OPr3L/95u9jkidodYvt2u9oj1FYdqc8/Vx040BJrqDZ85ZWWrIYNKxgmYu3ooZOlv/qVXfkSGp+YaDX8X/xCddw4PXAlzKhR9tnjj1f94QdLvJMnW7v76tVWC3///YL29nCLF9sVK4mJqldfbe3ujz2m+sgjqg89pPqPf9i5gKJceqlqerodjaSk2E7mvvsKTq6GmpS6dClI1rfdZsMaNVLdutWG/fyz6jffFL2M/fst8d59d8HJ5aLk5to8ExJsXTZtivjrcVWYJ2pXITZsOPTE27vvWm08VKudMsWS7syZ9v677+wEYqjm
+te/2pUsoatOQPXMM1Xr1SuoqYceCQkFbe/166uOGGFNAxddZJcnVq9utfabbjr4xGv4o2tXS/zz59vnR4xQHTrUxv32txbja69ZwgfVCy+0I4e771a96y4b9sc/2k4kKUn1nHMszl/+UvU3v7GELmJHCzNm2Lw++cR2LM89VxDHsGGq771nO8oFC2zns3ixLX/69ILlhMqoONnZ1kQ2a5bqxRdbDFOmRPb9ffqp6sSJdrSQl1f6eRNXsUpK1BHfmVgWVf3ORFc5fv4ZGja0Hgp37oQ33rCbiR5/3G7hf+01aNAATj0VWraE11+3O0O7drXb9z/6yP6wuH596wKgfn3r9Kp5cxuen2+37If6Ip84EW6/3ZYLkJRkzyJ2c9IDD9gdpWDpdOtWm2e43/wGxo6FGjVs/itWWOddTz5pNzH172/rMmnSwZ9r2BD27IHTT7f4n3zy0PJISIBrr7WbnZYutb5ozjwTfvgBunSBdessrkGDbD5LlsCf/2x3xoJ1Mpaba/9idPfdMHCg3Vl74onWb82778KaNdCmjd149Ze/2Py6d7fhOTlw661WRg0bwnffWd83a9bApZdCv352Vy1Y/zaLF9vf2qWmWjzt20NiYsH65OXZNO+8A1lZdkfvBRfA5s12k1adOvY3eZ07Q4sW9j2E7N5tZdu8OdSte2hZLVtm3RT362d306pa+YXs22fLT00taQs8VG6uzScvD155BT791LaXfv3gmmvsxreirFpl/yJVrVrZlheupDsTPVG7KmvLFktqZ5wR+Q8yJ8fuxFy92vpxqVvXftS1akX2+W3brM+WtDS4+GLrIrew/HxL5qmpcMop1qXAE0/Yna3z5lmnX+vWWZcCmzdbEmvSxHZEr7xiSes//sOS5oIF8PzzlpgaN7ZkP3VqQUdggwbZjqxWLUsk1atbfC+8cGhcvXpZMp061RLLoEEW//PPW7LMy7NuD6pVsxiWLbOdWXq6xSoCzZrZ+v38syW1cGlpFsfWrfY6K8vKNiHB5pmdbck5N9e+h/37C3qfPOMMOPts+3/S1autWwOwHXPv3tZ9Q8eO9jx7tnVRHOogLSnJlnH33dbZ2gcfwN/+Zn9ofe+90LevzfOdd2yndOmlVv4ff2zl0LevxTx1qvXDU6uWxb9qle3U6tWzZVarZn86ArYT2bDBdqTr11unbo0aWXcR995bsMMvC0/UzkWZqiWn0n7A+/aVPs3u3QVJtGPHopc1aZIt7+yzbUfRqBG0amXj9++3Lgd69Di4Fgo232eftSR1wQX2Jxvp6daPzYwZBctt0sSSfu/elhBnzbJuCHJyLLHt2mX/pnTyydaPTP368OWX8I9/WHx/+pPNd9Ei603yueesP50zz7Sjh4YNLUlmZFg3CqtWFSRvsK6JX34ZPvvMxv34I4wbVzC+f39ISbHkHNKwIWzcWPA+MdHi2rTJ3rdvD+ecY8tZs8Z2eAMG2A5q5UrbObzxhu3cW7e2z37+uZXhHXfY0cu6dbZO4UcHkfJE7ZyLaTk51mTUoEHR41UtWa5YAR062NFF4WT45ZdWc+7du6CJYsUKe6Sm2k7g22+tdty0KZx2mtXwFy60pNusWelx5uYWNJmB7aRUC47odu2K/OisME/UzjkX44649zznnHPR44naOedinCdq55yLcZ6onXMuxnmids65GOeJ2jnnYpwnaueci3GeqJ1zLsZ5onbOuRjnido552KcJ2rnnItxnqidcy7GlZqoRWSUiGwSkUWVEZBzzrmDRVKjHg2cX8FxOOecK0apiVpVZwBbKyEW55xzRSi3NmoRuVFEMkQkIzMzs7xm65xzca/cErWqvqKq3VS1W3p6ennN1jnn4p5f9eGcczHOE7VzzsW4SC7PGwfMAtqJyFoRua7iw3LOOReSVNoEqnp5ZQTinHOuaN704ZxzMc4TtXPOxThP1M45F+M8UTvnXIzzRO2cczHOE7VzzsU4T9TOORfjPFE751yM80TtnHMx
zhO1c87FOE/UzjkX4zxRO+dcjPNE7ZxzMc4TtXPOxbhSuzmtVOPHQ61a0KwZdOgAiYnRjsg556IudhK1KgwbBjk59r5OHejdG3r2hNatLXk3bw4NGoBIdGN1zrlKFDuJGuD772HTJli+HD77DKZNg/ffP3ialBSoVw+qVSt4JCeX/D7SYUVNk5QEubn2umFDe6Smwp499giNq17dPp+UVPBITCx4AGRmwpYt0LatjXfOuQiIqpb7TLt166YZGRnlM7PMTPjxR/jpJ3teswZ27LCa9/799hz+iHRYqOZeWUTsqAEs0TduDAkJBQ+Rgkdo+qLeh14nJEBamj2vWgV5eVCjhj1SU23HERKa9phjbFxmpu1U2ra1svnhB1i0CNLTbZqff7ajlzZtbEe0f7/NPyXFdlTHHgt790JWlj2H4klMPHh9srNt+dWq2fe3Zw8cdxzUrFmwY1u3zr6L00+32LZvh507bVnHHAP5+bbsvDwrv/z8g5+LGgYFO8rkZJtvjRoWR36+lUXNmhZ/airUrg27dtlyCz+ysmxe6en22eRkW//0dNi3zyoXNWrYPELfUY0aVk7Z2fb5lBR7gJVt6PvYscPKNlRJqFXL5rN7t1VWFi+Gli2hXTuoW9eOMlNSbLnZ2fbZUNmHyjz0PVSvbtMmJhas265dti7JyfYQsXJo0MC+m717D932Cj9KG5+TU1CJUT20IhSquBTe7ssqL8/KL/T58N9S6BFaxypCROaqarcix8V8oq4oqvZll5bU9++3Lzw722r7GzfaBl2zpv3IExMLpt23z+aZm2uPUIIJPUKJcN48S5ah5BJ6hL6LUAIq6X1+viWB/fvtx1y9usUV+sHt22fTitiys7IsCe7ebYkmO9t2fAkJ0LQpdOoEmzfbD/q442DlShsfOspISLDPhOZbVqmpVmaZmQcPT0y05HG48z2ahe/cj2Yihybv0PCinsG249LKJrRTTUws+A2G7/jD3ycn244tVIEI7VB274Zt22wbrV7dHtWqWSz79hX87nNy7HONG8N33x1mMRSfqOP3+FukoIkiNbVylz1sWOUurzihjSvSWoeqJftNmyzppqVZ7REKNvjwHU+NGvaZ7GyrDYrYDmzfvoKNu359m2b+fPv8McdYrTI722pM4c1H4bXG8JpU4WFQsMPMybEd19699johwXZau3ZZ/Hv22HLS0my5oeWHHrVq2ec2b7ZtJSfHdjabNtn7du1sWFZWwc50zx7bodeoYfPNzrblq0KjRgU72Tp1rPxDO/pQzbdmTTuiOflkO4JcudJi3LbN5lWjhiWVpKSDjypCO/vwMs7NLVi3mjULKif799u0u3bZ+oQqHqHvuahHSeNCj2rVbD6pqVbWhStChbeRwttNXl7Bcop6Dr2uVcuaQEUOPqIKf2Rl2fegWnCkEdqOCm9XubkF31N4JSs11Y5m8vIOTsyqBYk7lNxzcwuOnMpZ/NaonXMuhpRUo47oOmoROV9EvheRFSIyonzDc845V5JSE7WIJAJ/B/oBJwGXi8hJFR2Yc845E0mNujuwQlVXqmoOMB64qGLDcs45FxJJom4C/BT2fm0w7CAicqOIZIhIRmbhM/vOOecOWySJuqhLAg45A6mqr6hqN1Xtlp6efuSROeecAyJL1GuBZmHvmwLrKyYc55xzhUWSqOcAbUXkeBGpBgwFJldsWM4550JKveFFVXNF5LfAx0AiMEpVF1d4ZM4554AKuuFFRDKBNYf58QbA5nIMp7x4XGUXq7F5XGXjcZXd4cTWQlWLPMFXIYn6SIhIRnF350STx1V2sRqbx1U2HlfZlXds/g8vzjkX4zxRO+dcjIvFRP1KtAMohsdVdrEam8dVNh5X2ZVrbDHXRu2cc+5gsVijds45F8YTtXPOxbiYSdSx0ue1iDQTkekiskREFovIHcHwR0RknYjMDx4XRCm+1SLybRBDRjCsnoh8IiLLg+e6lRxTu7BymS8iO0XkzmiUmYiMEpFN
IrIobFix5SMiDwTb3Pcicl4UYntKRJaKyEIRmSQidYLhLUVkb1jZvVzJcRX73VVWmRUT11thMa0WkfnB8Mosr+JyRMVtZ6oa9Qd2x+MPQCugGrAAOClKsTQCugav04BlWD/cjwD3xkBZrQYaFBr2X8CI4PUI4M9R/i5/BlpEo8yAXkBXYFFp5RN8rwuA6sDxwTaYWMmxnQskBa//HBZby/DpolBmRX53lVlmRcVVaPxfgIeiUF7F5YgK285ipUYdM31eq+oGVZ0XvM4CllBEt64x5iLg9eD168DFUYzlV8APqnq4d6YeEVWdAWwtNLi48rkIGK+q+1R1FbAC2xYrLTZVnaqqucHbf2OdnlWqYsqsOJVWZiXFJSICXAaMq4hll6SEHFFh21msJOqI+ryubCLSEugCfB0M+m1wiDqqspsXwigwVUTmisiNwbCGqroBbCMCjo1SbGCddoX/eGKhzIorn1jb7q4FPgp7f7yIfCMin4tIzyjEU9R3Fytl1hPYqKrLw4ZVenkVyhEVtp3FSqKOqM/ryiQitYB3gDtVdSfwEtAa6AxswA67ouEMVe2K/TXabSLSK0pxHEKsd8WBwNvBoFgps+LEzHYnIn8AcoGxwaANQHNV7QLcDbwpIrUrMaTivrtYKbPLObhCUOnlVUSOKHbSIoaVqcxiJVHHVJ/XIpKMfQFjVfVdAFXdqKp5qpoP/IMKPEQuiaquD543AZOCODaKSKMg9kbApmjEhu085qnqxiDGmCgzii+fmNjuRGQYMAC4UoNGzeAweUvwei7WrnlCZcVUwncX9TITkSTgEuCt0LDKLq+icgQVuJ3FSqKOmT6vg7av14AlqvpM2PBGYZMNAhYV/mwlxFZTRNJCr7ETUYuwshoWTDYMeL+yYwscVMuJhTILFFc+k4GhIlJdRI4H2gKzKzMwETkfuB8YqKp7woani/2xNCLSKohtZSXGVdx3F/UyA84Blqrq2tCAyiyv4nIEFbmdVcZZ0gjPpF6AnT39AfhDFOM4EzssWQjMDx4XAP8Evg2GTwYaRSG2VtjZ4wXA4lA5AfWBT4HlwXO9KMSWCmwBjgkbVullhu0oNgD7sZrMdSWVD/CHYJv7HugXhdhWYO2XoW3t5WDaXwff8QJgHnBhJcdV7HdXWWVWVFzB8NHAzYWmrczyKi5HVNh25reQO+dcjIuVpg/nnHPF8ETtnHMxzhO1c87FOE/UzjkX4zxRO+dcjPNE7ZxzMc4TtXPOxbj/B8NJQHyxf0DYAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "\n",
    "\n",
    "def plot_curve_pair(epochs, series_a, series_b, label_a, label_b, title, new_figure=True):\n",
    "    \"\"\"Plot two metric curves (red vs. blue) over epochs on one labelled figure.\"\"\"\n",
    "    if new_figure:\n",
    "        plt.figure()\n",
    "    plt.plot(epochs, series_a, 'r', label=label_a)\n",
    "    plt.plot(epochs, series_b, 'b', label=label_b)\n",
    "    plt.title(title)\n",
    "    plt.xlabel('epoch')\n",
    "    plt.legend()\n",
    "\n",
    "\n",
    "# `history` comes from the model-training cell above; `history.history` maps\n",
    "# metric name -> list of per-epoch values.\n",
    "hist = history.history\n",
    "epochs = range(len(hist['softmax_accuracy']))\n",
    "\n",
    "# The first plot draws on the current (implicit) figure so the total number of\n",
    "# figures matches the original cell; the rest each open a new figure.\n",
    "plot_curve_pair(epochs, hist['softmax_accuracy'], hist['val_softmax_accuracy'],\n",
    "                'Training acc', 'Validation acc',\n",
    "                'Training and validation accuracy', new_figure=False)\n",
    "plot_curve_pair(epochs, hist['loss'], hist['val_loss'],\n",
    "                'Training loss', 'Validation loss',\n",
    "                'Training and validation loss')\n",
    "plot_curve_pair(epochs, hist['triplet_loss'], hist['softmax_loss'],\n",
    "                'triplet_loss', 'softmax_loss',\n",
    "                'training triplet and softmax loss')\n",
    "plot_curve_pair(epochs, hist['val_triplet_loss'], hist['val_softmax_loss'],\n",
    "                'val_triplet_loss', 'val_softmax_loss',\n",
    "                'validation triplet and softmax loss')\n",
    "\n",
    "plt.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
