{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "sys.path.append('../')\n",
    "\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "from matplotlib import pyplot as plt\n",
    "import cv2\n",
    "from tqdm import tqdm_notebook\n",
    "import pickle\n",
    "import os\n",
    "import logging\n",
    "import time\n",
    "from IPython.core.debugger import set_trace\n",
    "\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "from dataset.mask_functions import rle2mask, mask2rle\n",
    "from dataset.dataset import prepare_trainset\n",
    "from utils.utils import save_checkpoint, load_checkpoint, set_logger\n",
    "from utils.gpu_utils import set_n_get_device\n",
    "\n",
    "#from model.deeplab_model_kaggler.deeplab import DeepLab\n",
    "#from model.model_unet_classify_zero import UNetResNet34 as ZeroMaskClassifier\n",
    "#from model.CSAILVision.models.models import SegmentationModule\n",
    "#from model.model_unet import UNetResNet34\n",
    "#from model.model_plain_unet import UNetResNet34\n",
    "#from model.model_resnet_fpn import FPNResNet34\n",
    "from model.deeplab_model_kaggler.lr_scheduler import LR_Scheduler\n",
    "\n",
    "import segmentation_models_pytorch as smp\n",
    "\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "====MODEL ACHITECTURE: b3====\n"
     ]
    }
   ],
   "source": [
    "######### Config the training process #########\n",
    "#device = set_n_get_device(\"0, 1, 2, 3\", data_device_id=\"cuda:0\")#0, 1, 2, 3, IMPORTANT: data_device_id is set to free gpu for storing the model, e.g.\"cuda:1\"\n",
    "MODEL = 'b3'\n",
    "#AUX_LOGITS = True#False, only for 'INCEPTION_V3'\n",
    "print('====MODEL ACHITECTURE: %s===='%MODEL)\n",
    "\n",
    "device = set_n_get_device(\"2\", data_device_id=\"cuda:0\")#0, 1, 2, 3, IMPORTANT: data_device_id is set to free gpu for storing the model, e.g.\"cuda:1\"\n",
    "multi_gpu = None #None#[0, 1]#use 2 gpus\n",
    "\n",
    "SEED = 1234 #5678#4567#3456#2345#1234\n",
    "debug = True# if True, load 100 samples\n",
    "IMG_SIZE = (512, 768)\n",
    "BATCH_SIZE = 4\n",
    "NUM_WORKERS = 24\n",
    "torch.cuda.manual_seed_all(SEED)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## the dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Count images in train/test folder:  5546 3698\n",
      "Count of trainset (for training):  942\n",
      "Count of validset (for training):  166\n"
     ]
    }
   ],
   "source": [
    "train_dl, val_dl = prepare_trainset(BATCH_SIZE, NUM_WORKERS, SEED, IMG_SIZE, debug)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "for i, (images, masks) in enumerate(train_dl):\n",
    "    images = images.to(device=device, dtype=torch.float)\n",
    "    masks = masks.to(device=device, dtype=torch.float)\n",
    "    #labels = (torch.sum(masks.reshape(masks.size()[0], -1), dim=1, keepdim=True)==0).to(device=device, dtype=torch.float) #1 for non-zero-mask\n",
    "    if i==0:\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([4, 1, 512, 768]), torch.Size([4, 4, 512, 768]))"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "images.size(), masks.size()#, labels.size()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## the model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Downloading: \"http://storage.googleapis.com/public-models/efficientnet/efficientnet-b3-5fb5a3c3.pth\" to /home/endi.niu/.cache/torch/checkpoints/efficientnet-b3-5fb5a3c3.pth\n",
      "100%|██████████| 49388949/49388949 [00:02<00:00, 19715146.27it/s]\n"
     ]
    }
   ],
   "source": [
    "# net = DeepLab(num_classes=4,\n",
    "#               backbone=MODEL,#resnet34, resnet101\n",
    "#               output_stride=16,#default 16, 8\n",
    "#               sync_bn=None,\n",
    "#               freeze_bn=False,\n",
    "#               debug=True\n",
    "#              ).cuda(device=device)\n",
    "\n",
    "#net = SegmentationModule(net_enc='resnet50', net_dec='upernet').cuda(device=device)\n",
    "#net = SegmentationModule(net_enc='hrnetv2', net_dec='c1').cuda(device=device)\n",
    "\n",
    "#net = UNetResNet34(debug=True).cuda(device=device)\n",
    "#net = FPNResNet34(debug=debug).cuda(device=device)\n",
    "\n",
    "#net = smp.PSPNet(encoder_name='resnet34', classes=4, activation=None)\n",
    "net = smp.Unet(encoder_name='efficientnet-b3', \n",
    "         encoder_weights='imagenet', \n",
    "         classes=4,\n",
    "         activation=None,\n",
    "         center=True)\n",
    "\n",
    "#checkpoint_path = 'checkpoint/UNetResNet34_512_v1_seed3456/best.pth.tar'\n",
    "#net, _ = load_checkpoint(checkpoint_path, net)\n",
    "\n",
    "if multi_gpu is not None:\n",
    "    net = nn.DataParallel(net, device_ids=multi_gpu)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "encoder._conv_stem.weight\n",
      "encoder._bn0.weight\n",
      "encoder._bn0.bias\n",
      "encoder._blocks.0._depthwise_conv.weight\n",
      "encoder._blocks.0._bn1.weight\n",
      "encoder._blocks.0._bn1.bias\n",
      "encoder._blocks.0._se_reduce.weight\n",
      "encoder._blocks.0._se_reduce.bias\n",
      "encoder._blocks.0._se_expand.weight\n",
      "encoder._blocks.0._se_expand.bias\n",
      "encoder._blocks.0._project_conv.weight\n",
      "encoder._blocks.0._bn2.weight\n",
      "encoder._blocks.0._bn2.bias\n",
      "encoder._blocks.1._depthwise_conv.weight\n",
      "encoder._blocks.1._bn1.weight\n",
      "encoder._blocks.1._bn1.bias\n",
      "encoder._blocks.1._se_reduce.weight\n",
      "encoder._blocks.1._se_reduce.bias\n",
      "encoder._blocks.1._se_expand.weight\n",
      "encoder._blocks.1._se_expand.bias\n",
      "encoder._blocks.1._project_conv.weight\n",
      "encoder._blocks.1._bn2.weight\n",
      "encoder._blocks.1._bn2.bias\n",
      "encoder._blocks.2._expand_conv.weight\n",
      "encoder._blocks.2._bn0.weight\n",
      "encoder._blocks.2._bn0.bias\n",
      "encoder._blocks.2._depthwise_conv.weight\n",
      "encoder._blocks.2._bn1.weight\n",
      "encoder._blocks.2._bn1.bias\n",
      "encoder._blocks.2._se_reduce.weight\n",
      "encoder._blocks.2._se_reduce.bias\n",
      "encoder._blocks.2._se_expand.weight\n",
      "encoder._blocks.2._se_expand.bias\n",
      "encoder._blocks.2._project_conv.weight\n",
      "encoder._blocks.2._bn2.weight\n",
      "encoder._blocks.2._bn2.bias\n",
      "encoder._blocks.3._expand_conv.weight\n",
      "encoder._blocks.3._bn0.weight\n",
      "encoder._blocks.3._bn0.bias\n",
      "encoder._blocks.3._depthwise_conv.weight\n",
      "encoder._blocks.3._bn1.weight\n",
      "encoder._blocks.3._bn1.bias\n",
      "encoder._blocks.3._se_reduce.weight\n",
      "encoder._blocks.3._se_reduce.bias\n",
      "encoder._blocks.3._se_expand.weight\n",
      "encoder._blocks.3._se_expand.bias\n",
      "encoder._blocks.3._project_conv.weight\n",
      "encoder._blocks.3._bn2.weight\n",
      "encoder._blocks.3._bn2.bias\n",
      "encoder._blocks.4._expand_conv.weight\n",
      "encoder._blocks.4._bn0.weight\n",
      "encoder._blocks.4._bn0.bias\n",
      "encoder._blocks.4._depthwise_conv.weight\n",
      "encoder._blocks.4._bn1.weight\n",
      "encoder._blocks.4._bn1.bias\n",
      "encoder._blocks.4._se_reduce.weight\n",
      "encoder._blocks.4._se_reduce.bias\n",
      "encoder._blocks.4._se_expand.weight\n",
      "encoder._blocks.4._se_expand.bias\n",
      "encoder._blocks.4._project_conv.weight\n",
      "encoder._blocks.4._bn2.weight\n",
      "encoder._blocks.4._bn2.bias\n",
      "encoder._blocks.5._expand_conv.weight\n",
      "encoder._blocks.5._bn0.weight\n",
      "encoder._blocks.5._bn0.bias\n",
      "encoder._blocks.5._depthwise_conv.weight\n",
      "encoder._blocks.5._bn1.weight\n",
      "encoder._blocks.5._bn1.bias\n",
      "encoder._blocks.5._se_reduce.weight\n",
      "encoder._blocks.5._se_reduce.bias\n",
      "encoder._blocks.5._se_expand.weight\n",
      "encoder._blocks.5._se_expand.bias\n",
      "encoder._blocks.5._project_conv.weight\n",
      "encoder._blocks.5._bn2.weight\n",
      "encoder._blocks.5._bn2.bias\n",
      "encoder._blocks.6._expand_conv.weight\n",
      "encoder._blocks.6._bn0.weight\n",
      "encoder._blocks.6._bn0.bias\n",
      "encoder._blocks.6._depthwise_conv.weight\n",
      "encoder._blocks.6._bn1.weight\n",
      "encoder._blocks.6._bn1.bias\n",
      "encoder._blocks.6._se_reduce.weight\n",
      "encoder._blocks.6._se_reduce.bias\n",
      "encoder._blocks.6._se_expand.weight\n",
      "encoder._blocks.6._se_expand.bias\n",
      "encoder._blocks.6._project_conv.weight\n",
      "encoder._blocks.6._bn2.weight\n",
      "encoder._blocks.6._bn2.bias\n",
      "encoder._blocks.7._expand_conv.weight\n",
      "encoder._blocks.7._bn0.weight\n",
      "encoder._blocks.7._bn0.bias\n",
      "encoder._blocks.7._depthwise_conv.weight\n",
      "encoder._blocks.7._bn1.weight\n",
      "encoder._blocks.7._bn1.bias\n",
      "encoder._blocks.7._se_reduce.weight\n",
      "encoder._blocks.7._se_reduce.bias\n",
      "encoder._blocks.7._se_expand.weight\n",
      "encoder._blocks.7._se_expand.bias\n",
      "encoder._blocks.7._project_conv.weight\n",
      "encoder._blocks.7._bn2.weight\n",
      "encoder._blocks.7._bn2.bias\n",
      "encoder._blocks.8._expand_conv.weight\n",
      "encoder._blocks.8._bn0.weight\n",
      "encoder._blocks.8._bn0.bias\n",
      "encoder._blocks.8._depthwise_conv.weight\n",
      "encoder._blocks.8._bn1.weight\n",
      "encoder._blocks.8._bn1.bias\n",
      "encoder._blocks.8._se_reduce.weight\n",
      "encoder._blocks.8._se_reduce.bias\n",
      "encoder._blocks.8._se_expand.weight\n",
      "encoder._blocks.8._se_expand.bias\n",
      "encoder._blocks.8._project_conv.weight\n",
      "encoder._blocks.8._bn2.weight\n",
      "encoder._blocks.8._bn2.bias\n",
      "encoder._blocks.9._expand_conv.weight\n",
      "encoder._blocks.9._bn0.weight\n",
      "encoder._blocks.9._bn0.bias\n",
      "encoder._blocks.9._depthwise_conv.weight\n",
      "encoder._blocks.9._bn1.weight\n",
      "encoder._blocks.9._bn1.bias\n",
      "encoder._blocks.9._se_reduce.weight\n",
      "encoder._blocks.9._se_reduce.bias\n",
      "encoder._blocks.9._se_expand.weight\n",
      "encoder._blocks.9._se_expand.bias\n",
      "encoder._blocks.9._project_conv.weight\n",
      "encoder._blocks.9._bn2.weight\n",
      "encoder._blocks.9._bn2.bias\n",
      "encoder._blocks.10._expand_conv.weight\n",
      "encoder._blocks.10._bn0.weight\n",
      "encoder._blocks.10._bn0.bias\n",
      "encoder._blocks.10._depthwise_conv.weight\n",
      "encoder._blocks.10._bn1.weight\n",
      "encoder._blocks.10._bn1.bias\n",
      "encoder._blocks.10._se_reduce.weight\n",
      "encoder._blocks.10._se_reduce.bias\n",
      "encoder._blocks.10._se_expand.weight\n",
      "encoder._blocks.10._se_expand.bias\n",
      "encoder._blocks.10._project_conv.weight\n",
      "encoder._blocks.10._bn2.weight\n",
      "encoder._blocks.10._bn2.bias\n",
      "encoder._blocks.11._expand_conv.weight\n",
      "encoder._blocks.11._bn0.weight\n",
      "encoder._blocks.11._bn0.bias\n",
      "encoder._blocks.11._depthwise_conv.weight\n",
      "encoder._blocks.11._bn1.weight\n",
      "encoder._blocks.11._bn1.bias\n",
      "encoder._blocks.11._se_reduce.weight\n",
      "encoder._blocks.11._se_reduce.bias\n",
      "encoder._blocks.11._se_expand.weight\n",
      "encoder._blocks.11._se_expand.bias\n",
      "encoder._blocks.11._project_conv.weight\n",
      "encoder._blocks.11._bn2.weight\n",
      "encoder._blocks.11._bn2.bias\n",
      "encoder._blocks.12._expand_conv.weight\n",
      "encoder._blocks.12._bn0.weight\n",
      "encoder._blocks.12._bn0.bias\n",
      "encoder._blocks.12._depthwise_conv.weight\n",
      "encoder._blocks.12._bn1.weight\n",
      "encoder._blocks.12._bn1.bias\n",
      "encoder._blocks.12._se_reduce.weight\n",
      "encoder._blocks.12._se_reduce.bias\n",
      "encoder._blocks.12._se_expand.weight\n",
      "encoder._blocks.12._se_expand.bias\n",
      "encoder._blocks.12._project_conv.weight\n",
      "encoder._blocks.12._bn2.weight\n",
      "encoder._blocks.12._bn2.bias\n",
      "encoder._blocks.13._expand_conv.weight\n",
      "encoder._blocks.13._bn0.weight\n",
      "encoder._blocks.13._bn0.bias\n",
      "encoder._blocks.13._depthwise_conv.weight\n",
      "encoder._blocks.13._bn1.weight\n",
      "encoder._blocks.13._bn1.bias\n",
      "encoder._blocks.13._se_reduce.weight\n",
      "encoder._blocks.13._se_reduce.bias\n",
      "encoder._blocks.13._se_expand.weight\n",
      "encoder._blocks.13._se_expand.bias\n",
      "encoder._blocks.13._project_conv.weight\n",
      "encoder._blocks.13._bn2.weight\n",
      "encoder._blocks.13._bn2.bias\n",
      "encoder._blocks.14._expand_conv.weight\n",
      "encoder._blocks.14._bn0.weight\n",
      "encoder._blocks.14._bn0.bias\n",
      "encoder._blocks.14._depthwise_conv.weight\n",
      "encoder._blocks.14._bn1.weight\n",
      "encoder._blocks.14._bn1.bias\n",
      "encoder._blocks.14._se_reduce.weight\n",
      "encoder._blocks.14._se_reduce.bias\n",
      "encoder._blocks.14._se_expand.weight\n",
      "encoder._blocks.14._se_expand.bias\n",
      "encoder._blocks.14._project_conv.weight\n",
      "encoder._blocks.14._bn2.weight\n",
      "encoder._blocks.14._bn2.bias\n",
      "encoder._blocks.15._expand_conv.weight\n",
      "encoder._blocks.15._bn0.weight\n",
      "encoder._blocks.15._bn0.bias\n",
      "encoder._blocks.15._depthwise_conv.weight\n",
      "encoder._blocks.15._bn1.weight\n",
      "encoder._blocks.15._bn1.bias\n",
      "encoder._blocks.15._se_reduce.weight\n",
      "encoder._blocks.15._se_reduce.bias\n",
      "encoder._blocks.15._se_expand.weight\n",
      "encoder._blocks.15._se_expand.bias\n",
      "encoder._blocks.15._project_conv.weight\n",
      "encoder._blocks.15._bn2.weight\n",
      "encoder._blocks.15._bn2.bias\n",
      "encoder._blocks.16._expand_conv.weight\n",
      "encoder._blocks.16._bn0.weight\n",
      "encoder._blocks.16._bn0.bias\n",
      "encoder._blocks.16._depthwise_conv.weight\n",
      "encoder._blocks.16._bn1.weight\n",
      "encoder._blocks.16._bn1.bias\n",
      "encoder._blocks.16._se_reduce.weight\n",
      "encoder._blocks.16._se_reduce.bias\n",
      "encoder._blocks.16._se_expand.weight\n",
      "encoder._blocks.16._se_expand.bias\n",
      "encoder._blocks.16._project_conv.weight\n",
      "encoder._blocks.16._bn2.weight\n",
      "encoder._blocks.16._bn2.bias\n",
      "encoder._blocks.17._expand_conv.weight\n",
      "encoder._blocks.17._bn0.weight\n",
      "encoder._blocks.17._bn0.bias\n",
      "encoder._blocks.17._depthwise_conv.weight\n",
      "encoder._blocks.17._bn1.weight\n",
      "encoder._blocks.17._bn1.bias\n",
      "encoder._blocks.17._se_reduce.weight\n",
      "encoder._blocks.17._se_reduce.bias\n",
      "encoder._blocks.17._se_expand.weight\n",
      "encoder._blocks.17._se_expand.bias\n",
      "encoder._blocks.17._project_conv.weight\n",
      "encoder._blocks.17._bn2.weight\n",
      "encoder._blocks.17._bn2.bias\n",
      "encoder._blocks.18._expand_conv.weight\n",
      "encoder._blocks.18._bn0.weight\n",
      "encoder._blocks.18._bn0.bias\n",
      "encoder._blocks.18._depthwise_conv.weight\n",
      "encoder._blocks.18._bn1.weight\n",
      "encoder._blocks.18._bn1.bias\n",
      "encoder._blocks.18._se_reduce.weight\n",
      "encoder._blocks.18._se_reduce.bias\n",
      "encoder._blocks.18._se_expand.weight\n",
      "encoder._blocks.18._se_expand.bias\n",
      "encoder._blocks.18._project_conv.weight\n",
      "encoder._blocks.18._bn2.weight\n",
      "encoder._blocks.18._bn2.bias\n",
      "encoder._blocks.19._expand_conv.weight\n",
      "encoder._blocks.19._bn0.weight\n",
      "encoder._blocks.19._bn0.bias\n",
      "encoder._blocks.19._depthwise_conv.weight\n",
      "encoder._blocks.19._bn1.weight\n",
      "encoder._blocks.19._bn1.bias\n",
      "encoder._blocks.19._se_reduce.weight\n",
      "encoder._blocks.19._se_reduce.bias\n",
      "encoder._blocks.19._se_expand.weight\n",
      "encoder._blocks.19._se_expand.bias\n",
      "encoder._blocks.19._project_conv.weight\n",
      "encoder._blocks.19._bn2.weight\n",
      "encoder._blocks.19._bn2.bias\n",
      "encoder._blocks.20._expand_conv.weight\n",
      "encoder._blocks.20._bn0.weight\n",
      "encoder._blocks.20._bn0.bias\n",
      "encoder._blocks.20._depthwise_conv.weight\n",
      "encoder._blocks.20._bn1.weight\n",
      "encoder._blocks.20._bn1.bias\n",
      "encoder._blocks.20._se_reduce.weight\n",
      "encoder._blocks.20._se_reduce.bias\n",
      "encoder._blocks.20._se_expand.weight\n",
      "encoder._blocks.20._se_expand.bias\n",
      "encoder._blocks.20._project_conv.weight\n",
      "encoder._blocks.20._bn2.weight\n",
      "encoder._blocks.20._bn2.bias\n",
      "encoder._blocks.21._expand_conv.weight\n",
      "encoder._blocks.21._bn0.weight\n",
      "encoder._blocks.21._bn0.bias\n",
      "encoder._blocks.21._depthwise_conv.weight\n",
      "encoder._blocks.21._bn1.weight\n",
      "encoder._blocks.21._bn1.bias\n",
      "encoder._blocks.21._se_reduce.weight\n",
      "encoder._blocks.21._se_reduce.bias\n",
      "encoder._blocks.21._se_expand.weight\n",
      "encoder._blocks.21._se_expand.bias\n",
      "encoder._blocks.21._project_conv.weight\n",
      "encoder._blocks.21._bn2.weight\n",
      "encoder._blocks.21._bn2.bias\n",
      "encoder._blocks.22._expand_conv.weight\n",
      "encoder._blocks.22._bn0.weight\n",
      "encoder._blocks.22._bn0.bias\n",
      "encoder._blocks.22._depthwise_conv.weight\n",
      "encoder._blocks.22._bn1.weight\n",
      "encoder._blocks.22._bn1.bias\n",
      "encoder._blocks.22._se_reduce.weight\n",
      "encoder._blocks.22._se_reduce.bias\n",
      "encoder._blocks.22._se_expand.weight\n",
      "encoder._blocks.22._se_expand.bias\n",
      "encoder._blocks.22._project_conv.weight\n",
      "encoder._blocks.22._bn2.weight\n",
      "encoder._blocks.22._bn2.bias\n",
      "encoder._blocks.23._expand_conv.weight\n",
      "encoder._blocks.23._bn0.weight\n",
      "encoder._blocks.23._bn0.bias\n",
      "encoder._blocks.23._depthwise_conv.weight\n",
      "encoder._blocks.23._bn1.weight\n",
      "encoder._blocks.23._bn1.bias\n",
      "encoder._blocks.23._se_reduce.weight\n",
      "encoder._blocks.23._se_reduce.bias\n",
      "encoder._blocks.23._se_expand.weight\n",
      "encoder._blocks.23._se_expand.bias\n",
      "encoder._blocks.23._project_conv.weight\n",
      "encoder._blocks.23._bn2.weight\n",
      "encoder._blocks.23._bn2.bias\n",
      "encoder._blocks.24._expand_conv.weight\n",
      "encoder._blocks.24._bn0.weight\n",
      "encoder._blocks.24._bn0.bias\n",
      "encoder._blocks.24._depthwise_conv.weight\n",
      "encoder._blocks.24._bn1.weight\n",
      "encoder._blocks.24._bn1.bias\n",
      "encoder._blocks.24._se_reduce.weight\n",
      "encoder._blocks.24._se_reduce.bias\n",
      "encoder._blocks.24._se_expand.weight\n",
      "encoder._blocks.24._se_expand.bias\n",
      "encoder._blocks.24._project_conv.weight\n",
      "encoder._blocks.24._bn2.weight\n",
      "encoder._blocks.24._bn2.bias\n",
      "encoder._blocks.25._expand_conv.weight\n",
      "encoder._blocks.25._bn0.weight\n",
      "encoder._blocks.25._bn0.bias\n",
      "encoder._blocks.25._depthwise_conv.weight\n",
      "encoder._blocks.25._bn1.weight\n",
      "encoder._blocks.25._bn1.bias\n",
      "encoder._blocks.25._se_reduce.weight\n",
      "encoder._blocks.25._se_reduce.bias\n",
      "encoder._blocks.25._se_expand.weight\n",
      "encoder._blocks.25._se_expand.bias\n",
      "encoder._blocks.25._project_conv.weight\n",
      "encoder._blocks.25._bn2.weight\n",
      "encoder._blocks.25._bn2.bias\n",
      "encoder._conv_head.weight\n",
      "encoder._bn1.weight\n",
      "encoder._bn1.bias\n",
      "decoder.center.block.0.block.0.weight\n",
      "decoder.center.block.0.block.1.weight\n",
      "decoder.center.block.0.block.1.bias\n",
      "decoder.center.block.1.block.0.weight\n",
      "decoder.center.block.1.block.1.weight\n",
      "decoder.center.block.1.block.1.bias\n",
      "decoder.layer1.block.0.block.0.weight\n",
      "decoder.layer1.block.0.block.1.weight\n",
      "decoder.layer1.block.0.block.1.bias\n",
      "decoder.layer1.block.1.block.0.weight\n",
      "decoder.layer1.block.1.block.1.weight\n",
      "decoder.layer1.block.1.block.1.bias\n",
      "decoder.layer2.block.0.block.0.weight\n",
      "decoder.layer2.block.0.block.1.weight\n",
      "decoder.layer2.block.0.block.1.bias\n",
      "decoder.layer2.block.1.block.0.weight\n",
      "decoder.layer2.block.1.block.1.weight\n",
      "decoder.layer2.block.1.block.1.bias\n",
      "decoder.layer3.block.0.block.0.weight\n",
      "decoder.layer3.block.0.block.1.weight\n",
      "decoder.layer3.block.0.block.1.bias\n",
      "decoder.layer3.block.1.block.0.weight\n",
      "decoder.layer3.block.1.block.1.weight\n",
      "decoder.layer3.block.1.block.1.bias\n",
      "decoder.layer4.block.0.block.0.weight\n",
      "decoder.layer4.block.0.block.1.weight\n",
      "decoder.layer4.block.0.block.1.bias\n",
      "decoder.layer4.block.1.block.0.weight\n",
      "decoder.layer4.block.1.block.1.weight\n",
      "decoder.layer4.block.1.block.1.bias\n",
      "decoder.layer5.block.0.block.0.weight\n",
      "decoder.layer5.block.0.block.1.weight\n",
      "decoder.layer5.block.0.block.1.bias\n",
      "decoder.layer5.block.1.block.0.weight\n",
      "decoder.layer5.block.1.block.1.weight\n",
      "decoder.layer5.block.1.block.1.bias\n",
      "decoder.final_conv.weight\n",
      "decoder.final_conv.bias\n"
     ]
    }
   ],
   "source": [
    "for param in net.named_parameters():\n",
    "    print(param[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "-1\n",
      "-2\n",
      "-2\n",
      "-2\n",
      "-3\n",
      "-4\n",
      "-4\n",
      "-4\n",
      "-5\n",
      "-5\n",
      "-6\n",
      "-6\n",
      "-7\n",
      "-7\n",
      "-7\n",
      "-8\n",
      "-9\n",
      "-9\n",
      "-9\n",
      "-10\n",
      "-10\n",
      "-10\n",
      "-10\n",
      "-11\n",
      "-12\n",
      "-13\n",
      "-14\n",
      "-15\n",
      "-15\n",
      "-15\n",
      "-15\n",
      "-15\n",
      "-15\n",
      "-15\n",
      "-16\n",
      "-17\n",
      "-17\n",
      "-17\n",
      "-18\n",
      "-18\n"
     ]
    }
   ],
   "source": [
    "LearningRate = 0.01\n",
    "NUM_EPOCHS = 40\n",
    "enc_params = [p[1] for p in net.named_parameters() if ('resnet' in p[0] or 'encoder' in p[0])]\n",
    "other_params = [p[1] for p in net.named_parameters() if ('resnet' not in p[0] and 'encoder' not in p[0])]\n",
    "train_params = [{'params': enc_params, 'lr': LearningRate},\n",
    "                {'params': other_params, 'lr': LearningRate * 10}]\n",
    "\n",
    "optimizer = torch.optim.SGD(train_params, momentum=0.9, weight_decay=0.0001, lr=LearningRate)\n",
    "#scheduler = LR_Scheduler('poly', LearningRate, NUM_EPOCHS, len(train_dl))#lr_scheduler=['poly', 'step', 'cos']\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='max', \n",
    "                                                      factor=0.5, patience=4,#4 resnet34 \n",
    "                                                      verbose=False, threshold=0.0001, \n",
    "                                                      threshold_mode='rel', cooldown=0, \n",
    "                                                      min_lr=0, eps=1e-08)\n",
    "\n",
    "val_metric = 0\n",
    "for i_epoch in range(NUM_EPOCHS):\n",
    "    #for i, (image, masks) in enumerate(train_dl):\n",
    "    if np.random.randn(1)>0:\n",
    "        val_metric -= 1\n",
    "    print(val_metric)\n",
    "    scheduler.step(val_metric)\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "ename": "RuntimeError",
     "evalue": "Given groups=1, weight of size 64 3 7 7, expected input[4, 1, 512, 768] to have 3 channels, but got 1 channels instead",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mRuntimeError\u001b[0m                              Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-9-44e115016fb1>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mlogit\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnet\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mimages\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m    491\u001b[0m             \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    492\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 493\u001b[0;31m             \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    494\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    495\u001b[0m             \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/segmentation_models_pytorch/base/encoder_decoder.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m     22\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     23\u001b[0m         \u001b[0;34m\"\"\"Sequentially pass `x` trough model`s `encoder` and `decoder` (return logits!)\"\"\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 24\u001b[0;31m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     25\u001b[0m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdecoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     26\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m    491\u001b[0m             \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    492\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 493\u001b[0;31m             \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    494\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    495\u001b[0m             \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/segmentation_models_pytorch/encoders/resnet.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m     13\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     14\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 15\u001b[0;31m         \u001b[0mx0\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv1\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     16\u001b[0m         \u001b[0mx0\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbn1\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     17\u001b[0m         \u001b[0mx0\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrelu\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m    491\u001b[0m             \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_slow_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    492\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 493\u001b[0;31m             \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    494\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mhook\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_forward_hooks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    495\u001b[0m             \u001b[0mhook_result\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.7/site-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m    336\u001b[0m                             _pair(0), self.dilation, self.groups)\n\u001b[1;32m    337\u001b[0m         return F.conv2d(input, self.weight, self.bias, self.stride,\n\u001b[0;32m--> 338\u001b[0;31m                         self.padding, self.dilation, self.groups)\n\u001b[0m\u001b[1;32m    339\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    340\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mRuntimeError\u001b[0m: Given groups=1, weight of size 64 3 7 7, expected input[4, 1, 512, 768] to have 3 channels, but got 1 channels instead"
     ]
    }
   ],
   "source": [
    "logit = net(images)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([4, 4, 512, 768]), torch.Size([4, 4, 512, 768]))"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "logit.size(), masks.size()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(0.7799, device='cuda:0', grad_fn=<AddBackward0>)"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "_loss = net.criterion(logit, masks, nonempty_only=False)\n",
    "_loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(0.0789, device='cuda:0')"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "_metric = net.metric(logit, masks, nonempty_only=False)\n",
    "_metric"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## predict the validset, and analyse"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
     "# move checkpoint from gamma machine to here\n",
     "# NOTE: these are shell commands, not Python — run them in a terminal (or prefix with !)\n",
     "# cd checkpoint\n",
     "# scp -r endi.niu@10.171.36.214:/home/endi.niu/SIIM/checkpoint/deeplabv3plus_resnet_1280_v2_seed2345/ deeplabv3plus_resnet_1280_v2_seed2345\n",
     "# cd logging\n",
     "# scp -r endi.niu@10.171.36.214:/home/endi.niu/SIIM/logging/deeplabv3plus_resnet_1280_v2_seed2345.log deeplabv3plus_resnet_1280_v2_seed2345.log\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "sys.path.append('../')\n",
    "\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import math\n",
    "from matplotlib import pyplot as plt\n",
    "from tqdm import tqdm, tqdm_notebook\n",
    "import pickle\n",
    "import os\n",
    "import logging\n",
    "import time\n",
    "import gc\n",
    "from IPython.core.debugger import set_trace\n",
    "import cv2\n",
    "import copy\n",
    "\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "from utils.utils import save_checkpoint, load_checkpoint, set_logger\n",
    "from utils.gpu_utils import set_n_get_device\n",
    "\n",
    "from dataset.dataset import prepare_trainset\n",
    "\n",
    "\n",
    "use_model = 0\n",
    "if use_model==0:\n",
    "    from model.deeplab_model_kaggler.deeplab import DeepLab, predict_proba\n",
    "elif use_model==1:\n",
    "    from model.EfficientUnet.efficient_unet import Efficient_Unet, predict_proba\n",
    "elif use_model==2:\n",
    "    from model.CSAILVision.models.models import SegmentationModule, predict_proba\n",
    "elif use_model==3:\n",
    "    from model.model_unet import UNetResNet34, predict_proba\n",
    "elif use_model==4:#FPNResNet34\n",
    "    from model.model_resnet_fpn import FPNResNet34, predict_proba\n",
    "elif use_model==5:\n",
    "    from model.model_plain_unet import UNetResNet34, predict_proba\n",
    "elif use_model==6:\n",
    "    from model.model_pspnet import PSPNet, predict_proba\n",
    "\n",
    "#from model.model_unet_classify_zero import UNetResNet34 as NonzeroClf\n",
    "#from model.model_unet_classify_zero import predict_proba as predict_proba_nonzero\n",
    "\n",
    "def sigmoid(x):\n",
    "    return 1 / (1 + np.exp(-x))\n",
    "\n",
    "def inverse_sigmoid(x):\n",
    "    return np.log(x / (1-x))\n",
    "\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
       "====MODEL ARCHITECTURE: resnet====\n"
     ]
    }
   ],
   "source": [
    "######### Config the training process #########\n",
    "\n",
    "if use_model==0:\n",
    "    MODEL = 'resnet'\n",
    "elif use_model==1:\n",
    "    MODEL = 'b5'\n",
    "elif use_model==2:\n",
    "    MODEL = ''\n",
    "elif use_model==3:\n",
    "    MODEL = 'UNetResNet34'\n",
    "elif use_model==4:\n",
    "    MODEL = 'resnet34'\n",
    "elif use_model==5:\n",
    "    MODEL = 'resnet34'\n",
    "elif use_model==6:\n",
    "    MODEL = 'resnet34'\n",
    "else:\n",
    "    MODEL = ''\n",
     "print('====MODEL ARCHITECTURE: %s===='%MODEL)\n",
    "\n",
    "device = set_n_get_device(\"0,1,2,3\", data_device_id=\"cuda:0\")#0, 1, 2, 3, IMPORTANT: data_device_id is set to free gpu for storing the model, e.g.\"cuda:1\"\n",
    "multi_gpu = [0,1,2,3] #None#[0, 1]#use 2 gpus\n",
    "\n",
    "SEED = 2110\n",
    "debug = False# if True, load 100 samples\n",
    "if use_model==0:\n",
    "    IMG_SIZE = (512, 768) #(1024, 1536) #(768, 1152) #(512, 768)\n",
    "    output_shape = None\n",
    "elif use_model==2:\n",
    "    IMG_SIZE = (512, 768)\n",
    "    output_shape = None\n",
    "elif use_model==4:\n",
    "    IMG_SIZE = (512, 768)#(1024, 1536)\n",
    "    output_shape = None#(512, 768)\n",
    "elif use_model==5:\n",
    "    IMG_SIZE = (512, 768)\n",
    "    output_shape = None\n",
    "elif use_model==6:\n",
    "    IMG_SIZE = (512, 768)\n",
    "    output_shape = None\n",
    "BATCH_SIZE = 32#64\n",
    "NUM_WORKERS = 24\n",
    "torch.cuda.manual_seed_all(SEED)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Count images in train/test folder:  5546 3698\n",
      "Count of trainset (for training):  4714\n",
      "Count of validset (for training):  832\n"
     ]
    }
   ],
   "source": [
    "train_dl, val_dl = prepare_trainset(BATCH_SIZE, NUM_WORKERS, SEED, IMG_SIZE, debug, \n",
    "                                    nonempty_only=False, crop=False, output_shape=output_shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(832, 4, 512, 768)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# y should be makeup\n",
    "y_valid = []\n",
    "for i, (image, masks) in enumerate(val_dl):\n",
    "    #if i==10:\n",
    "    #    break\n",
    "    #truth = masks.to(device=device, dtype=torch.float)\n",
    "    y_valid.append(masks.numpy())\n",
    "y_valid = np.concatenate(y_valid, axis=0)\n",
    "y_valid.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "if use_model==0:\n",
    "    net = DeepLab(num_classes=4,\n",
    "                  backbone=MODEL,#resnet34, resnet101\n",
    "                  output_stride=16,#default 16, 8\n",
    "                  sync_bn=None,\n",
    "                  freeze_bn=False,\n",
    "                  debug=False, \n",
    "                  clf_path=True\n",
    "                 ).cuda(device=device)\n",
    "elif use_model==1:\n",
    "    net = Efficient_Unet(num_class=4, drop_connect_rate=0.2).cuda(device=device)\n",
    "elif use_model==2:\n",
    "    #net = SegmentationModule(net_enc='resnet50', net_dec='upernet').cuda(device=device)\n",
    "    net = SegmentationModule(net_enc='hrnetv2', net_dec='c1').cuda(device=device)\n",
    "elif use_model==3:\n",
    "    net = UNetResNet34(debug=False).cuda(device=device)\n",
    "elif use_model==4:\n",
    "    net = FPNResNet34(debug=False).cuda(device=device)\n",
    "elif use_model==5:\n",
    "    net = UNetResNet34(debug=False).cuda(device=device)\n",
    "\n",
    "#checkpoint_path = '../checkpoint/deeplabv3plus_resnet_512x768_v6_seed2001/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/deeplabv3plus_resnet_400x600_v7_seed2013/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/deeplabv3plus_resnet_768x1152_v7_seed2016/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/deeplabv3plus_resnet_512x768_v12_seed2041/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/UNetResNet34_512x768_v3_seed2034/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/fpn_resnet34_512x768_v1_seed2034/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/fpn_resnet34_768x1152_v1_seed2034/best.pth.tar'\n",
    "\n",
    "#checkpoint_path = '../checkpoint/fpn_resnet34_1024x1536_v1_seed2042/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2046/best.pth.tar'\n",
    "#checkpoint_path = '../checkpoint/unet_resnet34_512x768_v1_seed2061/best.pth.tar'\n",
    "checkpoint_path = '../checkpoint/CSAILVision_hrnetv2_c1_512x768_v3_seed2110/best.pth.tar'\n",
    "\n",
    "net, _ = load_checkpoint(checkpoint_path, net)\n",
    "\n",
    "if multi_gpu is not None:\n",
    "    net = nn.DataParallel(net, device_ids=multi_gpu)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "# ## tta: flip lr + ud\n",
    "# def predict_proba(net, test_dl, device, multi_gpu=False, mode='test', tta=True):\n",
    "#     if tta:\n",
    "#         print(\"use TTA\")\n",
    "#     else:\n",
    "#         print(\"not use TTA\")\n",
    "#     y_pred = None\n",
    "#     if multi_gpu:\n",
    "#         net.module.set_mode('test')\n",
    "#     else:\n",
    "#         net.set_mode('test')\n",
    "#     with torch.no_grad():\n",
    "#         if mode=='valid':\n",
    "#             for i, (image, masks) in enumerate(test_dl):\n",
    "#                 input_data = image.to(device=device, dtype=torch.float)\n",
    "#                 logit = net(input_data).cpu().numpy()\n",
    "#                 if tta:#horizontal/vertical flip\n",
    "#                     input_data_flip_lr = torch.flip(image, [3]).to(device=device, dtype=torch.float)\n",
    "#                     logit_flip_lr = net(input_data_flip_lr).cpu().numpy()[:,:,:,::-1]\n",
    "#                     input_data_flip_ud = torch.flip(image, [2]).to(device=device, dtype=torch.float)\n",
    "#                     logit_flip_ud = net(input_data_flip_ud).cpu().numpy()[:,:,::-1,:]                    \n",
    "#                     logit = (logit + logit_flip_lr + logit_flip_ud) / 3\n",
    "#                 if y_pred is None:\n",
    "#                     y_pred = logit\n",
    "#                 else:\n",
    "#                     y_pred = np.concatenate([y_pred, logit], axis=0)\n",
    "#         elif mode=='test':\n",
    "#             for i, image in enumerate(test_dl):\n",
    "#                 input_data = image.to(device=device, dtype=torch.float)\n",
    "#                 logit = net(input_data).cpu().numpy()\n",
    "#                 if tta:#horizontal/vertical flip\n",
    "#                     input_data_flip_lr = torch.flip(image, [3]).to(device=device, dtype=torch.float)\n",
    "#                     logit_flip_lr = net(input_data_flip_lr).cpu().numpy()[:,:,:,::-1]\n",
    "#                     input_data_flip_ud = torch.flip(image, [2]).to(device=device, dtype=torch.float)\n",
    "#                     logit_flip_ud = net(input_data_flip_ud).cpu().numpy()[:,:,::-1,:]                    \n",
    "#                     logit = (logit + logit_flip_lr + logit_flip_ud) / 3\n",
    "#                 if y_pred is None:\n",
    "#                     y_pred = logit\n",
    "#                 else:\n",
    "#                     y_pred = np.concatenate([y_pred, logit], axis=0)\n",
    "#     h,w = y_pred.shape[2], y_pred.shape[3]\n",
    "#     return y_pred.reshape(-1, 4, h, w)#Nx4x256x1600"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "use TTA\n",
      "CPU times: user 4min 54s, sys: 41.3 s, total: 5min 35s\n",
      "Wall time: 56 s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "preds_valid, preds_valid_clf = predict_proba(net, val_dl, device, multi_gpu=multi_gpu, mode='valid', tta=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "((832, 4, 512, 768), (832, 4, 512, 768), (832, 4))"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_valid.shape, preds_valid.shape, preds_valid_clf.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# ## NonzeroClf predict 4-ch-empty and delete mask\n",
    "# net_clf = NonzeroClf(debug=False).cuda(device=device)\n",
    "# checkpoint_path = '../checkpoint/nonzero_classifier_UNetResNet34_256x1600_v2_seed1234/best.pth.tar'\n",
    "# net_clf, _ = load_checkpoint(checkpoint_path, net_clf)\n",
    "\n",
    "# if multi_gpu is not None:\n",
    "#     net_clf = nn.DataParallel(net_clf, device_ids=multi_gpu)\n",
    "\n",
    "# preds_valid_clf = predict_proba_nonzero(net_clf, val_dl, device, multi_gpu=multi_gpu, mode='valid', tta=True)\n",
    "# preds_valid_clf.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def dice_overall(pred, truth, eps=1e-8):\n",
    "#     if nonempty_only:\n",
    "#         n, c = truth_mask.shape[0], truth_mask.shape[1]\n",
    "#         pred_mask = pred_mask.reshape(n*c, -1)\n",
    "#         truth_mask = truth_mask.reshape(n*c, -1)\n",
    "#         indexing = truth_mask.sum(axis=1)>0\n",
    "#         pred_mask = pred_mask[indexing]\n",
    "#         truth_mask = truth_mask[indexing]\n",
    "#         eps = 0.0\n",
    "    ## the correct LB metric: if both GT and pred empty mask image, then dice score=1\n",
    "    is_empty = (truth.sum(axis=1)==0) * (pred.sum(axis=1)==0)\n",
    "    truth_pos = truth[(1-is_empty).astype(bool)]\n",
    "    pred_pos = pred[(1-is_empty).astype(bool)]\n",
     "    intersect_pos = (pred_pos * truth_pos).sum(axis=1).astype(float)  # np.float removed in NumPy 1.24\n",
     "    union_pos = (pred_pos + truth_pos).sum(axis=1).astype(float)\n",
    "    dice_pos = ((2.0*intersect_pos + eps) / (union_pos+eps)).sum()\n",
    "    return (dice_pos + is_empty.sum()) / truth.shape[0]\n",
    "#     intersect = (pred * truth).sum(axis=1).astype(np.float)\n",
    "#     union = (pred + truth).sum(axis=1).astype(np.float)\n",
    "#     return ((2.0*intersect + eps) / (union+eps)).mean()\n",
    "\n",
    "# h, w = 256, 1600\n",
    "# print('For reference')\n",
    "# print('EMPTY_THRESHOLD: ', 400*(h/256)*(w/1600))\n",
    "# print('MASK_THRESHOLD: ', 0.5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "best_threshold_EMPTY:  [1, 1, 1, 1]\n",
      "best_threshold_CLF:  0.6\n",
      "best_threshold_MASK:  [0.3, 0.3, 0.3, 0.3]\n",
      "truth pos_ratio:  0.5336538461538461\n",
      "pred pos_ratio:  0.4131610576923077\n",
      "truth mean pixel:  0.15079583\n",
      "pred mean pixel:  0.1442530888777513\n",
      "0.6349317709140043\n"
     ]
    }
   ],
   "source": [
    "# search for best thresholds\n",
    "def calculate_dice(logit, truth, EMPTY_THRESHOLD=[200,1500,1500,2000], MASK_THRESHOLD=0.22, CLF_THRESHOLD=0.7, logit_clf=None):\n",
    "    n, c = truth.shape[0], truth.shape[1]\n",
    "#     h, w = truth.shape[2], truth.shape[3]\n",
    "#     logit = logit.reshape(n*c, h, w)\n",
    "#     truth = truth.reshape(n*c, h, w)\n",
    "#     logit = np.array([cv2.resize(ch, (525, 350)) for ch in logit])\n",
    "#     truth = np.array([cv2.resize(ch, (525, 350)) for ch in truth])\n",
    "    MASK_THRESHOLD = np.concatenate([MASK_THRESHOLD]*n).reshape(-1, 1)\n",
    "    \n",
    "    logit = sigmoid(logit).reshape(n*c, -1)\n",
    "    truth = truth.reshape(n*c, -1)\n",
     "    pred = (logit>MASK_THRESHOLD).astype(int)  # np.int removed in NumPy 1.24\n",
    "    if logit_clf is not None:\n",
    "        logit_clf = sigmoid(logit_clf).reshape(n*c, -1)\n",
    "    \n",
    "    EMPTY_THRESHOLD_4ch = np.concatenate([EMPTY_THRESHOLD]*n)#.reshape(-1, 1)\n",
     "    pred_clf = (pred.sum(axis=1)<EMPTY_THRESHOLD_4ch).astype(int)\n",
    "    pred[pred_clf==1, ] = 0#.reshape(-1,)\n",
    "    if logit_clf is not None:\n",
    "        pred[logit_clf.squeeze()<CLF_THRESHOLD, ] = 0\n",
    "    #pos_ratio\n",
    "    print('truth pos_ratio: ', (truth.sum(axis=1)>0).mean())\n",
    "    print('pred pos_ratio: ', (pred.sum(axis=1)>0).mean())\n",
    "    print('truth mean pixel: ', truth.mean())\n",
    "    print('pred mean pixel: ', pred.mean())\n",
    "    return dice_overall(pred, truth)#nonempty_only\n",
    "\n",
    "import copy\n",
    "\n",
    "\n",
    "#nonempty_only = False #for model trained only on nonempty-mask-channels\n",
    "\n",
    "#[[i]*4 for i in np.arange(1, 10000, 2000)] #[[i]*4 for i in np.arange(10000, 45000, 5000)]\n",
    "EMPTY_THRESHOLD_candidate = [[1]*4, [1000]*4, [5000]*4]\n",
    "CLF_THRESHOLD_candidate = [0.5, 0.6, 0.7, 0.8]\n",
    "MASK_THRESHOLD_candidate = [[.3]*4, [.35]*4, [.4]*4, [.45]*4]\n",
    "\n",
    "best_threshold_EMPTY = [1, 1, 1, 1] #[25000, 25000, 25000, 25000]\n",
    "best_threshold_CLF = 0.6 #0.7\n",
    "best_threshold_MASK = [0.3]*4 #0.3\n",
    "best_score = 0\n",
    "\n",
    "opt_mask = 0\n",
    "opt_logit_clf = 0\n",
    "opt_clf = 0\n",
    "\n",
    "if opt_mask + opt_logit_clf + opt_clf == 0:\n",
    "    print('best_threshold_EMPTY: ', best_threshold_EMPTY)\n",
    "    print('best_threshold_CLF: ', best_threshold_CLF)\n",
    "    print('best_threshold_MASK: ', best_threshold_MASK)\n",
    "    best_score = calculate_dice(preds_valid, y_valid, best_threshold_EMPTY, best_threshold_MASK, \n",
    "                         best_threshold_CLF, preds_valid_clf)\n",
    "    print(best_score)\n",
    "\n",
    "##==========================##\n",
    "## pixel 0-1 threshold ##\n",
    "if opt_mask:\n",
    "    for MASK_THRESHOLD in MASK_THRESHOLD_candidate:\n",
    "        EMPTY_THRESHOLD, CLF_THRESHOLD = best_threshold_EMPTY, best_threshold_CLF\n",
    "        dice_score = calculate_dice(preds_valid, y_valid, EMPTY_THRESHOLD, MASK_THRESHOLD, CLF_THRESHOLD, preds_valid_clf)\n",
    "        print('MASK_THRESHOLD: %s, dice_score: %f'%(str(MASK_THRESHOLD), dice_score))\n",
    "        if dice_score>best_score:\n",
    "            best_threshold_MASK = MASK_THRESHOLD\n",
    "            best_score = dice_score\n",
    "\n",
    "##==========================##\n",
    "## logit clf threshold\n",
    "if opt_logit_clf:\n",
    "    for CLF_THRESHOLD in CLF_THRESHOLD_candidate:\n",
    "        EMPTY_THRESHOLD, MASK_THRESHOLD = best_threshold_EMPTY, best_threshold_MASK\n",
    "        dice_score = calculate_dice(preds_valid, y_valid, EMPTY_THRESHOLD, MASK_THRESHOLD, CLF_THRESHOLD, preds_valid_clf)\n",
    "        print('CLF_THRESHOLD: %s, dice_score: %f'%(str(CLF_THRESHOLD), dice_score))\n",
    "        if dice_score>best_score:\n",
    "            best_threshold_CLF = CLF_THRESHOLD\n",
    "            best_score = dice_score\n",
    "\n",
    "##==========================##\n",
    "## remove pixel when sum<threshold ##\n",
    "if opt_clf:\n",
    "    for EMPTY_THRESHOLD in tqdm_notebook(EMPTY_THRESHOLD_candidate):\n",
    "        MASK_THRESHOLD, CLF_THRESHOLD = best_threshold_MASK, best_threshold_CLF\n",
    "        dice_score = calculate_dice(preds_valid, y_valid, EMPTY_THRESHOLD, MASK_THRESHOLD, CLF_THRESHOLD, preds_valid_clf)\n",
    "        print('EMPTY_THRESHOLD: %s, dice_score: %f'%(str(EMPTY_THRESHOLD), dice_score))\n",
    "        if dice_score>best_score:\n",
    "            best_threshold_EMPTY = copy.deepcopy(EMPTY_THRESHOLD)\n",
    "            best_score = dice_score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "([1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.7, 0.6328574237403071)"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "best_threshold_EMPTY, best_threshold_MASK, best_threshold_CLF, best_score\n",
    "\n",
    "#[34000, 36000, 34000, 20000], [0.7, 0.7, 0.7, 0.7], 0.656845819992622"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "# ## experiment?\n",
    "# best_threshold_EMPTY, best_threshold_MASK, best_score = ([50000, 50000, 50000, 50000], \n",
    "#                                                          [0.65, 0.65, 0.65, 0.65], None)\n",
    "# best_threshold_EMPTY, best_threshold_MASK, best_score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def predict_mask(logit, EMPTY_THRESHOLD, MASK_THRESHOLD, CLF_THRESHOLD=0.7, logit_clf=None, use_sigmoid=True):\n",
    "    \"\"\"Transform each prediction into mask.\n",
    "    input shape: (256, 256)\n",
    "    \"\"\"\n",
    "    #pred mask 0-1 pixel-wise\n",
    "    #n = logit.shape[0]\n",
    "    #IMG_SIZE = logit.shape[-1] #256\n",
    "    #EMPTY_THRESHOLD = 100.0*(IMG_SIZE/128.0)**2 #count of predicted mask pixles<threshold, predict as empty mask image\n",
    "    #MASK_THRESHOLD = 0.22\n",
    "    #logit = torch.sigmoid(torch.from_numpy(logit)).view(n, -1)\n",
    "    #pred = (logit>MASK_THRESHOLD).long()\n",
    "    #pred[pred.sum(dim=1) < EMPTY_THRESHOLD, ] = 0 #bug here, found it, the bug is input shape is (256, 256) not (16,256,256)\n",
    "    \n",
    "    #logit = cv2.resize(logit, (525, 350))\n",
    "    if use_sigmoid:\n",
    "        logit = sigmoid(logit)#.reshape(n, -1)\n",
    "        if logit_clf is not None:\n",
    "            logit_clf = sigmoid(logit_clf)\n",
     "    pred = (logit>MASK_THRESHOLD).astype(int)  # np.int removed in NumPy 1.24\n",
     "    if pred.sum() < EMPTY_THRESHOLD:\n",
     "        return np.zeros(pred.shape).astype(int)\n",
     "    if logit_clf is not None and logit_clf<CLF_THRESHOLD:\n",
     "        return np.zeros(pred.shape).astype(int)\n",
    "    else:\n",
    "        return pred"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAQ9klEQVR4nO3df4wc9X3G8ffDHWATm2LHAfyD4PAjSCaqgDOXqEGtWlMUHLuhrlKgieJUAZOKtrRRRE0bNShNhFMRtY1KG5P+MLFqglPFQAVuAtSmKi0pvsgOwYTYUAdjHzbUxrUDjgP59I+Ztcbrnbm99c7t7O7zkla3OzM7+93nPjP3ve/szigiMDMzM7P2OqnTDTAzMzPrRe5kmZmZmZXAnSwzMzOzEriTZWZmZlYCd7LMzMzMSuBOlpmZmVkJ3MnqM5LmSgpJg+nj9ZKWtrCed0o6JGmg/a20qnHdWKtcO9aKXqmbrupkpUHVbj+T9Ebm8UcmuC2T0gKYU9L6d2Te3x5J/yhpSrtfJyKujoh7mmzPlZnnvRgRUyLirXa3qd1cN66bVrl2XDutcN24bmq6qpOVBjUlIqYALwKLM9P+aTzrqvWOK25x+l4vAy4HPpOdqURX/Q47wXXjummVa8e10wrXjeumpqfetKT3S/qOpAOSdkv6i8xQY603/zuSnge+n07/oKRtkl6T9JeSnpT00cw6b5L0nKR9kh6SNDud9e/pz+fSHvw1Zb2viNgFrAfeI2mjpC9IegJ4HThP0s9J+ntJo5J2Sfq80qFRSQOS7pT0qqQXgA/WZbZR0g2ZxzdKelbSQUlbJV0maTXwTuBf0vd6q44fyp0l6cE0p+2Sbsys83ZJayV9LV3vM5LmZ+b/Udrug2nWC8rKshHXjeumVa4d104rXDd9VDcR0ZU3YAdwZd20YZJe9ABwPrAd+GQ6bxIQwEPAGcBk4GzgELAIOBm4Ffgp8NH0OdcBzwLvTud/HthQt745Zb8/4BzgGeDPgI0k/xldDAym7bofWAm8DTgT+G/gpvS5nwR+kK5jOrAhbfdgOn8jcEN6/8PArjRDARcA5zbKG5hbt57Hgb9Jc7kEeAVYkM67HTgMLEx/N3cAT6bzLgJ2ArMy6z3fdeO6qVLduHZcO64b100rdVPaDqnsW6PCbbDMcuDeukL7hcz8ZbVCTB+fBOzNFO4G4COZ+SenhX3WBBXuIeA14EdpUUxOC+1zmeXOAn4CTM5Muz6zgf0b6cabPr6qoHC/BdzSTN7Zwk03ireAqZn5dwCrMoX7aGbePOCN9P4FaeZXAie7blw3Vawb145rx3XjummlbrrhWG/TJM0DvkRyXHgySahP1C22M3N/VvZxRPxM0q7M/HOBr0i6KzPtTWAOcKCNTc9zTUQ8mp0gCY59D+eSbFCj6TxINsDaMrPqlv9RweudAzzfQjtnAfsi4mDd68zPPH45c/91YJKkwYjYLukPSIr7YknfAj4VEbtbaEdLXDeum1a5dlw7rXDd9E/d9NRnsoCvAt8lGcI7HfgcyVBiVmTuj5IUIQBKPpg3OzN/J/DxiDgjc5scESN165lo2dfeSfLfwYxMG0+PiIvT+aMkBVnzzoL17iQZuh7rNevtBqZLmlr3Ortylj92xRFrIuIKko0wgC8287w2ct24blrl2nHttMJ10yd102udrKnAgYg4JOli4MYxln8QeK+khemH4j4FTMvM/wrwGUkXAUiaJuk3ACLiJyT/IZzX7jcxHhExCnwb+JKk0yWdJOl8Sb+ULrIW+H1JcyRNIxmWzvN3wKclDSlxgaRz03l7yHmvEbET+E/gjvRDmz8PfAIY81s0ki6S9CuSTiU5Fv4GyXDuRHLduG5a5dpx7bTCddMnddNrnaw/BG6QdAi4C7ivaOH0l3498GXgVZL/FJ4m6W0TEfcCfw18U9L/AZuBX82s4k+Bbyj5tsevtfm9jMfH
gFOArcB+4J+Bmem8r5Icv95C8p/TN/NWEhHfAL4ArAEOknxIcXo6+w6Sjfg1SZ9u8PTrSY597wbWAZ+NiEeaaPupwAqS/F8m+TDkHzfxvHZy3bhuWuXace20wnXTJ3Wj9ANdxtHzkbxMcs6P/+p0e6w7uG6sVa4da4Xrpnv02kjWuEm6Wsm5OyYBnyX5oNtIh5tlFee6sVa5dqwVrpvuVEonS9IHlJyoa7ukouOqVfCLwP+QfDVzAfDrEXGkzBfssnwmVBdl47qpkC7LxrVTIV2UjeumC7X9cKGSs7f+kOR48EvAU8D1EbG1rS/UpZxPPmeTz9nkczbFnE8+Z5PP2bRHGSNZw8D2iHgh7WV/HfhQCa/TrZxPPmeTz9nkczbFnE8+Z5PP2bRBGScjnc2xJxR7CXhv/UKSlpGcxZbJkycPzZ07t4SmVNKPSYq15rh8+jWb2bNns2vXrsOZSa6dVJpN9tCAs0k5mzF5n5OjmX2OsznK21XGs88++1pETBtruTI6WfUnVIMGJweLiLuBuwHmzZsXq1evLqEp1TN//vz9DSYfk0+/ZvPoo4+yfPnyQ3WTXTsczeb1usnOBmczFu9z8jWzz3E2x/B2lZo/f/5oM8uV0cl6iWPP2jqH5HwUJ2RoaGhcy4+MHP+li/GuowyTJk168/Dhw23PpxeceeaZkJxDpcbZpNJsTs5McjYpZzOmNylhn9wLqrzP6fTfqyNHjkAPZJPtCwwNDTXsG5SpjE7WU8CFkt5Fcqr664DfKuF1utLAwMAbOJ+G5s2bB8l1opxNnTSbU5zN8ZzNmLzPyeF9Tr7LL78ceiCb+g5ZXgetrM5X2ztZEfGmpN8lOXPrAPAPEfFMu1+nyET3VMdDyYUxO5pPuxX9VzEyMtJwfnZ67fc1ODgI8CI9lE27pNm8jLM5jrNpSk/tc9rF+5x8/ZbNWKNjrfYryhjJIiIeBh4uY92t6vTQa1YV82nFWEOvtXm1DlW2Y5X9fdSt50BEzMcaORQR8zrdiIpyNgV6ZZ9TEu9z8jmbVH0fYsqUKU09r5ROVhXljahY67KdKOC4kamiZc3MzMar2/6O91wnK++PeLf9YqpsrCyz88fTuXUHzMzMeknfX7vQxjY0NNTwMF+zzzUzM+tHPTeSZe3jDpKZmVnrPJJlDTXqYJXd6XKnzszMeklPjWT5Mz2tcefGzHpJ0ZdwavP998ImQl+MZPV7J6Lo/fd7NmbWG2qfHa0/PUzR50m9/7Oy9dRIljXm/9jMrJc1+43n+g6YWasOHaq/rGNjPdPJckeiMQ+Lm1kv6LdOkffb1XCiv4ee6WSZmZlZbzvRE4tnT55dv64yOrZ98Zmsfub/hsysG/nQnrXbeP4etqvmPJLVx7zjMrMqavXkx9Yf8kazxtOJGutSb+0aoOiJTlZRGN5Izcy6g/fX1qxuOUrjw4VmZmZmJXAny8zMzKwE7mSZmZmZlaDrO1ndclzWzMyKeX9uvabrO1lmZtY7RkZG3NmyntET3y7M42+qFMuelK0KvGO1flT29jdlypRS12/W7cq8MkpXj2T5j3J7OEczM+tV9RcOn0g9PZJlzavvaDVTkOO5JEFtuUYnkXMnz8zqnejlU6y/NaqdonoqazSrazpZrbz5qh0Og+av3N1pzebdynLuVJlZM9zRsrG0sz6K1tXq362uPlxoZmZmVpZWjvJkdc1I1onwyImZmZmNV6PraA4NDTX9hRKPZJmZWSX5UKEV6Yb6cCfLzMwqyefMsm7nTpaZmVWaO1tWrxtGscCdLDMz6xLuaFm3cSfLzMzMrATuZJmZmZmVoKlTOEjaARwE3gLejIj5kqYD9wFzgR3Ab0bEfkkC/gpYCLwOfDwivtv+plfD4sWLOe200xgYGGBgYIDVq1dz4MABbrvtNkZHR5k5cyYrVqzg9NNPJyIAzpK0HWdzXDZ33nknwHskfY8ezwZcO0WcTb65c+cydepUBgYGGBwcZNOmTezbt49rr72W
HTt2MHfuXNauXcu0adOICA4fPtxT2RSdZHq82dxyyy3gfU7RdnVOVWunW05UO56RrF+OiEsiYn76eDnwWERcCDyWPga4GrgwvS0D/rZdja2qlStXsmbNGlavXg3AqlWrGB4eZt26dQwPD7Nq1SoAnnjiCYBTcTYNs9m5cyfA9+mTbMC1U8TZ5NuwYQObN29m06ZNAKxYsYIFCxawbds2FixYwIoVKwBYv349EdGT2dQ+DJ+9wfiy2bZtG3ifU7RdTaLCtdOoBqr2JYkTOVz4IeCe9P49wDWZ6V+LxJPAGZJmnsDrdJ3HH3+cRYsWAbBo0SI2btx4dDrwmrNpnM3ChQsB6NdswLVTxNnke+CBB1i6dCkAS5cu5f777z86fXBwsG+yOXLkCFu2bDn6eGRkhPvuu49LL70UOD6bK664AvA+p2C7+t9+qZ2yNHvG9wC+LSmAlRFxN3BWRIwCRMSopDPTZWcDOzPPfSmdNppdoaRlJL1jzj777NbfQYdJ4uabb0YSS5YsYcmSJezbt48ZM2YAMGPGDPbv3w/AK6+8AvDTzNOdTSabuvfaMJt0vX2ZD64dZ0OSzVVXXYUkbrrpJpYtW8aePXuYOTP5+zdz5kz27t0LwK5du5DUV9nk1U1tdGN0dJSRkRG2bt3K8PBw9uk9nQ20tF0dyTy9q/IpGs2qHWYc65Bjdh21C0iP9xCl0uOuxQtJsyJid9qRegT4PeDBiDgjs8z+iJgm6SHgjoj4j3T6Y8CtEZH7jiUdBJ4bV8snzgzg1YL5J5Ps4AeBdwMvAhcAmzPLXJI+vgCIiJgGziaVzeZl4O0R8Y5msoFK5zNWNtC/teNsinmfk6+j+xxJrwA/HqMNneLtKl8z2YzXuRHxjrEWamokKyJ2pz/3SloHDAN7JM1MR7FmAnvTxV8Czsk8fQ6we4yXeC7zWa9KkbSp2bZJuh04BNwILM5kszH9ssBKYGPmKc6mLpuIuDd9SjPZQEXzGU826fK30ye142yKeZ+Tr9P7nLQzNq76nSjervJ18nc25meyJL1N0tTafeAqkg8KPggsTRdbCjyQ3n8Q+JgS7wMO1A4r9hpnk8/ZFHM++ZxNPmeTz9kUcz6d0cxI1lnAOkm15ddExL9KegpYK+kTJEOOH06Xf5jk9A21r33+dttbXR3OJp+zKeZ88jmbfM4mn7Mp5nw6oKnPZJXeCGlZ+mH6yul02zr9+kWq0LYqtKGRKrSrCm1opArtqkIb8nS6bZ1+/SJVaFsV2tBIFdpVhTY00sl2VaKTZWZmZtZrfFkdMzMzsxK4k2VmZmZWgo53siR9QNJzkrZLWj72M9r++jskPS1ps6RN6bTpkh6RtC39WTtPiCR9OW3r9yRdVnLbnE1+2zqaTdqGSubjbMZsm7er/LY5m/y2ebvKb5ezyRMRHbsBA8DzwHnAKcAWYN4Et2EHMKNu2p8Dy9P7y4EvpvcXAusBAe8DvuNs+jObqubjbKqfj7NxNr2Uj7MpvnV6JGsY2B4RL0TEEeDrJNc+7LQqXJfR2eSrajbQ+XycTbGq5uNs8jmbYp3Ox9kU6HQnK+86hxOpdl3GESXXYIK66zICY12XsQzOJl8VsoFq5uNsilUhH2eTz9kUq2I+zqZAsxeILosaTJvoc0q8PzLXZZT0g4JlJ7K9zqYar1Wkivk4m2JVyMfZ5HM2xaqYj7Mp0OmRrFauc9hWkbkuI3DMdRkBdOLXZWyVs8nX8Wygsvk4m2Idz8fZ5HM2xSqaj7Mp0OlO1lPAhZLeJekU4DqS6yVNCFX7Wk7OJl9Hs4FK5+Nsinm7yuds8nm7yudsipT1ifpmbySf8v8hybcT/mSCX/s8km9CbAGeqb0+8HbgMWBb+nN6Ol3AXWlbnwbmO5v+y6bq+Tib6ubjbJxNL+bjbPJvvqyOmZmZWQk6fbjQzMzMrCe5k2VmZmZWAneyzMzMzErgTpaZmZlZCdzJMjMzMyuBO1lmZmZmJXAny8zMzKwE/w+X
ughjhdoLRwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAP/UlEQVR4nO3dfYwc9X3H8feHMw82tosdB/ADxuEhIBNViLs6UYNatdAoOHZDXaUKShRSBUgq+hhF1LRRg9JEkIqobVTamLSNiRWTkCoursBNgNpUpSUFR3YIEIKhDuZ8PBXj2jkSA/n2j/kdmlt25vbWO7ezu5+XtPLuzuzsbz/3nbmvZ+Z2FBGYmZmZWWcd0+0BmJmZmfUjN1lmZmZmFXCTZWZmZlYBN1lmZmZmFXCTZWZmZlYBN1lmZmZmFXCTNWAkrZAUkmalx9skXd7GcpZLOixpqPOjtLpx3Vi7XDvWjn6pm55qslJQE7efSXo59/gDMzyWE1IBLKto+Xtzn+9ZSV+WNLfT7xMRl0TELS2O5+Lc656KiLkR8Vqnx9RprhvXTbtcO66ddrhuXDcTeqrJSkHNjYi5wFPA2txzX53Osia645pbmz7rBcAvAJ/MT1Smp36G3eC6cd20y7Xj2mmH68Z1M6GvPrSkd0r6jqSDkvZL+svcrsaJbv53JD0BfD89/x5Jj0t6SdJfSbpf0gdzy/yopMckvSjpDklL06R/T/8+ljr4S6v6XBExCmwD3iZph6TPSroPGAfOkPRzkv5B0pikUUmfUdo1KmlI0o2SXpD0JPCehsx2SLoi9/hKSY9KOiTpEUkXSNoELAf+JX3Wa/TGXblLJG1NOe2RdGVumddJuk3SV9JyH5Y0kpv+x2nch1LWF1WVZTOuG9dNu1w7rp12uG4GqG4ioidvwF7g4obnVpF10UPAmcAe4GNp2glAAHcAJwGzgVOBw8Aa4FjgGuAV4IPpNe8HHgXemqZ/BtjesLxlVX8+4DTgYeDPgR1k/zM6D5iVxvXPwAbgROBk4L+Bj6bXfgz4QVrGQmB7GvesNH0HcEW6/z5gNGUo4Czg9GZ5AysalnMv8Lcpl/OB54GL0rTrgJ8Aq9PP5nrg/jTtHGAfsCS33DNdN66bOtWNa8e147px3bRTN5VtkKq+NSvcJvOsB25tKLRfzE2/aqIQ0+NjgOdyhbsd+EBu+rGpsE+ZocI9DLwE/CgVxexUaJ/OzXcK8FNgdu65y3Ir2L+RVt70+F0lhfst4A9ayTtfuGmleA2Yl5t+PbAxV7h356atBF5O989KmV8MHOu6cd3UsW5cO64d143rpp266YVjvS2TtBL4PNlx4dlkod7XMNu+3P0l+ccR8TNJo7nppwNflHRT7rlXgWXAwQ4OvcilEXF3/glJMPkznE62Qo2laZCtgBPzLGmY/0cl73ca8EQb41wCvBgRhxreZyT3+Jnc/XHgBEmzImKPpD8kK+7zJH0L+HhE7G9jHG1x3bhu2uXace20w3UzOHXTV+dkAV8Cvku2C28+8GmyXYl5kbs/RlaEACg7MW9pbvo+4MMRcVLuNjsidjYsZ6bl33sf2f8OFuXGOD8izkvTx8gKcsLykuXuI9t1PdV7NtoPLJQ0r+F9Rgvmn7zgiM0RcSHZShjA51p5XQe5blw37XLtuHba4boZkLrptyZrHnAwIg5LOg+4cor5twJvl7Q6nRT3cWBBbvoXgU9KOgdA0gJJvwkQET8l+x/CGZ3+ENMREWPAt4HPS5ov6RhJZ0r65TTLbcDvS1omaQHZbukifw98QtKwMmdJOj1Ne5aCzxoR+4D/BK5PJ23+PPARYMq/opF0jqRflXQ82bHwl8l2584k143rpl2uHddOO1w3A1I3/dZk/RFwhaTDwE3A18tmTj/0y4AvAC+Q/U/hIbJum4i4Ffgb4JuS/g/YBfxabhF/BnxD2V97/HqHP8t0fAg4DngE
OAD8E7A4TfsS2fHr3WT/c/pm0UIi4hvAZ4HNwCGykxQXpsnXk63EL0n6RJOXX0Z27Hs/sAX4VETc1cLYjwduIMv/GbKTIf+khdd1kuvGddMu145rpx2umwGpG6UTuozXv4/kGbLv/Pivbo/HeoPrxtrl2rF2uG56R7/tyZo2SZco++6OE4BPkZ3otrPLw7Kac91Yu1w71g7XTW+qpMmS9G5lX9S1R1LZcdU6+CXgf8j+NPMi4Dci4kiVb9hj+cyoHsrGdVMjPZaNa6dGeigb100P6vjhQmXf3vpDsuPBTwMPAJdFxCMdfaMe5XyKOZtizqaYsynnfIo5m2LOpjOq2JO1CtgTEU+mLvtrwHsreJ9e5XyKOZtizqaYsynnfIo5m2LOpgOq+DLSpUz+QrGngbc3ziTpKrJvsWX27NnDK1asqGAotfRjsmKd8IZ8BjWbpUuXMjo6+pPcU1PWzoknnjh87rnnVjqu8fHxSpffipRN/tBA17KpQx557WQzSOsV3uYUamWb42xe5/Uq59FHH30pIhZMNV8VTVbjF6pBky8Hi4ibgZsBVq5cGZs2bTqqNx0eHj6q17dr587pnXc4MjJyoMnTk/LpdDa94u6772b9+vWHG54urZ2RkZF48MEHKx3XdH/GVUjZNHY3XcmmDnnktZPNIK1X3uYUa2Wb42wm6Yv1qtV+oWxbNzIyMtbKMqposp5m8re2LiP7PgrLvIrzaerkk0+G7DtUJjibJGVzbO6prmRTtwYL6pNNjXmbU8DbnGL9lE27O2GavW6628Aqzsl6ADhb0lskHUd2dfCtFbxPr3oZ59PUypUrIbtOlLNpkLI5rpvZ1LHBgnpkU3Pe5hTwNqdYv2TT6aNcw8PD01pmx/dkRcSrkn6X7Jtbh4B/jIiHO/0+Pc75NDFr1iyAp3A2b5CyeQZn8wbOpiXe5jThbU6xfsimW6cR5VVxuJCIuBO4s4pl9wPnU+pgRIxMPdtAOhwRK7s9iJpyNiW8zSnlbU6xns2mDg0WVNRkmZmZmVWpLo1UmYG/rI6ZtaYXNmhmZlWbzvmpbrLMrGVutMys26Z78nk3+XChmZmZ1V6vNFZ5brLMrCV1/QoHM+t/dWmwprsd7Kkmqy4hmw0aN1hm1avD77g6ret1yONo9VSTZdZofHy8VhuFfuNszWym9UNzNcFNlpkBbqjMZlI/NRKd1G+5uMkyGwCNDdTw8LCbKjObpJvbhF5ortrJZ2CbLP+CsUHTuBHLP/b6YGbd0gsNVrsGtsky63eN56u5kapOXX5J+GdsvaYu605V3GSZmZkNuJlu0Pu9uZpQuyZrUII3MzMbNL36O77dJtSX1TEzMzOrQO32ZJmZmdnMmYlDhb26B+toDUST5ZNB+9ecOXNaXnldB2ZmNl1H87vDhwvNzMzMKuAmy8zMzCozqIcKwU2WmVlf8OFwa4frplpusszMzMyaONom1E2WmZmZVWKQDxVCDf+60Lsuzcymx9tNa0fVdTPoDRZ4T5aZmZlZJdxkmZmZWUf1w16sTuzpc5NlZtbDfKjQ2jFV3RxNk9QPDVanuMkyMzMzq4CbLDMzM+sI78WazE2WmZmZWc5Uh1Pnzp3b0nLcZJmZ9Sifj2XtqKpu5syZU8lye5mbLDOzHuVDM9aOqeqm3boaHx9v63X9rKUvI5W0FzgEvAa8GhEjkhYCXwdWAHuB34qIA5IE/DWwGhgHPhwR3+380Oth7dq1zJkzh6GhIYaGhti0aRMHDx7k2muvZWxsjMWLF3PDDTcwf/58IgLgFEl7cDZvyObGG28EeJuk79Hn2YBrp4yzKbZixQrmzZvH0NAQR44ccTY53uaUm6idI0eOlOYDTNTOaYNSO1WZzp6sX4mI8yNiJD1eD9wTEWcD96THAJcAZ6fbVcDfdWqwdbVhwwY2b97Mpk2bANi4cSOrVq1iy5YtrFq1io0bNwJw3333ARyPs2mazb59+wC+z4BkA66dMs6m2Pbt29m1a5ez
acLbnHLbt29vKZ9t27YBnMA0aqdfDhd28nDq0RwufC9wS7p/C3Bp7vmvROZ+4CRJi4/ifXrOvffey5o1awBYs2YNO3bseP154CVn0zyb1atXAzCdbMbHx9m5c2dLt17g2inmbCbL17SzKdbpbU6v271796THRfncfvvtAP87yLXTCa1euzCAb0sKYENE3AycEhFjABExJunkNO9SYF/utU+n58byC5R0FVl3zKmnntr+J+gySVx99dVIYt26daxbt44XX3yRRYsWAbBo0SIOHDgAwPPPPw/wSu7lziaXTcNnbZpNWu5A5oNrp7bZzGQT/8orr3DhhRf2TDYzqYptTr9kA9PLZ3R0FOBI7uVT5rN8+fLqP0SPUTruWj6TtCQi9qdG6i7g94CtEXFSbp4DEbFA0h3A9RHxH+n5e4BrIqJwKyTpEPDYUX6WqiwCXiiZfizZRmwW8FbgKeAsYFdunvPT47OAiIgF4GySfDbPAG+KiDe3kg3UOp+psoHBrR1nU87bnGJd3eZIeh748RRj6BavV8VayWa6To+IN081U0t7siJif/r3OUlbgFXAs5IWp71Yi4Hn0uxPA6flXr4M2D/FWzyWO9erViQ92OrYJF0HHAauBNbmstmR/lhgA7Aj9xJn05BNRNyaXtJKNlDTfKaTTZr/OgakdpxNOW9zinV7m5OasWnV70zxelWsmz+zKc/JknSipHkT94F3kZ0ouBW4PM12OXB7ur8V+JAy7wAOThxW7DfOppizKed8ijmbYs6mmLMp53y6o5U9WacAWyRNzL85Iv5V0gPAbZI+QrbL8X1p/jvJvr5h4s8+f7vjo64PZ1PM2ZRzPsWcTTFnU8zZlHM+XdDSOVmVD0K6Kp1MXzvdHlu3379MHcZWhzE0U4dx1WEMzdRhXHUYQ5Fuj63b71+mDmOrwxiaqcO46jCGZro5rlo0WWZmZmb9xpfVMTMzM6uAmywzMzOzCnS9yZL0bkmPSdojaf3Ur+j4+++V9JCkXZIeTM8tlHSXpMfTvxPfEyJJX0hj/Z6kCyoem7MpHltXs0ljqGU+zmbKsXm9Kh6bsykem9er4nE5myIR0bUbMAQ8AZwBHAfsBlbO8Bj2AosanvsLYH26vx74XLq/GtgGCHgH8B1nM5jZ1DUfZ1P/fJyNs+mnfJxN+a3be7JWAXsi4smIOAJ8jezah91Wh+syOptidc0Gup+PsylX13ycTTFnU67b+TibEt1usoqucziTJq7LuFPZNZig4bqMwFTXZayCsylWh2ygnvk4m3J1yMfZFHM25eqYj7Mp0eoFoquiJs/N9HdKvDNy12WU9IOSeWdyvM6mHu9Vpo75OJtydcjH2RRzNuXqmI+zKdHtPVntXOewoyJ3XUZg0nUZAXT012Vsl7Mp1vVsoLb5OJtyXc/H2RRzNuVqmo+zKdHtJusB4GxJb5F0HPB+suslzQjV+1pOzqZYV7OBWufjbMp5vSrmbIp5vSrmbMpUdUZ9qzeys/x/SPbXCX86w+99BtlfQuwGHp54f+BNwD3A4+nfhel5ATelsT4EjDibwcum7vk4m/rm42ycTT/m42yKb76sjpmZmVkFun240MzMzKwvuckyMzMzq4CbLDMzM7MKuMkyMzMzq4CbLDMzM7MKuMkyMzMzq4CbLDMzM7MK/D9lkYhQNV1LhAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAQCElEQVR4nO3df5AX9X3H8efLO0WIUCFEPUC9oMYZzHQc7ySZptNOi3UigcbSSY0mE9KJYjr2ZyZjsc00TpqMpGOmbaa2wfQHhikm2AlKR2miFuzU1lQugzFqDKhEPA7QghSCxKjv/rF7ZDm+u/e9L9+9736/39dj5obvd3e/u5/vi/d3ebO7t19FBGZmZmbWXKe0egBmZmZmnchNlpmZmVkJ3GSZmZmZlcBNlpmZmVkJ3GSZmZmZlcBNlpmZmVkJ3GR1GUn9kkJSb/p8k6TlDaznPEmHJfU0f5RWNa4ba5RrxxrRKXXTVk1WGtToz1uSXss8/8gkj+X0tADmlbT+nZn3t1fSP0k6o9nbiYirIuKuOsdzReZ1L0bEGRHxZrPH1GyuG9dNo1w7rp1GuG5cN6PaqslKgzojIs4AXgSWZqb980TWNdodV9zS9L1eBlwOfCY7U4m2+jtsBdeN66ZRrh3XTiNcN66bUR31piW9T9J3JB2UtFvSX2YONY52878j6Tng++n0D0jaLulVSX8l6TFJH82s80ZJz0raL+l+SXPTWf+R/vls2sFfXdb7iohhYBPwbklbJH1B0qPAEWC+pJ+T9A+SRiQNS/q80kOjknok3S7pFUnPAx8Yk9kWSddnnt8g6RlJhyQ9LekySWuB84B/Td/rzTrxUO4cSRvTnHZIuiGzzlslrZf0tXS9T0kazMz/43Tch9KsF5WVZS2uG9dNo1w7rp1GuG66qG4ioi1/gJ3AFWOmLSTponuAC4AdwCfTeacDAdwPnAlMBc4BDgNLgFOBm4GfAh9NX/Nh4BngXen8zwObx6xvXtnvDzgXeAr4c2ALyf+MLgF603HdC6wG3gacBfwPcGP62k8CP0jXMQvYnI67N52/Bbg+ffwhYDjNUMCFwPm18gb6x6znEeBv01wuBV4GFqXzbgWOAovTv5vbgMfSeRcDu4A5mfVe4Lpx3VSpblw7rh3XjeumkbopbYdU9k+twq2xzErg7jGF9guZ+StGCzF9fgqwL1O4m4GPZOafmhb22ZNUuIeBV4EfpUUxNS20z2WWOxv4CTA1M+3azAfs30k/vOnzKwsK91vAH9STd7Zw0w/Fm8D0zPzbgDWZwn0oM28B8Fr6+MI08yuAU103rpsq1o1rx7XjunHdNFI37XCut26SFgBfIjkvPJUk1EfHLLYr83hO9nlEvCVpODP/fOArku7ITHsDmAccbOLQ81wdEQ9lJ0iC49/D+SQfqJF0HiQfwNFl5oxZ/kcF2zsXeK6Bcc4B9kfEoTHbGcw835N5fAQ4XVJvROyQ9IckxX2JpG8Bn4qI3Q2MoyGuG9dNo1w7rp1GuG66p2466pos4KvAd0kO4c0APkdyKDErMo9HSIoQACUX5s3NzN8FfDwizsz8TI2IoTHrmWzZbe8i+d/B7MwYZ0TEJen8EZKCHHVewXp3kRy6Hm+bY+0GZkmaPmY7wznLH7/iiHUR8YskH8IAvljP65rIdeO6aZRrx7XTCNdNl9RNpzVZ04GDEXFY0iXADeMsvxF4j6TF6UVxnwJmZuZ/BfiMpIsBJM2U9JsAEfETkv8hzG/2m5iIiBgBvg18SdIMSadIukDSL6eLrAd+X9I8STNJDkvn+Xvg05IGlLhQ0vnpvL3kvNeI2AX8F3BbetHmzwOfAMb9LRpJF0v6VUlTSM6Fv0ZyOHcyuW5cN41y7bh2GuG66ZK66bQm64+A6yUdBu4AvlG0cPqXfi3wZeAVkv8pPEnSbRMRdwN/A3xT0v8B24Bfy6ziz4B7lPy2x683+b1M
xMeA04CngQPAvwB96byvkpy/foLkf07fzFtJRNwDfAFYBxwiuUhxVjr7NpIP8auSPl3j5deSnPveDWwAPhsRD9Yx9inAKpL895BcDPkndbyumVw3rptGuXZcO41w3XRJ3Si9oMs4dj+SPST3/PjvVo/H2oPrxhrl2rFGuG7aR6cdyZowSVcpuXfH6cBnSS50G2rxsKziXDfWKNeONcJ1055KabIkvV/Jjbp2SCo6r1oFvwS8QPKrmYuA34iI18vcYJvlM6naKBvXTYW0WTaunQppo2xcN22o6acLldy99Yck54NfAh4Hro2Ip5u6oTblfPI5m3zOJp+zKeZ88jmbfM6mOco4krUQ2BERz6dd9teBD5awnXblfPI5m3zOJp+zKeZ88jmbfM6mCcq4Gelcjr+h2EvAe8YuJGkFyV1smTp16kB/f38JQ6mkH5MU66gT8unWbObOncvw8PDRzCTXTirNJntqYNKymTZt2gnTjhw50tDryjB//nxeeOGFlmTTJrzPyVHPPsfZHOPPVcYzzzzzakTMHG+5MpqssTdUgxo3B4uIO4E7ARYsWBBr164tYSjVMzg4eKDG5OPy6dZsHnroIVauXHl4zGTXDseyGdvZNC2bgYGBY4+HhoZqTs/KLlPPOst0zz33cN1115WWTbvzPidfPfscZ3Mcf65Sg4ODI/UsV0aT9RLH37V1Hsn9KCzxBs6nprPOOguSe6iMcjapNJtTM5Oams3Q0NCxpmiymqNmmTdvHm+99VZp2XQA73NyeJ+Tz9k0RxnXZD0OXCTpnZJOI/l28I0lbKddvYbzqWnBggWQfE+Usxkjzea0MrMZGho67qcZ65sMl19+ORFRajZtzvucHN7n5HM2zdH0I1kR8Yak3yW5c2sP8I8R8VSzt9PmnE8Nvb29AC/ibE6QZrOHScwme3Srynp7e5kyZcqeo0ePum7yeZ9Tg/c5+ZxNc5RxupCIeAB4oIx1dwLnU+hgRAyOv1h1TGIjcjgiFpS5gYGBgbZprrJ6e3tLz6adeZ9TqO32OZPI2Zykrr/ju5n9zOgpvvFOG7ZbE2Zm1gpussxO0mRde9Qqtd5fve+507MxMyviJsvMCtU6auUjWWZm43OTZWa5ipopN1pmZsXcZJlZLp/uMzNrnJssMzMzsxKUcgsHs3ZUdPprvCM67Xjbg3p18nszMyuTj2SZ2bh8Kwczs4lzk2VmpfJ1XWbWrdxkmZmZmZXATZaZmZlZCdxkmdWhnmuPuvW0mK/LMjOrzU2WmdWlW5tIM7NGuckys7qczBErN2hm1o3cZFnXGxgYqKuB8GmxfM7GzOxEbrLMzMzMSuAmy8zq5tN+Zmb1c5Nl1kSd3oT4tKCZWf3cZJlZU4zXgHV6A2pmNpabLLMJ8JEcMzOrl5ssMxuXm0szs4lzk2VmTeNmzMzsZ9xkmU2Qrz0yM7N6uMkys0I+OmVm1hg3WWZWqJlH5nyUz8y6iZssM2sqH/kyM0u4yTJrQLc1Ej4CZWY2cW6yzMzMzErgJsusBD7yY2ZmbrLMGuRThvmKsnEDambdoreehSTtBA4BbwJvRMSgpFnAN4B+YCfwWxFxQJKAvwYWA0eAj0fEd5s/9GpYunQp06ZNo6enh56eHtauXcvBgwe55ZZbGBkZoa+vj1WrVjFjxgwiAuBsSTtwNidkc/vttwO8W9L36PBsAPr7+5k+fTo9PT309vaydetW9u/fzzXXXMPOnTvp7+9n/fr1zJw5szK1MzQ0dFwDNfZ5s0w0m6NHj7Y8m8nifU4+73OKNVA753ZL7ZRlIkeyfiUiLo2IwfT5SuDhiLgIeDh9DnAVcFH6swL4u2YNtqpWr17NunXrWLt2LQBr1qxh4cKFbNiwgYULF7JmzRoAHn30UYApOJua2ezatQvg+3RJNgCbN29m27ZtbN26FYBVq1axaNEitm/fzqJFi1i1ahUAmzZtggrWTplH8yaSTURULpsyeZ+Tz/ucYhOsndPpotopw8mc
LvwgcFf6+C7g6sz0r0XiMeBMSX0nsZ2288gjj7BkyRIAlixZwpYtW45NB151NrWzWbx4MQDtlE2zm4z77ruP5cuXA7B8+XLuvffeY9OpSO206nRfUTa9vb2VyKZVvM/J12n7nGYbp3b+t5trpxnqOl0IBPBtSQGsjog7gbMjYgQgIkYknZUuOxfYlXntS+m0kewKJa0g6Y4555xzGn8HLSaJm266CUksW7aMZcuWsX//fmbPng3A7NmzOXDgAAAvv/wywE8zL3c2mWzGvNea2aTrbWo+rWoaJHHllVciiRtvvJEVK1awd+9e+vqS/VhfXx/79u0DYHh4GCpUO9nMyjiaNdFsJFUmm7J5n5OvjH1Op2QDDdXO65mXd3w+ZVB63rV4IWlOROxOG6kHgd8DNkbEmZllDkTETEn3A7dFxH+m0x8Gbo6I3H/JJB0Cnj3J91KW2cArBfNPJdmJ9QLvAl4ELgS2ZZa5NH1+IRARMROcTSqbzR7g7RHxjnqygUrnM1420L2142yKeZ+Tr6X7HEkvAz8eZwyt4s9VvnqymajzI+Id4y1U15GsiNid/rlP0gZgIbBXUl96FKsP2Jcu/hJwbubl84Dd42zi2cy1XpUiaWu9Y5N0K3AYuAFYmslmS/rLAquBLZmXOJsx2UTE3elL6skGKprPRLJJl7+VLqkdZ1PM+5x8rd7npM3YhOp3svhzla+Vf2fjXpMl6W2Spo8+Bq4kuVBwI7A8XWw5cF/6eCPwMSXeCxwcPa3YaZxNPmdTzPnkczb5nE0+Z1PM+bRGPUeyzgY2SBpdfl1E/Jukx4H1kj5BcsjxQ+nyD5DcvmH01z5/u+mjrg5nk8/ZFHM++ZxNPmeTz9kUcz4tUNc1WaUPQlqRXkxfOa0eW6u3X6QKY6vCGGqpwriqMIZaqjCuKowhT6vH1urtF6nC2KowhlqqMK4qjKGWVo6rEk2WmZmZWafx1+qYmZmZlcBNlpmZmVkJWt5kSXq/pGcl7ZC0cvxXNH37OyU9KWmbpK3ptFmSHpS0Pf1z9D4hkvTldKzfk3RZyWNzNvlja2k26RgqmY+zGXds/lzlj83Z5I/Nn6v8cTmbPBHRsh+gB3gOmA+cBjwBLJjkMewEZo+Z9hfAyvTxSuCL6ePFwCZAwHuB7zib7symqvk4m+rn42ycTSfl42yKf1p9JGshsCMino+I14Gvk3z3YatV4XsZnU2+qmYDrc/H2RSraj7OJp+zKdbqfJxNgVY3WXnfcziZRr+XcUjJdzDBmO9lBMb7XsYyOJt8VcgGqpmPsylWhXycTT5nU6yK+TibAvV+QXRZVGPaZN9T4n2R+V5GST8oWHYyx+tsqrGtIlXMx9kUq0I+ziafsylWxXycTYFWH8lq5HsOmyoy38sIHPe9jAA6+e9lbJSzydfybKCy+TibYi3Px9nkczbFKpqPsynQ6ibrceAiSe+UdBrwYZLvS5oUqvZ3OTmbfC3NBiqdj7Mp5s9VPmeTz5+rfM6mSFlX1Nf7Q3KV/w9JfjvhTyd52/NJfhPiCeCp0e0DbwceBranf85Kpwu4Ix3rk8Cgs+m+bKqej7Opbj7Oxtl0Yj7OJv/HX6tjZmZmVoJWny40MzMz60husszMzMxK4CbLzMzMrARusszMzMxK4CbLzMzMrARusszMzMxK4CbLzMzMrAT/D7cVXfEuLb6WAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAPgklEQVR4nO3dfYwc9X3H8feHMw8mtosdx3C2wQ4PQTJRhfDViZqqVWsaBcduqKqUoKA4VcBJRR+jiJoSNShNhFMRtY1KG5w+mFg1iVNhcAVuwpOpSkuKL4IQIMSGOpizsaE2rh1ICOTbP+Z31vi8M7e3t7M7u/d5SafbnZnd+e3H3/n5tzNzM4oIzMzMzKy9Tup2A8zMzMz6kQdZZmZmZhXwIMvMzMysAh5kmZmZmVXAgywzMzOzCniQZWZmZlYBD7KmGEmLJYWkaen5NkmrW3ifcyQdlTTQ/lZa3bhurFWuHWtFv9RNTw2yUlCjPz+T9Fru+Yc73JbTUgEsrOj9d+c+335J/yRpRrvXExGXRcRtTbbn0tzrno+IGRHxZrvb1G6uG9dNq1w7rp1WuG5cN6N6apCVgpoRETOA54FVuWn/PJH3Gh0d19yq9FkvAX4B+HR+pjI99W/YDa4b102rXDuunVa4blw3o/rqQ0t6j6RvSzosaa+kv8ztahwdzf+upGeB76Xp75e0U9Irkv5K0iOSrsq958clPSPpoKS7JS1Is/49/X4mjeAvr+pzRcQIsA14p6Ttkj4v6WHgVeBcST8n6R8k7ZM0IulzSrtGJQ1IulnSy5KeA94/JrPtkq7OPb9G0tOSjkh6StIlkjYC5wD/mj7rdTpxV+58SVtTTrskXZN7zxslbZb01fS+T0oays3/k9TuIynr5VVl2YjrxnXTKteOa6cVrpspVDcR0ZM/wG7g0jHTlpGNogeA84BdwCfSvNOAAO4GzgCmA2cBR4GVwMnAdcBPgavSaz4EPA28I83/HPDgmPdbWPXnA84GngT+HNhO9s3oImBaatedwK3AW4B5wH8DH0+v/QTw/fQec4AHU7unpfnbgavT4w8CIylDAecDixrlDSwe8z4PAX+bcrkYeAlYnubdCPwYWJH+bW4CHknzLgT2APNz73ue68Z1U6e6ce24dlw3rptW6qayDqnqn0aF22CZtcDtYwrtF3Pz14wWYnp+EnAgV7gPAh/OzT85FfaZHSrco8ArwA9TUUxPhfbZ3HJnAj8BpuemXZnbwB4gbbzp+XtLCvebwB82k3e+cNNG8SYwMzf/JmBDrnDvy81bAryWHp+fMr8UONl147qpY924dlw7rhvXTSt10wvHepsmaQnwRbLjwtPJQn14zGJ7co/n559HxM8kjeTmLwK+LOmW3LQ3gIXA4TY2vcjlEXFffoIkOP4zLCLboPaleZBtgKPLzB+z/A9L1nc28GwL7ZwPHIyII2PWM5R7/mLu8avAaZKmRcQuSX9EVtwXSfom8MmI2NtCO1riunHdtMq149pphetm6tRNX52TBXwF+A7ZLrxZwGfJdiXmRe7xPrIiBEDZiXkLcvP3AB+NiDNyP9MjYnjM+3Raft17yL4dzM21cVZEXJTm7yMryFHnlLzvHrJd1+Otc6y9wBxJM8esZ6Rg+ePfOGJTRPwS2UYYwBeaeV0buW5cN61y7bh2WuG6mSJ102+DrJnA4Yg4Kuki4Jpxlt8KvEvSinRS3CeB2bn5XwY+LelCAEmzJf0WQET8hOwbwrnt/hATERH7gG8BX5Q0S9JJks6T9Ctpkc3AH0haKGk22W7pIn8PfErSUmXOl7QozdtPwWeNiD3AfwI3pZM2fx74GDDuX9FIulDSr0k6lexY+Gtku3M7yXXjummVa8e10wrXzRSpm34bZP0xcLWko8AtwNfLFk7/6FcCXwJeJvum8ATZaJuIuB34G+AOSf8HPAb8eu4t/gz4hrK/9viNNn+WifgI
cArwFHAI+BdgMM37Ctnx68fJvjndUfQmEfEN4PPAJuAI2UmKc9Lsm8g24lckfarBy68kO/a9F9gCfCYi7m2i7acC68jyf5HsZMg/beJ17eS6cd20yrXj2mmF62aK1I3SCV3GseuRvEh2zY//6nZ7rDe4bqxVrh1rheumd/TbnqwJk3SZsmt3nAZ8huxEt+EuN8tqznVjrXLtWCtcN72pkkGWpPcpu1DXLkllx1Xr4JeB/yH708zlwG9GxOtVrrDH8umoHsrGdVMjPZaNa6dGeigb100PavvhQmVXb/0B2fHgF4BHgSsj4qm2rqhHOZ9izqaYsynmbMo5n2LOppizaY8q9mQtA3ZFxHNplP014AMVrKdXOZ9izqaYsynmbMo5n2LOppizaYMqLka6gOMvKPYC8K6xC0laQ3YVW6ZPn7508eLFFTSlln5EVqyjTshnqmazYMECRkZGfpyb5NpJUjb5QwPOJnE243KfU6CZPsfZHOPtKufpp59+JSJmj7dcFYOssRdUgwYXB4uI9cB6gCVLlsTGjRsraEr9DA0NHWow+bh8pmo29913H2vXrj06ZrJrh2PZvDpmsrPB2YzHfU6xZvocZ3Mcb1fJ0NDQvmaWq+Jw4Qscf9XWhWTXo7DMGzifhubNmwfZNVRGOZskZXNybpKzSZzNuNznFHCfU8zZtEcVg6xHgQskvV3SKWR3B99awXp61Ws4n4aWLFkC2X2inM0YKZtTnM2JnM243OcUcJ9TzNm0R9sPF0bEG5J+j+zKrQPAP0bEk+1eT49zPg1MmzYN4HmczQlSNi/ibE7gbJriPqcB9znFnE17VHFOFhFxD3BPFe/dD5xPqcMRMTT+YlPS0YhY0u1G1JSzKeE+p5T7nGLOZpKm/BXfzczMzKrgQZaZmZlZBSo5XGhmZmbWT5YuXXrs8YwZM5p6jfdkmZmZmVXAgywzMzOzEvm9WBPhQZaZmZlZBTzIMjMzM6uAB1lmZmZmBVo9VAgeZJmZmZlVwoMsMzMzswp4kGVmZmZWAQ+yzMzMzCrgQZaZtcXoyaFlJ4kuXbp0UieRmpl10mT7K99Wx6asbv5nPzw83LV1t0uj/JoZaI2d3w9ZmPUqf+mplgdZZtYUd8ZmZhPjQZaZFerEwGrsOrxny6wz/MWpeh5kmXVY3QcRPoxqZtYePvHdzI7xN1szs0w7+kMPsswM8ADLzKzdPMgyMzMzq4AHWWYd5HOOijkbs87xnuvO8CDLzMzMrAIeZJlZ7flbt5n1Ig+yzDrEh8PMbKqaqrfU8nWyzKzrnZ8HoGb9aWzf0isXH25Xn+g9WWZmZtZWze656vYXvKp5kGXWAXX9ttYLmr3ptJlZu7Srz/Ygy8zMzLqi37+AepBlZl3t6Pq9kzWzYnXeQ92OvsmDLDMzM2ur4eHhpgYpdf+SNdn2NfXXhZJ2A0eAN4E3ImJI0hzg68BiYDfw2xFxSJKAvwZWAK8CH42I70yqlTW2atUqTj/9dAYGBhgYGGDjxo0cPnyY66+/nn379jE4OMi6deuYNWsWEQFwpqRdOJsTsrn55psB3inpu/R5NlCv2qnbt8l8NjNnzmTHjh088MAD3q6oV93Ujfucco22q4MHD3LFFVewe/duFi9ezObNm5k9e/Zo7Zw9mdpp9sT3ug+0JmMie7J+NSIujoih9HwtcH9EXADcn54DXAZckH7WAH/XrsbW1a233sqmTZvYuHEjABs2bGDZsmVs2bKFZcuWsWHDBgAefvhhgFNxNg2z2bNnD8D36LNsyjqQutROtzq5ZrLZsWMHw8PD3q5y6lI3deQ+p1x+uwJYt24dy5cvZ+fOnSxfvpx169YBsG3bNoDTaKF2JnpNrDpfQ2uy7ZrM4cIPALelx7cBl+emfzUyjwBnSBqcxHp6zkMPPcTKlSsBWLlyJdu3bz82HXjF2TTOZsWKFQBM1WzAtVPG2RRzNsXc55S76667WL16NQCrV6/mzjvvPDYd+N9O1U5d92ZN
tl3NDrIC+JakYUlr0rQzI2IfQPo9L01fAOzJvfaFNO04ktZI2iFpx6FDh1prfQ1I4tprr+Wqq67ijjvuAODgwYPMnTsXgLlz5zL6+V566SWAn+Ze7mxy2Zx11ln5lzfMJr3vlMyHimun2XMoOiGfzQ033AB4uxpVt7qpkyr6nH7JBo7PZ/369QDs37+fwcFs7DQ4OMiBAwcAGBkZAXg99/K+z6fIZPpFpeOu5QtJ8yNir6R5wL3A7wNbI+KM3DKHImK2pLuBmyLiP9L0+4HrIqKwlZKOAM+0/CmqNRd4uWT+yWSd2DTgHcDzwPnAY7llLk7PzwciImaDs0ny2bwIvDUi3tZMNlDrfMbLBqZu7Tibcu5zinW1z5H0EvCjcdrQLd6uijWTzUQtioi3jbdQUye+R8Te9PuApC3AMmC/pMGI2Jd2IR5Ii78AnJ17+UJg7zireCZ3rletSNrRbNsk3QgcBa4BVuWy2Z7+WOBWYHvuJc5mTDYRcXt6STPZQE3zmUg2afkbmSK142zKuc8p1u0+Jw3GJlS/neLtqlg3/83GPVwo6S2SZo4+Bt5LdqLgVmB1Wmw1cFd6vBX4iDLvBg6PHlbsN86mmLMp53yKOZtizqaYsynnfLqjmT1ZZwJbJI0uvyki/k3So8BmSR8j2+X4wbT8PWSXbxj9s8/faXur68PZFHM25ZxPMWdTzNkUczblnE8XNHVOVuWNkNZExPput6ORbret2+svU4e21aENjdShXXVoQyN1aFcd2lCk223r9vrL1KFtdWhDI3VoVx3a0Eg321WLQZaZmZlZv/FtdczMzMwq4EGWmZmZWQW6PsiS9D5Jz0jaJWnt+K9o+/p3S3pC0mOSdqRpcyTdK2ln+j16nRBJ+lJq63clXVJx25xNcdu6mk1qQy3zcTbjts3bVXHbnE1x27xdFbfL2RSJiK79AAPAs8C5wCnA48CSDrdhNzB3zLS/ANamx2uBL6THK4BtgIB3A992NlMzm7rm42zqn4+zcTb9lI+zKf/p9p6sZcCuiHguIl4HvkZ278Nuq8N9GZ1NsbpmA93Px9mUq2s+zqaYsynX7XycTYluD7Kaus9hxdp+X8Y2cTbF6pAN1DMfZ1OuDvk4m2LOplwd83E2JZq6rU6F1GBap68p8Z7I3ZdR0vdLlu1ke51NPdZVpo75OJtydcjH2RRzNuXqmI+zKdHtPVmt3OewrSJ3X0bguPsyAmjy92VslbMp1vVsoLb5OJtyXc/H2RRzNuVqmo+zKdHtQdajwAWS3i7pFOBDZPdL6gjV+15OzqZYV7OBWufjbMp5uyrmbIp5uyrmbMpUdUZ9sz9kZ/n/gOyvE27o8LrPJftLiMeBJ0fXD7wVuB/YmX7PSdMF3JLa+gQw5GymXjZ1z8fZ1DcfZ+Ns+jEfZ1P849vqmJmZmVWg24cLzczMzPqSB1lmZmZmFfAgy8zMzKwCHmSZmZmZVcCDLDMzM7MKeJBlZmZmVgEPsszMzMwq8P/r4Eo4F6urBQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAO+0lEQVR4nO3de4wV533G8e/jXV8wlxpCwAvY3vgSSziqLLMlUVO1anGtmEDjqkptNyikik1SudcocnEbNVaaCFI5ahvVaUzaBAcVJ6QytiubJsYFV6V1aohwHJsQsEsMyxpwwRR8iWPn1z/mXTQsO7OHw5k9s2efj3S058z1Pc/+ZvTuzOyMIgIzMzMza62z2t0AMzMzs07kTpaZmZlZBdzJMjMzM6uAO1lmZmZmFXAny8zMzKwC7mSZmZmZVcCdrHFGUq+kkNSdPm+QtLSJ5Vws6bikrta30urGdWPNcu1YMzqlbsZUJysFNfj6maTXcp8/NMptOS8VwJyKlr8n9/0OSPqapEmtXk9EXB8R9zbYnmtz870QEZMi4q1Wt6nVXDeum2a5dlw7zXDduG4GjalOVgpqUkRMAl4AFueG/dPpLGuwd1xzi9N3vQb4BeBT+ZHKjKnfYTu4blw3zXLtuHaa4bpx3QzqqC8t6b2SvivpqKT9kv46d6hxsDf/e5KeA36Qhr9f0i5JL0v6G0lPSFqSW+bHJO2UdFjSw5Jmp1H/nn7uTD34G6r6XhHRD2wA3iVps6TPSdoCvApcKunnJP2jpAFJ/ZI+q3RoVFKXpLskvSTpeeD9QzLbLOmW3OdbJe2QdEzSs5KukbQGuBj4l/Rdb9eph3JnSXoo5bRb0q25Zd4paZ2kr6flPiOpLzf+T1O7j6WsF1SV5XBcN66bZrl2XDvNcN2Mo7qJiDH5AvYA1w4ZNp+sF90FXAbsBj6exp0HBPAwcAEwAbgQOA4sAs4Gbgd+CixJ89wE7ADemcZ/Ftg0ZHlzqv5+wEXAM8BfApvJ/jK6CuhO7XoAuAeYCMwA/hv4WJr348AP0zKmAZtSu7vT+M3ALen9B4H+lKGAy4FLhssb6B2ynMeBL6VcrgYOAQvSuDuB14GF6XezAngijbsS2AvMyi33MteN66ZOdePace24blw3zdRNZTukql/DFe4w0ywH7htSaL+YG79ssBDT57OAg7nC3QR8KDf+7FTYM0epcI8DLwM/TkUxIRXaZ3LTzQR+AkzIDbs5t4H9G2njTZ+vKyncbwN/1Eje+cJNG8VbwOTc+BXA6lzhbsyNmwu8lt5fnjK/FjjbdeO6qWPduHZcO64b100zdTMWzvU2TNJc4Atk54UnkIW6Zchke3PvZ+U/R8TPJPXnxl8CfFnS3blhbwJzgKMtbHqRGyJiY36AJDj5O1xCtkENpHGQbYCD08waMv2PS9Z3EfBcE+2cBRyOiGND1tOX+/xi7v2rwHmSuiNit6Q/JivuqyR9G/hEROxvoh1Ncd24bprl2nHtNMN1M37qpqOuyQK+AnyP7BDeFOAzZIcS8yL3foCsCAFQdmHe7Nz4vcBHIuKC3GtCRGwbspzRll/3XrK/Dqbn2jglIq5K4wfICnLQxSXL3Ut26HqkdQ61H5gmafKQ9fQXTH/ygiPWRsQvkW2EAXy+kflayHXjummWa8e10wzXzTipm07rZE0GjkbEcUlXAbeOMP1DwLslLUwXxX0CmJob/2XgU5KuBJA0VdJvAUTET8j+Qri01V/idETEAPAd4AuSpkg6S9Jlkn4lTbIO+ENJcyRNJTssXeQfgE9KmqfM5ZIuSeMOUPBdI2Iv8J/AinTR5s8DHwVG/C8aSVdK+jVJ55KdC3+N7HDuaHLduG6a5dpx7TTDdTNO6qbTOll/Atwi6ThwN/DNsonTL/1m4IvAS2R/KTxN1tsmIu4D/g64X9L/AduBX88t4i+Abyn7b4/faPF3OR0fBs4BngWO
AP8M9KRxXyE7f/0U2V9O9xctJCK+BXwOWAscI7tIcVoavYJsI35Z0ieHmf1msnPf+4H1wKcj4tEG2n4usJIs/xfJLob8swbmayXXjeumWa4d104zXDfjpG6ULugyTtyP5EWye378V7vbY2OD68aa5dqxZrhuxo5OO5J12iRdr+zeHecBnya70G1bm5tlNee6sWa5dqwZrpuxqZJOlqT3KbtR125JZedV6+CXgf8h+9fMBcBvRsQbVa5wjOUzqsZQNq6bGhlj2bh2amQMZeO6GYNafrpQ2d1bf0R2Pngf8CRwc0Q829IVjVHOp5izKeZsijmbcs6nmLMp5mxao4ojWfOB3RHxfOplfwP4QAXrGaucTzFnU8zZFHM25ZxPMWdTzNm0QBU3I53NyTcU2we8e+hEkpaR3cWWCRMmzOvt7a2gKbX0ClmxDjoln/GazezZs+nv7389N8i1k6Rs8qcGnE3ibEbkfU6BRvY5zuYEb1c5O3bseDkipo40XRWdrKE3VINhbg4WEauAVQBz586NNWvWVNCU+unr6zsyzOCT8hmv2WzcuJHly5cfHzLYtcOJbF4dMtjZ4GxG4n1OsUb2Oc7mJN6ukr6+voFGpqvidOE+Tr5r6xyy+1FY5k2cz7BmzJgB2T1UBjmbJGVzdm6Qs0mczYi8zyngfU4xZ9MaVRzJehK4QtI7yG5VfxPwOxWsp1Lz5s2rZLkTJ0587ZVXXhnz+VRh7ty5kD0nytkMkbI5x9mcytmM6DU6YJ9cBe9zijmb1mh5Jysi3pT0+2R3bu0CvhoRz7R6PWOVsgdjOp9hdHd3A7yAszlFyuZFnM0pnE1DvM8Zhvc5xZxNa1RxJIuIeAR4pIpldwLnU+poRPSNPNm4dDwi5ra7ETXlbEp4n1PK+5xizuYMjfs7vpuZmZlVoZIjWXVQ1TVVZmZmZo3wkSwzMzOzCriTZWZmZlYBd7LMzMzMKuBOlpmZmVkF3MkyMzMzq4A7WWZmZmYVcCfLzMzMrAIdcZ+sOt0Ta9u2baXjjx8f+lBzMzMz60Q+kmVmZmZWAXeyzMzMzCrQEacLqzbSKUAzMzOzoXwky8zMzKwC7mSZmZmZVcCdLDMzM7MK+Joss1FyprcamTRpUotaYmZmo6EjOlm+MN3MzMzqxqcLzczMzCrQEUeyzMzMxrpWPL3EZ3bqxUeyzMzMzCrgTpaZmZlZBdzJMjMzM6uAO1lmZmZmFXAny8zMzKwC7mSZmZmZVcCdLDMzM7MKuJNlZmZmVgF3sszMzMwq4E6WmZmZWQUaeqyOpD3AMeAt4M2I6JM0Dfgm0AvsAX47Io5IEvC3wELgVeAjEfG91je9HhYvXsz5559PV1cXXV1drFmzhqNHj3LHHXcwMDBAT08PK1euZMqUKUQEwExJu3E2p2Rz1113AbxL0vfp8GwAent7mTx5Ml1dXXR3d7N161YOHz7MjTfeyJ49e+jt7WXdunVMnTqViOD111937Xi7cjYlvM8p10TtXDReaqcqp3Mk61cj4uqI6EuflwOPRcQVwGPpM8D1wBXptQz4+1Y1tq7uuece1q5dy5o1awBYvXo18+fPZ/369cyfP5/Vq1cDsGXLFoBzcTbDZrN3716AHzBOsgHYtGkT27dvZ+vWrQCsXLmSBQsWsGvXLhYsWMDKlSsB2LBhAxHh2vF2BTibMt7nlDvN2jmPcVQ7VTiT04UfAO5N7+8FbsgN/3pkngAukNRzBusZcx5//HEWLVoEwKJFi9i8efOJ4cDLzmb4bBYuXAjAeM0G4MEHH2Tp0qUALF26lAceeODE8O7ubteOt6thOZti3ueUG6F2/nc8104rNNrJCuA7krZJWpaGzYyIAYD0c0YaPhvYm5t3Xxp2EknLJG2VtPXIkSPNtb4GJHHbbbexZMkS7r//fgAOHz7M9OnTAZg+fTqD3+/QoUMAP83N7mxy2Vx44YX52YfNJi23Y/K57rrrmDdvHqtWrQLgwIED9PRk+7Genh4O
HjwIQH9/P5JcO96unE2JKvY5nZINNFU7b+Rm7/h8qqB03rV8ImlWROyXNAN4FPgD4KGIuCA3zZGImCrpYWBFRPxHGv4YcHtEbCtZ/jFg5xl+l6pMB14qGX822U6sG3gn8AJwObA9N83V6fPlQETEVHA2ST6bF4G3RcTbG8kGap3PSNnA+K0dZ1PO+5xibd3nSDoEvDJCG9rF21WxRrI5XZdExNtHmqihC98jYn/6eVDSemA+cEBST0QMpEOIB9Pk+4CLcrPPAfaPsIqduWu9akXS1kbbJulO4DhwK7A4l83m9M8C9wCbc7M4myHZRMR9aZZGsoGa5nM62aTp72Sc1I6zKed9TrF273NSZ+y06ne0eLsq1s7f2YinCyVNlDR58D1wHdmFgg8BS9NkS4EH0/uHgA8r8x7g6OBpxU7jbIo5m3LOp5izKeZsijmbcs6nPRo5kjUTWC9pcPq1EfGvkp4E1kn6KNkhxw+m6R8hu33D4L99/m7LW10fzqaYsynnfIo5m2LOppizKed82qCha7Iqb4S0LCJWtbsdw2l329q9/jJ1aFsd2jCcOrSrDm0YTh3aVYc2FGl329q9/jJ1aFsd2jCcOrSrDm0YTjvbVYtOlpmZmVmn8WN1zMzMzCrgTpaZmZlZBdreyZL0Pkk7Je2WtHzkOVq+/j2Snpa0XdLWNGyapEcl7Uo/B+8TIklfTG39vqRrKm6bsyluW1uzSW2oZT7OZsS2ebsqbpuzKW6bt6vidjmbIhHRthfQBTwHXAqcAzwFzB3lNuwBpg8Z9lfA8vR+OfD59H4hsAEQ8B7gu85mfGZT13ycTf3zcTbOppPycTblr3YfyZoP7I6I5yPiDeAbZM8+bLc6PJfR2RSrazbQ/nycTbm65uNsijmbcu3Ox9mUaHcnq6HnHFas5c9lbBFnU6wO2UA983E25eqQj7Mp5mzK1TEfZ1OiocfqVEjDDBvte0q8N3LPZZT0w5JpR7O9zqYe6ypTx3ycTbk65ONsijmbcnXMx9mUaPeRrGaec9hSkXsuI3DScxkBdObPZWyWsynW9mygtvk4m3Jtz8fZFHM25Wqaj7Mp0e5O1pPAFZLeIekc4Cay5yWNCtX7WU7Oplhbs4Fa5+Nsynm7KuZsinm7KuZsylR1RX2jL7Kr/H9E9t8Jfz7K676U7D8hngKeGVw/8DbgMWBX+jktDRdwd2rr00Cfsxl/2dQ9H2dT33ycjbPpxHycTfHLj9UxMzMzq0C7TxeamZmZdSR3sszMzMwq4E6WmZmZWQXcyTIzMzOrgDtZZmZmZhVwJ8vMzMysAu5kmZmZmVXg/wEEvAPz6MswWAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAPe0lEQVR4nO3df4wc9X3G8ffDHRg72MWOA5xt8MVAkExUITicqEGtWlMUHLuhrlICieJUAZOK/owiatqoQWkinIqobVTamPSHiVWTmCoGV+AmQG2q0pLii0wIJsQGHMz5sKE2rh1w+PXpH/M9d7zc7N0tO7uzt89LWt3uzOzMd5/7zOi7M7MzigjMzMzMrLlOaHcDzMzMzCYjd7LMzMzMSuBOlpmZmVkJ3MkyMzMzK4E7WWZmZmYlcCfLzMzMrATuZHUZSf2SQlJver1Z0ooG5nOWpCOSeprfSqsa1401yrVjjZgsddNRnawU1MjjTUmv5F5/rMVtOTkVwLyS5r879/n2SfpHSac0ezkRcXlE3D7O9lyae9+zEXFKRLzR7DY1m+vGddMo145rpxGuG9fNiI7qZKWgTomIU4BngWW5Yf80kXmN9I4rbln6rBcCFwOfy49UpqP+h+3gunHdNMq149pphOvGdTNiUn1oSR+Q9D1JhyTtlfQXuV2NI73535b0FPDDNPxDknZKeknSX0p6WNLHc/O8TtKTkg5IukfS3DTq39PfJ1MP/oqyPldEDAGbgfdK2irpS5IeAl4GFkj6OUl/L2lY0pCkLyrtGpXUI+kWSS9Kehr4UE1mWyVdk3t9raQnJB2WtEPShZLWAWcB/5I+6w16667cOZI2pZx2Sbo2N8+bJG2Q9I0038clDeTG/1Fq9+GU9eKyshyN68Z10yjXjmunEa6bLqqbiOjIB7AbuLRm2CKyXnQPcDawC/h0GncyEMA9wKnAVOAM4AiwFDgRuAF4Dfh4es9HgSeA96TxXwS21MxvXtmfDzgTeBz4M2Ar2Tej84He1K67gDXAO4DTgP8Grkvv/TTwozSPWcCW1O7eNH4rcE16/hFgKGUo4Bxg/mh5A/0183kQ+JuUywXAC8DiNO4m4CiwJP1vbgYeTuPOA/YAc3LzPdt147qpUt24dlw7rhvXTSN1U9oGqezHaIU7yjSrgDtqCu0XcuNXjhRien0CsD9XuFuAj+XGn5gK+/QWFe4R4CXgJ6kopqZC+0JuutOBnwFTc8Ouyq1g/0ZaedPry+oU7neA3x9P3vnCTSvFG8D03PibgbW5wr0/N24h8Ep6fk7K/FLgRNeN66aKdePace24blw3jdRNJxzrHTdJC4GvkB0XnkoW6kM1k+3JPZ+Tfx0Rb0oayo2fD3xN0q25Ya8D84BDTWx6kSsi4v78AElw/GeYT7ZCDadxkK2AI9PMqZn+J3WWdybwVAPtnAMciIjDNcsZyL1+Pvf8ZeBkSb0RsUvSH5AV9/mSvgN8JiL2NtCOhrhuXDeNcu24dhrhuumeuplU52QBXwe+T7YLbwbwBbJdiXmRez5MVoQAKDsxb25u/B7gkxFxau4xNSIGa+bTavll7yH7djA718YZEXF+Gj9MVpAjzqoz3z1ku67HWmatvcAsSdNrljNUMP3xM45YHxGXkK2EAXx5PO9rIteN66ZRrh3XTiNcN11SN5OtkzUdOBQRRySdD1w7xvSbgPdJWpJOivsMMDM3/mvA5ySdByBppqTfAIiIn5F9Q1jQ7A8xERExDHwX+IqkGZJOkHS2pF9Kk2wAfk/SPEkzyXZLF/k74LOSLlLmHEnz07h9FHzWiNgD/Cdwczpp8+eBTwFj/opG0nmSfkXSFLJj4a+Q7c5tJdeN66ZRrh3XTiNcN11SN5Otk/WHwDWSjgC3At+qN3H6p18FfBV4keybwmNkvW0i4g7gr4FvS/pfYDvwq7lZ/Clwp7Jfe/xakz/LRHwCOAnYARwE
/hnoS+O+Tnb8+lGyb07fLppJRNwJfAlYDxwmO0lxVhp9M9lK/JKkz47y9qvIjn3vBTYCn4+I+8bR9inAarL8nyc7GfKPx/G+ZnLduG4a5dpx7TTCddMldaN0Qpdx7Hokz5Nd8+O/2t0e6wyuG2uUa8ca4brpHJNtT9aESbpc2bU7TgY+T3ai22Cbm2UV57qxRrl2rBGum85USidL0geVXahrl6R6x1Wr4BeBZ8h+mrkY+PWIeLXMBXZYPi3VQdm4biqkw7Jx7VRIB2XjuulATT9cqOzqrT8mOx78HPAIcFVE7GjqgjqU8ynmbIo5m2LOpj7nU8zZFHM2zVHGnqxFwK6IeDr1sr8JfLiE5XQq51PM2RRzNsWcTX3Op5izKeZsmqCMi5HO5fgLij0HvK92Ikkrya5iy9SpUy/q7+8voSmV9FOyYh3xlny6NZu5c+cyNDR0NDfItZOkbPKHBjo2m2nTpjV1fgsWLOCZZ56ZFNmUxNucAuPZ5jibY7xe5TzxxBMvRcTMsaYro5NVe0E1GOXiYBFxG3AbwMKFC2PdunUlNKV6BgYGDo4y+Lh8ujWb+++/n1WrVh2pGVyp2rnoootatqy8O++8k6uvvvrlmsGVymY0rcirU7NpFW9zio1nm+NsjuP1KhkYGBgez3RlHC58juOv2jqP7HoUlnkd5zOq0047DbJrqIxwNsm8efN48803T8wPouLZtKpD2onZtJi3OQW8zSnmbJqjjE7WI8C5kt4t6SSyu4NvKmE5neoVnM+oFi5cCNl9opxNjYsvvpiIOKmTshkcbM2vyzsxmxbzNqeAtznFnE1zNP1wYUS8Lul3yK7c2gP8Q0Q83uzldDjnM4re3l6AZ3E2b9Hb28uUKVOeP3r0aMdk06o9WZ2YTRt4mzMKb3OKOZvmKOOcLCLiXuDeMuY9GTifug5FxMDYk3Wf3t7eIxGxsN3tGEs7zlvrlGzaxducurzNKeZs3qauv+K7mTVXqw4TmplVnTtZZtZU7foFpplZ1biTZWZmZlYCd7LMzMzMSuBOlpmZmVkJ3MkyMzMzK0Epl3Aws+7jE97NzI7nPVlmZmZmJXAny8yawtfHMjM7njtZZtY07miZmf0/d7LMrKnc0TIzy7iTZWZNNzg46M6WmXU9d7LMzMzMSuBOlpmZmVkJ3Mkys9L4kKGZdTNfjNRsAibSafDFOc3Mupv3ZJlZqbw3y8y6lTtZZmZmZiVwJ8vMzMysBO5kmVnpfMjQzLqRO1lmZmZmJXAny8zMzKwE7mSZmZmZlcCdLDMzM7MSuJNlZqXzhVnNrBu5k2VmZmZWAneyzMzMzErgTpaZlc7XyTKzbuQbRJtZS9R2tHyelplNdt6TZWZmZlaCce3JkrQbOAy8AbweEQOSZgHfAvqB3cBvRsRBSQL+ClgCvAx8MiK+3/ymV8OyZcuYNm0aPT099PT0sG7dOg4dOsSNN97I8PAwfX19rF69mhkzZhARAKdL2oWzeUs2t9xyC8B7Jf2ASZ4NQH9/P9OnT6enp4fe3l62bdvGgQMHuPLKK9m9ezf9/f1s2LCBmTNnEhEcPXq0a2rH2RTzNqeYtzn1NVA7Z3ZL7ZRlInuyfjkiLoiIgfR6FfBARJwLPJBeA1wOnJseK4G/bVZjq2rNmjWsX7+edevWAbB27VoWLVrExo0bWbRoEWvXrgXgoYceApiCsxk1mz179gD8kC7JBmDLli1s376dbdu2AbB69WoWL17Mzp07Wbx4MatXrwZg8+bNRMSkqp3BwcFjj9F0czZj8TanmLc59U2wdk6mi2qnDG/ncOGHgdvT89uBK3LDvxGZh4FTJfW9jeV0nAcffJClS5cCsHTpUrZu3XpsOPCSsxk9myVLlgDQrdkA3H333axYsQKAFStWcNdddx0b3tvb29W142yKeZtTzNuc+saonf/p5tpphvGe+B7AdyUFsCYibgNOj4hhgIgYlnRamnYusCf33ufSsOH8DCWtJOsdc8YZZzT+
CdpMEtdffz2SWL58OcuXL+fAgQPMnj0bgNmzZ3Pw4EEAXnjhBYDXcm93Nrlsaj7rqNmk+U6afC677DIkcd1117Fy5Ur27dtHX1+2Hevr62P//v0ADA0NIWnS1k7t3qzXXnuNSy655Lja2bt377EHwPDwMIODg+zYsWNSZ1PL25xiZWxzJks20FDtvJp7+6TPpwxKx13rTyTNiYi9qSN1H/C7wKaIODU3zcGImCnpHuDmiPiPNPwB4IaIKPwNt6TDwJNv87OUZTbwYp3xJ5JtxHqB9wDPAucA23PTXJBenwNERMwEZ5Pks3keeGdEvGs82UCl8xkrG+je2nE29XmbU6yt2xxJLwA/HaMN7eL1qth4spmo+RHxrrEmGteerIjYm/7ul7QRWATsk9SX9mL1AfvT5M8BZ+bePg/YO8Yinsyd61UpkraNt22SbgKOANcCy3LZbE0/FlgDbM29xdnUZBMRd6S3jCcbqGg+E8kmTX8TXVI7zqY+b3OKtXubkzpjE6rfVvF6Vayd/7Mxz8mS9A5J00eeA5eRnSi4CViRJlsB3J2ebwI+ocz7gUMjhxUnG2dTzNnU53yKOZtizqaYs6nP+bTHePZknQ5slDQy/fqI+FdJjwAbJH2KbJfjR9L095JdvmHkZ5+/1fRWV4ezKeZs6nM+xZxNMWdTzNnU53zaYFznZJXeCGllOpm+ctrdtnYvv54qtK0KbRhNFdpVhTaMpgrtqkIbirS7be1efj1VaFsV2jCaKrSrCm0YTTvbVYlOlpmZmdlk49vqmJmZmZXAnSwzMzOzErS9kyXpg5KelLRL0qqx39H05e+W9Jik7ZK2pWGzJN0naWf6O3KdEEn6amrrDyRdWHLbnE1x29qaTWpDJfNxNmO2zetVcducTXHbvF4Vt8vZFImItj2AHuApYAFwEvAosLDFbdgNzK4Z9ufAqvR8FfDl9HwJsBkQ8H7ge86mO7Opaj7Opvr5OBtnM5nycTb1H+3ek7UI2BURT0fEq8A3ye592G5VuC+jsylW1Wyg/fk4m/qqmo+zKeZs6mt3Ps6mjnZ3soruc9hKI/dlHFR2DyaouS8jMNZ9GcvgbIpVIRuoZj7Opr4q5ONsijmb+qqYj7OpY7w3iC6LRhnW6mtKfCBy92WU9KM607ayvc6mGsuqp4r5OJv6qpCPsynmbOqrYj7Opo5278lq5D6HTRW5+zICx92XEUBv/76MjXI2xdqeDVQ2H2dTX9vzcTbFnE19Fc3H2dTR7k7WI8C5kt4t6STgo2T3S2oJVfteTs6mWFuzgUrn42zq83pVzNkU83pVzNnUU9YZ9eN9kJ3l/2OyXyf8SYuXvYDslxCPAo+PLB94J/AAsDP9nZWGC7g1tfUxYMDZdF82Vc/H2VQ3H2fjbCZjPs6m+OHb6piZmZmVoN2HC83MzMwmJXeyzMzMzErgTpaZmZlZCdzJMjMzMyuBO1lmZmZmJXAny8zMzKwE7mSZmZmZleD/AOX4JEJolBuUAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAQnklEQVR4nO3dfYwc9X3H8feHOx5sbIodBzjbmAsPAZmoQtzhRE3VqjVFwbEb6ioQlChOFTCp6GMUUdNGDUoT4VREbaPSxqS0JlZNYqoYXIGb8GBTlZYUX2SHACE21MHYhw21ce0cxIF8+8fMofGys0+3szt3+3lJJ+/uzO789nPfnf16Zm5GEYGZmZmZtdcJ3R6AmZmZ2VTkJsvMzMysAG6yzMzMzArgJsvMzMysAG6yzMzMzArgJsvMzMysAG6yeoykQUkhqT+9v1nSihZeZ4Gko5L62j9KKxvXjbXKtWOtmCp1M6marDSo8Z+fS3otc/+jHR7LKWkBzC/o9Xdn3t9+Sf8kaUa7lxMRV0bEXQ2O5/LM816IiBkR8Wa7x9RurhvXTatcO66dVrhuXDfjJlWTlQY1IyJmAC8AyzKP/XMzrzXeHZfcsvS9XgpcBnw2O1GJSfU77AbXjeumVa4d104rXDeum3FT6k1Ler+k70o6LGmfpL/KbGoc7+Z/V9JzwA/Sxz8oaaekVyX9taTHJX0s85o3SHpW0kFJ90ual0769/TfZ9MO/qqi3ldE7AU2A++RtFXSFyU9BowB50r6BUl3ShqVtFfSF5RuGpXUJ+k2Sa9Ieh74YEVmWyVdl7l/vaRnJB2R9LSkSyWtAxYA/5q+15v09k25cyVtSnPaJen6zGveImmDpK+nr/uUpOHM9D9Jx30kzXpxUVlW47px3bTKtePaaYXrpofqJiIm5Q+wG7i84rFFJF10H3AesAv4VDrtFCCA+4HTgWnAWcBRYClwInAT8DPgY+lzPgI8A7w7nf4FYEvF680v+v0BZwNPAX8BbCX5n9HFQH86rnuBNcCpwBnAfwM3pM/9FPDD9DVmA1vScfen07cC16W3PwzsTTMUcD5wTrW8gcGK13kU+Ls0l0uAl4HF6bRbgNeBJenv5lbg8XTahcAeYG7mdc9z3bhuylQ3rh3XjuvGddNK3RS2Qir6p1rhVplnFXB3RaH9Umb6yvFCTO+fABzIFO4W4KOZ6SemhX1mhwr3KPAq8OO0KKalhfb5zHxnAj8FpmUeuzbzAXuE9MOb3r+iRuF+G/jDRvLOFm76oXgTmJmZfiuwNlO4D2WmLQReS2+fn2Z+OXCi68Z1U8a6ce24dlw3rptW6mYy7OttmKSFwJdJ9gtPIwn1sYrZ9mRuz83ej4ifS9qbmX4O8FVJt2ceewOYDxxu49DzXBURD2UfkATHv4dzSD5Qo+k0SD6A4/PMrZj/xzWWdzbwXAvjnAscjIgjFcsZztx/KXN7DDhFUn9E7JL0RyTFfbGkbwOfjoh9LYyjJa4b102rXDuunVa4bnqnbqbUMVnA14DvkWzCOw34PMmmxKzI3B4lKUIAlByYNy8zfQ/wiYg4PfMzLSJGKl6n07LL3kPyv4M5mTGeFhEXp9NHSQpy3IIar7uHZNN1vWVW2gfMljSzYjl7c+Y//oUj1kfEL5N8CAP4UiPPayPXjeumVa4d104rXDc9UjdTrcmaCRyOiKOSLgaurzP/JuC9kpakB8V9GpiVmf5V4LOSLgSQNEvSbwNExE9J/odwbrvfRDMiYhT4DvBlSadJOkHSeZJ+NZ1lA/AHkuZLmkWyWTrPPwCfkTSkxPmSzkmn7SfnvUbEHuA/gVvTgzZ/EfgkUPevaCRdKOnXJZ1Msi/8NZLNuZ3kunHdtMq149ppheumR+pmqjVZfwxcJ+kocDvwzVozp7/0a4GvAK+Q/E/hSZJum4i4G/hb4FuS/g/YDvxG5iX+HLhHyV97/Gab30sz
Pg6cBDwNHAL+BRhIp32NZP/1DpL/OX0r70Ui4h7gi8B64AjJQYqz08m3knyIX5X0mSpPv5Zk3/c+YCPwuYh4sIGxnwysJsn/JZKDIf+0gee1k+vGddMq145rpxWumx6pG6UHdBlvnY/kJZJzfvxXt8djk4Prxlrl2rFWuG4mj6m2Jatpkq5Ucu6OU4DPkRzoNtLlYVnJuW6sVa4da4XrZnIqpMmS9AElJ+raJanWftUy+BXgf0j+NHMx8FsRcazIBU6yfDpqEmXjuimRSZaNa6dEJlE2rptJqO27C5WcvfVHJPuDXwSeAK6NiKfbuqBJyvnkczb5nE0+Z1Ob88nnbPI5m/YoYkvWImBXRDyfdtnfAD5UwHImK+eTz9nkczb5nE1tziefs8nnbNqgiJORzuP4E4q9CLy3ciZJK0nOYsu0adOGBgcHCxhKKf2EpFjHvS2fbDannnrq0EUXXdSRgY2NjR13f/r06R1Z7rh58+axd+/e1zMPuXZSaTbZXQPOJuVs6mpqndNL2TSyzqnMZuHChRNaZuV6tqxaWR8X/X3VSnYT/R7LW+YzzzzzakTMqjoxo4gmq/KEalDl5GARcQdwB8DChQtj3bp1BQylfIaHhw9Vefi4fLLZDA8Px7Zt2zoxNEZG3n4M5dDQUEeWDXDPPfdw9dVXH6142LUDPPTQQ6xatary0+5scDb1NLvO6aVs0tqpuc6pzKad6+Nq69yyaCQb6Oz3Vat5TfR7rNpyh4eHRxt5bhFN1oscf9bW+STno7DEGzifqubPnw/JOVTeeghnA8AZZ5wBySUpxjmblLOpy+ucHGnteJ1TRdmymUhD2spzxxuziTbCRRyT9QRwgaR3STqJ5OrgmwpYzmT1Gs6nqssuuwyS60Q5mwrpLoqTnM3bOZu6vM7JkdaO1zlVdCObyoZmZGTkrZ9OaleDBQVsyYqINyT9HsmZW/uAf4yIp9q9nEmulPkMDQ11dfN1f38/wAuUMJtuS7N5CWfzNs6mIaVc53Sb1zn5upFNdrdeN7+L2rnsInYXEhEPAA80Ov/06dM7duxPXnjNLH+iv4Bm8+mmkZGRjh6XRXI9r+H6s/WkoxExsaNupy5nU8NkWud0gdc5+ZzNBBXSZJVZt7fWmJmZ2fEbLMr03Vxtw0KrY+u5JsvMzMy6r/LYp7I0WvXG0MzeHTdZqS7sFiulshS5mZlNTdW+Y8r4vVPtAPhm+wQ3WWZmZjZh2RN3lrFpqmd8I0O9RqqZ9+YmK8Nbs8zMrEiTsfloVPbs6u08DUKnjI+11pibfT9uslowkUZsxowZbRxJMSp3Gbr5NDNLTKamodPGxsacT4UiTkZaem4YzMzMrGg92WSZmZmZFc1NVgVv6jQzM7N2cJNlDXHzaWZm1hw3WVaVj1szMzObGDdZZmZmZgXo2Sar1pYa7xpLZDPyli0zM7Pm+DxZ1hCfK2vinJ+ZWW/p2S1Z1hg3BmZmZq1xk5XDuwwTlWd+NzMzs8a4yTIzMzMrgJssa4q3ZpmZmTWmp5ssH29kZmZmRenpJqseb7VxI2pmZtYqN1lmZmZmBXCTZU3zFj4zM7P6er7J8u6w1rjRMjMzq60UTdbY2Fi3h5DLzYSZmZm1wpfVsbqGhobcbLaBMzQz6y2l2JJl5ZbXHLhpMDMzy+cmy8zMzKwAbrIa0OtbbDr1xwG9nrOZmU0tpTkmq9YXbNFf8j7mqHUjIyNt+f04fzMzm2pK02RZ73FjZWZmU1lDTZak3cAR4E3gjYgYljQb+CYwCOwGro6IQ5IE/A2wBBgDPhER32v/0MthcHCQmTNn0tfXR39/P9u2bePgwYNcc8017N69m8HBQTZs2MCsWbOICF5//fUzJe2iR7N55JFHuPnmmxkdHWVgYIDVq1dz2mmnERHcdtttAO+R9H2meDYAy5YtY/r06fT19dHX18e6des4fPhwbj5Az9SOs8nnbPI1m43XOXVr5+xeqZ2iNHNM1q9FxCURMZzeXwU8
HBEXAA+n9wGuBC5If1YCf9+uwXZTra0uW7ZsYfv27Wzbtg2A1atXs3jxYnbu3MnixYtZvXo1AJs3byYiTqbHsrnzzjtZs2YNIyMjrF27lkWLFrFx40YWLVrE2rVrAXjsscfYs2cPwA+YQtnUs2bNGtavX8+6desAauYDTLnaqcXZ5HM2+ZrJxuucurVzCj1UO0WYyIHvHwLuSm/fBVyVefzrkXgcOF3SwASWM+ncd999rFixAoAVK1Zw7733vvV4f3//q72SzbFjx9ixY8dxjz366KMsXboUgKVLl7J169a3Hl+yZAkAvZBNnlr5AD1TO9U4m3zOJp/XObXVqZ3/7eXaaYdGj8kK4DuSAlgTEXcAZ0bEKEBEjEo6I513HrAn89wX08dGsy8oaSVJd8xZZ53V+jtok1YPfpfEFVdcgSRuuOEGVq5cyf79+xkYSGpxYGCAAwcOALB3714k/Szz9LrZLFiwoKX302nVspPEjTfeiCSWL1/O8uXLOXjwIHPmzAFgzpw5HDp0CICXX365sg6qZpO+bqlqp1XN5gM0VTvOxtk4m4mvc6ZKNtBS7RzLPH3K51MEpftda88kzY2IfWkj9SDw+8CmiDg9M8+hiJgl6X7g1oj4j/Txh4GbIiK3g5F0BHh2gu+lKHOAV2pMP5FkJdYPvBt4ATgf2J6Z55L0/vlARMQscDapbDYvAe+IiHc2kg2UOp962UDv1o6zqc3rnHxdXedIehn4SZ0xdIs/V/kayaZZ50TEO+vN1NCWrIjYl/57QNJGYBGwX9JAuhVrADiQzv4icHbm6fOBfXUW8WzmWK9SkbSt0bFJugU4ClwPLMtkszX9Y4E1wNbMU5xNRTYRcXf6lEaygZLm00w26fy30CO142xq8zonX7fXOWkz1lT9doo/V/m6+Ture0yWpFMlzRy/DVxBcqDgJmBFOtsK4L709ibg40q8Dzg8vltxqnE2+ZxNbc4nn7PJ52zyOZvanE93NLIl60xgo6Tx+ddHxL9JegLYIOmTJJscP5zO/wDJ6RvG/+zzd9o+6vJwNvmcTW3OJ5+zyeds8jmb2pxPFzR0TFbhg5BWpgfTl063x9bt5ddShrGVYQzVlGFcZRhDNWUYVxnGkKfbY+v28mspw9jKMIZqyjCuMoyhmm6OqxRNlpmZmdlU4wtEm5mZmRXATZaZmZlZAbreZEn6gKRnJe2StKr+M9q+/N2SnpS0XdK29LHZkh6UtDP9d/w8IZL0lXSs35d0acFjczb5Y+tqNukYSpmPs6k7Nn+u8sfmbPLH5s9V/ricTZ6I6NoP0Ac8B5wLnATsABZ2eAy7gTkVj/0lsCq9vQr4Unp7CbAZEPA+4LvOpjezKWs+zqb8+TgbZzOV8nE2tX+6vSVrEbArIp6PiGPAN0iufdhtZbguo7PJV9ZsoPv5OJvaypqPs8nnbGrrdj7OpoZuN1l51znspPHrMo4ouQYTVFyXEah3XcYiOJt8ZcgGypmPs6mtDPk4m3zOprYy5uNsamj0AtFFUZXHOn1OifdH5rqMkn5YY95OjtfZlGNZtZQxH2dTWxnycTb5nE1tZczH2dTQ7S1ZrVznsK0ic11G4LjrMgJo4tdlbJWzydf1bKC0+Tib2rqej7PJ52xqK2k+zqaGbjdZTwAXSHqXpJOAj5BcL6kjVO5rOTmbfF3NBkqdj7OpzZ+rfM4mnz9X+ZxNLUUdUd/oD8lR/j8i+euEP+vwss8l+UuIHcBT48sH3gE8DOxM/52dPi7g9nSsTwLDzqb3sil7Ps6mvPk4G2czFfNxNvk/vqyOmZmZWQG6vbvQzMzMbEpyk2VmZmZWADdZZmZmZgVwk2VmZmZWADdZZmZmZgVwk2VmZmZWADdZZmZmZgX4fw7sszhOTqWqAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAPhklEQVR4nO3dfYwc9X3H8ffHZ/wEuNhxDH4Ch4eATFQh7npETUXVmkbBsRuqKlVQEE4VcFLRljaKqN1GDUoT4VREbaOSBtIHE6uGOBUGV2ASoJiqtKSYCEKAEBvqYM5nDLVx7ZiEQL79Y36Hxued3b1l53Z29/OSVrc7M7vz24+/O/7ezNysIgIzMzMza68pnR6AmZmZWS9yk2VmZmZWAjdZZmZmZiVwk2VmZmZWAjdZZmZmZiVwk2VmZmZWAjdZfUbSUkkhaWp6vE3S6hZe53RJRyQNtH+UVjWuG2uVa8da0St101VNVgpq7PZzSa/lHn90kscyIxXA4pJef3fu/b0k6Z8kndTu9UTEpRFxa5PjuST3vBci4qSIeLPdY2o3143rplWuHddOK1w3rpsxXdVkpaBOioiTgBeAVblp/zyR1xrrjituVXqvFwK/BHwmP1OZrvo37ATXjeumVa4d104rXDeumzE99aYlvU/SdyQdkrRX0l/ldjWOdfO/J+k54Ptp+gcl7ZT0qqS/lvSIpCtyr/kJSc9KOiDpbkmL0qx/Tz+fTR38ZWW9r4gYAbYB75G0XdIXJD0MHAXOlPQLkv5B0qikEUmfV9o1KmlA0o2SXpH0PPDBcZltl3RV7vHVkp6RdFjS05IulLQROB341/Rer9Pxu3IXStqactol6erca14vabOkr6fXfUrSUG7+n6RxH05ZLy8ry1pcN66bVrl2XDutcN30Ud1ERFfegN3AJeOmDZN10QPAWcAu4JNp3gwggLuBU4CZwGnAEWAlcAJwHfAz4Ir0nI8AzwDvTvM/Dzw47vUWl/3+gCXAU8BfANvJfjM6H5iaxnUncDNwIjAf+G/gE+m5nwR+kF5jLvBgGvfUNH87cFW6/2FgJGUo4GzgjFp5A0vHvc5DwFdSLhcALwPL07zrgZ8AK9K/zQ3AI2neucAeYGHudc9y3bhuqlQ3rh3XjuvGddNK3ZS2QSr7VqtwayyzFrhtXKH9cm7+mrFCTI+nAPtzhfsg8NHc/BNSYZ86SYV7BHgV+FEqipmp0D6XW+5U4KfAzNy0y3MfsH8jfXjT4/fXKdxvAdc2k3e+cNOH4k3g5Nz8G4ANucK9PzdvGfBaun92yvwS4ATXjeuminXj2nHtuG5cN63UTTcc622apGXAl8iOC88kC/XhcYvtyd1fmH8cET+XNJKbfwbwVUk35aa9ASwGDrVx6EUui4j78xMkwbHv4QyyD9RomgfZB3BsmYXjlv9RnfUtAZ5rYZwLgQMRcXjceoZyj/fl7h8FZkiaGhG7JP0RWXGfL+lbwKciYm8L42iJ68Z10yrXjmunFa6b/qmbnjonC/ga8F2yXXizgc+R7UrMi9z9UbIiBEDZiXmLcvP3AB+LiFNyt5kR8di415ls+XXvIfvtYF5ujLMj4vw0f5SsIMecXud195Dtum60zvH2AnMlnTxuPSMFyx/7whGbIuJXyD6EAXyxmee1kevGddMq145rpxWumz6pm15rsk4GDkXEEUnnA1c3WH4rcJGkFemkuE8Bc3Lzvwp8RtK5AJLmSPptgIj4KdlvCGe2+01MRESMAt8GviRptqQpks6S9Ktpkc3AH0paLGkO2W7pIn8PfFrSoDJnSzojzXuJgvcaEXuA/wRuSCdt/iLwcaDhX9FIOlfSr0uaTnYs/DWy3bmTyXXjummVa8e10wrXTZ/UTa81WX8MXCXpCHAT8I16C6d/9MuBLwOvkP2m8CRZt01E3Ab8LXCHpP8DHgd+I/cSfw58U9lfe/xmm9/LRFwJTAOe
Bg4C/wIsSPO+Rnb8+gmy35zuKHqRiPgm8AVgE3CY7CTFuWn2DWQf4lclfbrG0y8nO/a9F9gCfDYi7mti7NOB9WT57yM7GfJPm3heO7luXDetcu24dlrhuumTulE6oct463ok+8iu+fFfnR6PdQfXjbXKtWOtcN10j17bkzVhki5Vdu2OGcBnyU50e6zDw7KKc91Yq1w71grXTXcqpcmS9AFlF+raJanecdUquBj4H7I/zVwO/FZEvF7mCrssn0nVRdm4biqky7Jx7VRIF2XjuulCbT9cqOzqrT8kOx78IvAocHlEPN3WFXUp51PM2RRzNsWcTX3Op5izKeZs2qOMPVnDwK6IeD512bcDHyphPd3K+RRzNsWcTTFnU5/zKeZsijmbNijjYqSLOPaCYi8CF41fSNIasqvYMnPmzMGlS5eWMJRK+jFZsY45Lp9+zWbRokWMjIz8JDfJtZOkbPKHBpxN0mo2y5Ytm6QRNu/o0aNlvOyEtjknnnji4HnnnVfGONqmXTk1s83px88UeHvcyDPPPPNqRMxptFwZTdb4C6pBjYuDRcQtwC0Ay5Yti40bN5YwlOoZGho6WGPyMfn0azb3338/a9euPTJusmuHt7IZ/z+Ls6H1bHbs2DEZw5uQxx5r/3nME93mDA0NVTKbvHbl1Mw2px8/U+DtcSNDQ0OjzSxXxuHCFzn2qq2Lya5HYZk3cD41zZ8/H7JrqIxxNknK5oTcJGeTOJuGvM0pcPHFF4O3OTV5e9weZTRZjwLnSHqXpGlk3w6+tYT1dKvXcD41pcM3M5zN8VI205zN8VrJZtasWZMxtKrwNqfAlClTwNucmrw9bo+2Hy6MiDck/T7ZlVsHgH+MiKfavZ4u53xqmDp1KsALOJvjpGz24WyO42ya4m1ODRdddBF4m1OTt8ftUcY5WUTEPcA9Zbx2L3A+dR2KiKHGi/WlIxFRvbO1q8HZ1NGv25zBwcG37o8/jys3z9ucYs7mber7K76bmVl3yTdPzah1onwZf2RgNp6bLDMz60mNGik3WlY2N1lmZtaz3EhZJ7nJMjOzntTosOJEDzuaTVQpJ76bmZmVaXBwsHAvVb55ciNlneQmy8zMespY81WvETObDD5caGZmPWdsD9bg4KD3ZlnHeE+WmZn1lFpNVb1rZpmVxXuyzMyspxRdF8vNlU02N1lmZtaVJnIYMH/YcPxPs7L4cKGZmfU8X/XdOsFNlpmZ9TzvtbJO8OFCMzMzsxK4yTIzs57jQ4FWBW6yzMzMzErgc7LMzKxr+arutTXKxeeoTQ7vyTIzs57U782XG6nOc5NlZmbWZ/q9AZ0sbrLMzMzMSuAmy8zMzKwEbrLMzKyr1Tv3yIfFijmb8rnJMjMz61E++b2z3GSZmZmZlcBNlpmZmVkJ3GSZmVlP87lHxZxNudxkmZmZmZXATZaZmXU9n+BtVeQmy8zMel4/HxZr1ID2czZlc5NlZmZmVgI3WWZmZmYlmNrMQpJ2A4eBN4E3ImJI0lzgG8BSYDfwOxFxUJKAvwFWAEeBj0XEd9s/9GpYtWoVs2bNYmBggIGBATZu3MihQ4dYt24do6OjLFiwgPXr1zN79mwiAuBUSbtwNsdlc+ONNwK8R9L36PFswLVTj7Mp5myKNZPNvffey5w5c4gIrr32WvA2h3Xr1nHgwAGWLl3K5s2b38oHWNIvtVOWiezJ+rWIuCAihtLjtcADEXEO8EB6DHApcE66rQH+rl2Draqbb76ZTZs2sXHjRgA2bNjA8PAwW7ZsYXh4mA0bNgDw8MMPA0zH2dTMZs+ePQDfp0+yAddOPc6mmLMp1iib9evXA7Bt2zZ27twJ3uYwPDzM7bffzvLly4/JB5hBH9VOGd7O4cIPAbem+7cCl+Wmfz0yjwCnSFrwNtbTdR566CFWrlwJwMqVK9m+fftb04FXnU3tbFasWAFAv2YDrp16qpJNFU8Srko2VTQ+mzvvvBOAu+66iyuvvBLwNmcsn9WrVx+T
D/C//Vw77dBskxXAtyU9JmlNmnZqRIwCpJ/z0/RFwJ7cc19M044haY2kHZJ2HDx4sLXRV4AkrrnmGq644gruuOMOAA4cOMC8efMAmDdvHmPv7+WXXwb4We7pziaXzWmnnZZ/es1s0uv2ZT64dtqWTXpOSzp9qYAqZ9Np06dPb5jN6OgoACMjIyxZsiT/9J7+TEHj2lmwYAH79+8HsnyA13NP7/l8yqB03LX+QtLCiNgraT5wH/AHwNaIOCW3zMGImCPpbuCGiPiPNP0B4LqIKPz1T9Jh4Nm3+V7KMg94pc78E8g2YlOBdwMvAGcDj+eWuSA9PhuIiJgDzibJZ7MPeEdEvLOZbKDS+TTKBvq3dpxNfd7mFOvoNkfSy8CPG4yhU/y5KtZMNhN1RkS8s9FCTZ34HhF708/9krYAw8BLkhZExGjahbg/Lf4ikP/1YDGwt8Eqns2d61UpknY0OzZJ1wNHgKuBVblstqc/FrgZ2J57irMZl01E3Jae0kw2UNF8JpJNWv56+qR2nE193uYU6/Q2JzVjE6rfyeLPVbFO/ps1PFwo6URJJ4/dB95PdqLgVmB1Wmw1cFe6vxW4Upn3AofGDiv2GmdTzNnU53yKOZtizqaYs6nP+XRGM3uyTgW2SBpbflNE3CvpUWCzpI+T7XL8cFr+HrLLN4z92efvtn3U1eFsijmb+pxPMWdTzNkUczb1OZ8OaOqcrNIHIa2JiFs6PY5aOj22Tq+/niqMrQpjqKUK46rCGGqpwriqMIYinR5bp9dfTxXGVoUx1FKFcVVhDLV0clyVaLLMzMzMeo2/VsfMzMysBG6yzMzMzErQ8SZL0gckPStpl6S1jZ/R9vXvlvSkpMcl7UjT5kq6T9LO9HPsOiGS9OU01u9JurDksTmb4rF1NJs0hkrm42wajs2fq+KxOZvisflzVTwuZ1MkIjp2AwaA54AzgWnAE8CySR7DbmDeuGl/CaxN99cCX0z3VwDbAAHvBb7jbPozm6rm42yqn4+zcTa9lI+zqX/r9J6sYWBXRDwfEa8Dt5N992GnVeF7GZ1NsapmA53Px9nUV9V8nE0xZ1Nfp/NxNnV0uslq6nsOS9b272VsE2dTrArZQDXzcTb1VSEfZ1PM2dRXxXycTR1Nfa1OiVRj2mRfU+J9kfteRkk/qLPsZI7X2VRjXfVUMR9nU18V8nE2xZxNfVXMx9nU0ek9Wa18z2FbRe57GYFjvpcRQG//exlb5WyKdTwbqGw+zqa+jufjbIo5m/oqmo+zqaPTTdajwDmS3iVpGvARsu9LmhSq9nc5OZtiHc0GKp2Ps6nPn6tizqaYP1fFnE09ZZ1R3+yN7Cz/H5L9dcKfTfK6zyT7S4gngKfG1g+8A3gA2Jl+zk3TBdyUxvokMORs+i+bqufjbKqbj7NxNr2Yj7MpvvlrdczMzMxK0OnDhWZmZmY9yU2WmZmZWQncZJmZmZmVwE2WmZmZWQncZJmZmZmVwE2WmZmZWQncZJmZmZmV4P8BnCBFUvuIvaYAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAQr0lEQVR4nO3dfYwc9X3H8feHO8C+YBc7DuAHzIWHIJmoQtzViRrUqjWNgmM31FUKNFGcKkBS0QcaRdS0UYPSRDgVUdsotDHpg4lVkzhVXKiAJkBtV6Ul5S4yIUCIDXUw9vFUG9fOkfCQb/+YWXdu2dnb29vZnd39vKTV7e7Mzvz2c9+Z+91vZmcVEZiZmZlZa53Q6QaYmZmZ9SJ3sszMzMwK4E6WmZmZWQHcyTIzMzMrgDtZZmZmZgVwJ8vMzMysAO5k9RlJw5JC0mD6+B5J65tYznJJxyQNtL6VVjauG2uWa8ea0St101WdrDSoyu2nkl7OPP5Am9syJy2AZQUtf1/m/T0n6e8lndLq9UTEpRFxW4PtuSTzuqcj4pSIeL3VbWo1143rplmuHddOM1w3rpuKrupkpUGdEhGnAE8DazPP/cNMllXpHZfc2vS9XgT8HPDJ7EQluup32AmuG9dNs1w7rp1muG5cNxU99aYlvUvStyUdkXRQ0p9nhhorvfnflvQk8L30+fdK2iPpJUl/IelBSR/MLPOjkp6QdEjSXZKWppP+Lf35RNqDv6yo9xURB4B7gLdL2inps5IeACaBsyX9jKS/lTQh6YCkzygdGpU0IOlmSS9Kegp4b1VmOyVdlXl8taTHJR2V9JikiyRtAZYD/5y+1+v1xqHcJZLuTHPaK+nqzDJvlLRN0lfS5T4qaTQz/Q/Tdh9Ns15VVJa1uG5cN81y7bh2muG66aO6iYiuvAH7gEuqnltJ0oseAM4B9gIfS6fNAQK4CzgVmAucARwD1gAnAtcDrwIfTF9zBfA48LZ0+meAHVXLW1b0+wPOBB4F/hTYSfKf0QXAYNqufwI2AW8CTgP+C/ho+tqPAd9Pl7EQ2JG2ezCdvhO4Kr3/fuBAmqGAc4GzauUNDFctZxfwV2kuFwIvAKvSaTcCPwZWp7+bm4AH02nnA/uBJZnlnuO6cd2UqW5cO64d143rppm6KWyHVPStVuHWmGcDcHtVof18Zvo1lUJMH58APJ8p3B3ABzLTT0wL+/Q2Fe4x4CXgh2lRzE0L7dOZ+U4HfgLMzTx3ZWYD+1fSjTd9/O46hftN4PcbyTtbuOlG8TowLzP9JmBzpnDvy0xbAbyc3j83zfwS4ETXjeumjHXj2nHtuG5cN83UTTcc622YpBXA50mOC88lCfWBqtn2Z+4vyT6OiJ9KOpCZfhbwJUm3ZJ57DVgGHGlh0/NcFhH3ZZ+QBFPfw1kkG9REOg2SDbAyz5Kq+X9YZ31nAk820c4lwKGIOFq1ntHM42cz9yeBOZIGI2KvpOtIivsCSd8EPh4RB5toR1NcN66bZrl2XDvNcN30T9301DlZwJeB75AM4c0HPk0ylJgVmfsTJEUIgJIT85Zmpu8HPhwRp2ZucyNivGo57ZZd936S/w4WZdo4PyIuSKdPkBRkxfI6y91PMnQ93TqrHQQWSppXtZ4DOfNPXXDE1oi4mGQjDOBzjbyuhVw3rptmuXZcO81w3fRJ3fRaJ2secCQijkm6ALh6mvnvBN4haXV6UtzHgQWZ6V8CPinpfABJCyT9OkBE/ITkP4SzW/0mZiIiJoBvAZ+XNF/SCZLOkfSL6SzbgN+TtEzSApJh6Tx/A3xC0ogS50o6K532HDnvNSL2A/8B3JSetPmzwEeAaT9FI+l8Sb8s6WSSY+EvkwzntpPrxnXTLNeOa6cZrps+qZte62T9AXCVpGPALcDX6s2c/tKvBL4AvEjyn8IjJL1tIuJ24IvANyT9L7Ab+JXMIv4E+LqST3v8aovfy0x8
CDgJeAw4DPwjsDid9mWS49cPk/zn9I28hUTE14HPAluBoyQnKS5MJ99EshG/JOkTNV5+Jcmx74PAduBTEXFvA20/GdhIkv+zJCdD/lEDr2sl143rplmuHddOM1w3fVI3Sk/oMo5fj+RZkmt+/Gen22PdwXVjzXLtWDNcN92j10ayZkzSpUqu3TEH+BTJiW7jHW6WlZzrxprl2rFmuG66UyGdLEnvUXKhrr2S6h1XLYNfAP6b5KOZq4Bfi4hXilxhl+XTVl2UjeumRLosG9dOiXRRNq6bLtTyw4VKrt76A5Ljwc8ADwFXRsRjLV1Rl3I++ZxNPmeTz9nU53zyOZt8zqY1ihjJWgnsjYin0l72V4H3FbCebuV88jmbfM4mn7Opz/nkczb5nE0LFHEx0qVMvaDYM8A7qmeSdA3JVWyZO3fuyPDwcAFNKaUfkRRrxRvy6ddsli5dyoEDB36cearh2hkaGmpTKxOTk5NtXV+aTfbQQEPZrFixoqHlt/v9tFKz2fTLdoX3Obka2ec4m+O8XWU8/vjjL0XEgunmK6KTVX1BNahxcbCIuBW4FWDFihWxZcuWAppSPqOjo4drPD0ln37N5r777mPDhg3Hqp5uqHZGRkba0cTjxsfbe75pmk11T2jabMbGxhpeR7vfU6s0m02/bFfe5+RrZJ/jbKYo3XZVve9v135sdHR0opH5ijhc+AxTr9q6jOR6FJZ4DedT02mnnQbJNVQqnE0qzebEzFPOJuVspuV9Tg7vc/J1Qza1/rlu9z/c0yliJOsh4DxJbyW5VP0VwG8WsJ5u9TLOp6b00NacmWZTto2qCGk2JxVZN5Ucu21Eqx3ZdDnvc3I0u8/pB2XMptF9fa35OrVfa3knKyJek/Q7JFduHQD+LiIebfV6upzzqWFwcBDgaZzNG6TZPMsMsmn2PLXsDqobOlzNZNOHvM+pwfucfGXKppv/kS5iJIuIuBu4u4hl9wLnU9eRiBidfra+dCwiGjuTvf84mzq8z6nL+5x8Hc+mmztY4Cu+m5mZmRXCnSwze4NuOExoZr2tlaNYIyMjHRkVK+RwoZl1t24fojczq6Xd55x6JMvMcnXqvz8z62+9st/xSJaZ5fJhQzNrp17pXFV4JMvMzMw6rt0drHaM1Hsky8zewCNYZla0Xhu1qsWdLLM+Uek45e3Y3LEys3YpUwdrZGSksP2fO1lmfWZ8fLxUOzizflD0NtdN/yT10/7HnSyzHjU5OQnU3vlmO1rdtHM2K4uydRRa3Z5W7RfKllO7uZNlXc+dhHz1snFuZlaEyuG3fu9ggTtZMzLbgvEfNTMz61VDQ0PH/052YweriHOzfAkHMzMz62tFdQrdyTIzMzOj9dfOcifLzMwa1o2Hgcw6xedkmVlf6nRnoeznaNbLZ7prrfmTq2YJj2SZmdkUzXZAqw+1dLoja9ZpHskyM7PjhoaGWrq86o6WR7es7FpZox7JMjOztvHolpWdT3w3M7Ou1epPcFk5TE5OeqSyijtZZmbWEZXOljtc5eKOUutGs9zJMjOzjmvm04xmRZtt7bmTZWZmpVD9B82dq+40Pj7eM6Nhs30f/nShmZmVhjtaViaz/T5Dj2SZmVlX8bW4ukO3j2a1osPvTpaZmXWFWleSz953h6t8ur2jNVvuZJmZWek10oHq9z/oZdULv5dmO/A+J6tNeqHIzMw6LfvHbnx8fMo5M7M9f8aK+1tVvdxuGnWcTVs9kmVmZl2j8se6VmfAHSwrG49kmZlZKVVGqrLK8l2I7tDNTnb0saxq1d9MNdTJkrQPOAq8DrwWEaOSFgJfA4aBfcBvRMRhSQL+ElgNTAIfjojvzKqVJTY8PMy8efMYGBhgcHCQsbExDh06xOWXX86+ffsYHh5m27ZtAEQEwOmS9tIH2axdu5ahoSEGBgYYGBhgy5YtHDlyhBtuuIGJiQkWL17Mxo0bmT9/PhHBzTffDPB2Sd+lx7OBmeeDa8fZ4Gwqqg8bgvc502mi
ds4sunbyOqud7nxVf7ii2UPRMzlc+EsRcWFEjKaPNwD3R8R5wP3pY4BLgfPS2zXAX8+4VV1mx44d7N69m7GxMQA2btzIqlWr2LNnD6tWreK6664D4IEHHgA4mT7KZtOmTWzdupUtW7YAsHnzZlauXMn27dtZuXIlmzdvBpJs9u/fD/A9+iQbmFk+uHacTcrZ/L/qP3ze59Q3w9qZQ4dqp+iRwuoLplYe13p+Nu2ZzTlZ7wNuS+/fBlyWef4rkXgQOFXS4lmsp+vccccdrF+/HoD169ezc+dOAHbt2gXwUj9ns2vXLtasWQPAmjVrpmSzevVqAPo1G6ifD66drsumesddayfeCt2YTbt4n1PfNLXzP52snaK2l1qdq6I0ek5WAN+SFMCmiLgVOD0iJgAiYkLSaem8S4H9mdc+kz43kV2gpGtIesecccYZzb+DNqr1i3j11Ve5+OKLkcS6detYt24dBw8ePH4DOHz4MAAvvPACwKuZl/dMNrVI4tprr52SzaFDh1i0aBEAixYtmpJN1XutmU263L7MB9dOS7Pp1nNqis5m+fLlxb+JHLM9mb2IfU6vbFPQVO28knl5R/Jp9WHDdm/3So+71p9JWhIRB9OO1L3A7wJ3RsSpmXkOR8QCSXcBN0XEv6fP3w9cHxG570zSUeCJWb6XoiwCXqwz/USSndgg8DbgaeBcYHdmngvTx+cCERELwNmkstk8C7w5It7SSDZQ6nymywb6t3acTX3e5+Tr6D5H0gvAj6ZpQ6d4u8rXSDYzdVZEvGW6mRoayYqIg+nP5yVtB1YCz0lanI5iLQaeT2d/Bjgz8/JlwMFpVvFE5lyvUpE01mjbJN0IHAOuBtZmstmZflhgE7Az8xJnU5VNRNyevqSRbKCk+cwkm3T+G+mT2nE29Xmfk6/T+5y0Mzaj+m0Xb1f5Ovk7m/acLElvkjSvch94N8mJgncC69PZ1gN3pPfvBD6kxDuBI5XDir3G2eRzNvU5n3zOJp+zyeds6nM+ndHISNbpwHZJlfm3RsS/SHoI2CbpIyRDju9P57+b5PINlY99/lbLW10eziafs6nP+eRzNvmcTT5nU5/z6YCGzskqvBHSNenJ9KXT6bZ1ev31lKFtZWhDLWVoVxnaUEsZ2lWGNuTpdNs6vf56ytC2MrShljK0qwxtqKWT7SpFJ8vMzMys1/i7C83MzMwK4E6WmZmZWQE63smS9B5JT0jaK2nD9K9o+fr3SXpE0m5JY+lzCyXdK2lP+rNynRBJ+kLa1u9Kuqjgtjmb/LZ1NJu0DaXMx9lM2zZvV/ltczb5bfN2ld8uZ5MnIjp2AwaAJ4GzgZOAh4EVbW7DPmBR1XN/BmxI728APpfeXw3cAwh4J/BtZ9Of2ZQ1H2dT/nycjbPppXycTf1bp0eyVgJ7I+KpiHgF+CrJdx92Whm+l9HZ5CtrNtD5fJxNfWXNx9nkczb1dTofZ1NHpztZed9z2E6V72UcV/IdTFD1vYzAdN/LWARnk68M2UA583E29ZUhH2eTz9nUV8Z8nE0djX5BdFFU47l2X1PiXZH5XkZJ368zbzvb62zKsa56ypiPs6mvDPk4m3zOpr4y5uNs6uj0SFYz33PYUpH5XkZgyvcyAmj238vYLGeTr+PZQGnzcTb1dTwfZ5PP2dRX0nycTR2d7mQ9BJwn6a2STgKuIPm+pLZQub/Lydnk62g2UOp8nE193q7yOZt83q7yOZt6ijqjvtEbyVn+PyD5dMIft3ndZ5N8EuJh4NHK+oE3A/cDe9KfC9PnBdyStvURYNTZ9F82Zc/H2ZQ3H2fjbHoxH2eTf/PX6piZmZkVoNOHC83MzMx6kjtZZmZmZgVwJ8vMzMysAO5kmZmZmRXAnSwzMzOzAriTZWZmZlYAd7LMzMzMCvB/GLPxNqq1rzIAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAP7UlEQVR4nO3de6wc5X3G8e+DzcWHS7HjGHwhOFwCMlGFOKcmaqpWLTQKjt1QVamgQSFVwElFr1FETRs1KE0EqYjaRiUNTi8mVk1CKlyouCRAMVVpSbEjCMGEYKiDsc2tGNfOISGQX/+Yd9F4fWZ2z3rn7Ozs85GOzu7M7Oy7z/nN7LvvzJlVRGBmZmZm/XXYoBtgZmZm1kTuZJmZmZlVwJ0sMzMzswq4k2VmZmZWAXeyzMzMzCrgTpaZmZlZBdzJGjGSlkoKSbPT/TslXdrDet4mab+kWf1vpdWN68Z65dqxXjSlboaqk5WCav38VNKrufsfnOG2HJUKYElF69+ee33PS/pHScf0+3ki4oKIuLHL9pyfe9wzEXFMRLzR7zb1m+vGddMr145rpxeuG9dNy1B1slJQx0TEMcAzwKrctH+azrpaveOaW5Ve6znAzwGfzM9UZqj+hoPgunHd9Mq149rphevGddPSqBct6d2SviVpr6Rdkv4yN9TY6s3/jqSngO+m6e+T9KSkVyT9laQHJV2SW+dHJT0h6WVJt0tanGb9e/r9ROrBX1jV64qIncCdwDslbZL0WUkPAJPAKZJ+RtLfS9otaaekzygNjUqaJek6SS9Jehp4X1tmmyRdlrt/uaTHJe2TtFXSOZLWA28D/jW91it18FDuIkm3pZy2Sbo8t86rJd0s6StpvY9JmsjN/+PU7n0p6/OqynIqrhvXTa9cO66dXrhuRqhuImIof4DtwPlt05aT9aJnAacC24CPpXlHAQHcDhwPzAFOBPYDK4HDgSuBnwCXpMdcBDwOvCPN/wxwX9v6llT9+oCTgMeAPwc2kX0yOguYndr1L8ANwNHAAuC/gY+mx34M+F5axzzgvtTu2Wn+JuCydPsDwM6UoYDTgJOnyhtY2rae+4EvplzOBl4EzkvzrgZ+BKxIf5trgAfTvDOAHcCi3HpPdd24bupUN64d147rxnXTS91UtkOq+meqwp1imTXATW2F9vO5+atbhZjuHwa8kCvc+4AP5uYfngr7hBkq3P3AK8APUlHMSYX26dxyJwA/Bubkpl2c28D+jbTxpvvvKSncbwB/0E3e+cJNG8UbwLG5+dcA63KFe09u3jLg1XT7tJT5+cDhrhvXTR3rxrXj2nHduG56qZthONbbNUnLgM+THReeQxbqA22L7cjdXpS/HxE/lbQzN/9k4EuSrs9Nex1YAuztY9OLXBgR9+QnSIIDX8PJZBvU7jQPsg2wtcyituV/UPJ8JwFP9dDORcDLEbGv7Xkmcvefy92eBI6SNDsitkn6Q7LiPkvSN4CPR8SuHtrRE9eN66ZXrh3XTi9cN6NTN406Jwv4MvBtsiG844BPkw0l5kXu9m6yIgRA2Yl5i3PzdwAfjojjcz9zImJL23pmWv65d5B9Opifa+NxEXFWmr+brCBb3lay3h1kQ9ednrPdLmCepGPbnmdnwfIHrjhiQ0T8AtlGGMDnunlcH7luXDe9cu24dnrhuhmRumlaJ+tYYG9E7Jd0FnB5h+VvA86VtCKdFPdxYG5u/peAT0o6A0DSXEm/ARARPyb7hHBKv1/EdETEbuCbwOclHSfpMEmnSvqltMjNwO9LWiJpLtmwdJG/Az4haVyZ0ySdnOY9T8FrjYgdwH8C16STNn8W+AjQ8b9oJJ0h6VckHUl2LPxVsuHcmeS6cd30yrXj2umF62ZE6qZpnaw/Ai6TtB+4Hvha2cLpj34x8AXgJbJPCo+S9baJiJuAvwFukfR/wMPAr+ZW8WfA15X9t8ev9fm1TMeHgCOA
rcAe4J+BhWnel8mOXz9C9snplqKVRMTXgc8CG4B9ZCcpzkuzryHbiF+R9IkpHn4x2bHvXcBG4FMRcXcXbT8SuJYs/+fITob8ky4e10+uG9dNr1w7rp1euG5GpG6UTugy3rweyXNk1/z4r0G3x4aD68Z65dqxXrhuhkfTRrKmTdIFyq7dcRTwKbIT3bYMuFlWc64b65Vrx3rhuhlOlXSyJL1X2YW6tkkqO65aB78I/A/Zv2aeB/x6RLxW5RMOWT4zaoiycd3UyJBl49qpkSHKxnUzhPp+uFDZ1Vu/T3Y8+FngIeDiiNja1ycaUs6nmLMp5myKOZtyzqeYsynmbPqjipGs5cC2iHg69bK/Cry/gucZVs6nmLMp5myKOZtyzqeYsynmbPqgiouRLubAC4o9C5zbvpCk1WRXsWXOnDnjS5curaAptfRDsmJtOSifqrIZGxs74P7k5GTp/OkuV/TYsbGxg9YxlcWLF7Nz584f5Sa5dpKUTf7QgLNJnE1HA9vn1F03+xxn86aO29XRRx89fuaZZ1bSnm7eQ2bS448//kpEzO20XBWdrPYLqsEUFweLiLXAWoBly5bF+vXrK2hK/UxMTOyZYvIB+Uw3m/Hx8b61rypbtnQ+P/Oee+5hzZo1+9smu3Z4M5v2vYyzwdl0UsU+pym62ec4mwOUblcTExOxefPmvrelm/ePmTYxMbG7m+Wq6GQ9y4FXbV1Cdj0Ky7yO85nSggULILuGSouzSVI2h+cmOZvE2XTkfU4B73OKzXQ2W7ZseXPAoI6dql5V0cl6CDhd0tvJLlV/EfBbh7rSpozWkF0ltu/5NMGyZcsg+54oZ9MmZXOEszmYs+nI+5wC3ucU6yWbycnJwvfBbjpQTepctfS9kxURr0v6XbIrt84C/iEiHuv389TNNIujb/k0qPPJ7NmzAZ5hxGqnGymb53A2B3E2XRm5fXI3vM8p1ks2Y2NjjI+PH7DPb3+Par/fWrb9cU1RxUgWEXEHcEcV666r6RTIKOYzDXsjYqLzYiNpf0QsG3QjasrZlPA+p5T3OcV6ymY6H/7zy+ZvN6XDVUknaxTNREEMw6iVmZlZ2WjWVMs0lTtZQ8IdLDMzGxbdvGe1jgA1cQSrZeS/u9DMzMwGI3+qTdM6WOBOVl/MRGE0sfjMzMyafKTGhwv7YKb+K6KbY9zDqqn/WWJmZt2Z6n1t2N8X3Mnqg5kqgqZ1rFqa+rrMzKx3w97BAh8uHCrDWHDdtnkYX5uZmVkZd7KGjDsjZmbWdE15r/PhwkM0iEJo2rlZTdmYzMzM8tzJGnL5rySoG3eerM6mu824ns1mTlO+LNqHC4fc+Ph4LTtY3RrmtpuZmZXxSNaQakrnZNg/pZiZmRXxSNYQakoHy8zMrEgTPoR7JGsIuFNlZmajoAkdqzyPZNXc2NjYoJtgZmZmPfBI1iFoWo97pvmrdMzMRtOo7Ps9kmWVGJUNyMzMpm9UToNxJ8sGxh0xMzNrMneybCBG5VOMmZmNLp+T1SOPwhwa52dmNtraP2w38X3BI1lmZmY2ME3sXLV4JMvMzMxmXL5z1dSOlkeyrO+aurGYmVl/jMr7hEeyzMzM7JBNTk4Co9OB6oY7WWY2kvxGYNZ/3q4O5MOFPXARmZmZWSe1G8ny9ZPMzMysCTySZWZmZlaB2o1k2XArO5SaH6X0IVczM2s6j2TZjHLnyszMRkVXI1mStgP7gDeA1yNiQtI84GvAUmA78JsRsUeSgL8GVgCTwIcj4tv9b3o9rFq1irGxMWbNmsWsWbNYv349e/fu5aqrrmL37t0sXLiQa6+9luOOO46IADhB0jZGMJutW7eyd+9ezj333Cmzue666wDeKek7NDwbcO2UcTbFnE2x6WbjfU7H2jlpVGqnKtMZyfrliDg7IibS/TXAvRFxOnBvug9wAXB6+lkN/G2/GlsHU43E3HDDDWzYsIH169cDsG7dOpYvX87GjRtZvnw569atA+CBBx4AOJKGZjOV
9mzuuuuuwmx27NgB8F1GJBtw7ZRxNsWcTbHpZON9TsfaOYoRqp0qHMrhwvcDN6bbNwIX5qZ/JTIPAsdLWngIzzN07r//flauXAnAypUr2bRp05vTgVdGMZvW+Vi33nprYTYrVqwAYNSyyXPtFHM2xZxNsbJsvM/pWDv/O8q10w/ddrIC+KakLZJWp2knRMRugPR7QZq+GNiRe+yzadoBJK2WtFnS5j179vTW+hqQxBVXXMEll1zCLbfcAsDLL7/M/PnzAZg/fz6t1/fiiy8C/CT38I7ZpMcMhfZRvnw2a9euBWDXrl2F2Zx44on5h0+ZTVqva8fblbNxNgeZbjbd7HOakg30VDuv5R7e+HyqoHTctXwhaVFE7JK0ALgb+D3gtog4PrfMnoiYK+l24JqI+I80/V7gyogoPONZ0j7giUN8LVWZD7xUMv9wsp3YbOAdwDPAacDDuWXOTvdPAyIi5oKzSfLZPAe8JSLe2k02UOt8OmUDo1s7zqac9znFBrrPkfQi8MMObRgUb1fFuslmuk6OiLd2WqirE98jYlf6/YKkjcBy4HlJCyNidxpCfCEt/ixwUu7hS4BdHZ7iidy5XrUiaXO3bZN0NbAfuBxYlctmU/pngRuATbmHOJu2bCLipvSQbrKBmuYznWzS8lczIrXjbMp5n1Ns0Puc1BmbVv3OFG9XxQb5N+t4uFDS0ZKObd0G3kN2ouBtwKVpsUuBW9Pt24APKfMuYG/rsGLTOJtizqac8ynmbIo5m2LOppzzGYxuRrJOADZKai2/ISLukvQQcLOkj5ANOX4gLX8H2eUbWv/2+dt9b3V9OJtizqac8ynmbIo5m2LOppzzGYCuzsmqvBHS6ohYO+h2TGXQbRv085epQ9vq0Iap1KFddWjDVOrQrjq0ocig2zbo5y9Th7bVoQ1TqUO76tCGqQyyXbXoZJmZmZk1jb9Wx8zMzKwC7mSZmZmZVWDgnSxJ75X0hKRtktZ0fkTfn3+7pEclPSxpc5o2T9Ldkp5Mv1vXCZGkL6S2fkfSORW3zdkUt22g2aQ21DIfZ9Oxbd6uitvmbIrb5u2quF3OpkhEDOwHmAU8BZwCHAE8Aiyb4TZsB+a3TfsLYE26vQb4XLq9ArgTEPAu4FvOZjSzqWs+zqb++TgbZ9OkfJxN+c+gR7KWA9si4umIeA34Ktl3Hw5aHb6X0dkUq2s2MPh8nE25uubjbIo5m3KDzsfZlBh0J6ur7zmsWN+/l7FPnE2xOmQD9czH2ZSrQz7OppizKVfHfJxNia6+VqdCmmLaTF9T4t2R+15GSd8rWXYm2+ts6vFcZeqYj7MpV4d8nE0xZ1Oujvk4mxKDHsnq5XsO+ypy38sIHPC9jAA69O9l7JWzKTbwbKC2+TibcgPPx9kUczblapqPsykx6E7WQ8Dpkt4u6QjgIrLvS5oRqvd3OTmbYgPNBmqdj7Mp5+2qmLMp5u2qmLMpU9UZ9d3+kJ3l/32y/0740xl+7lPI/hPiEeCx1vMDbwHuBZ5Mv+el6QKuT219FJhwNqOXTd3zcTb1zcfZOJsm5uNsin/8tTpmZmZmFRj04UIzMzOzRnIny8zMzKwC7mSZmZmZVcCdLDMzM7MKuJNlZmZmVgF3sszMzMwq4E6WmZmZWQX+Hwlfu68YUz3bAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlkAAABWCAYAAAD8MYO7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAQDElEQVR4nO3de6wc5X3G8e/DOVxMbBc7jsEXsMMlSCaqEJw6UVO1aqFRcOyGqkoVFBRSBZxU9BpF1LRRg9JEkIqobVTaQHoxsWoSUuHWFdAEqO2qtKT4RHYIEIKhDsY+3IpxfQKJA/n1j3kPGi87sxfvnJ3dfT7SyrszszPvPuc343dnZmcUEZiZmZlZbx3X7waYmZmZDSN3sszMzMwq4E6WmZmZWQXcyTIzMzOrgDtZZmZmZhVwJ8vMzMysAu5kjRhJKyWFpPH0+m5JV3QxnzMkTUsa630rrW5cN9Yt1451Y1jqZqA6WSmomcdPJL2Se/3BWW7LSakAllc0/725z/espL+XNLfXy4mISyLi1jbbc3HufU9FxNyIeK3Xbeo1143rpluuHddON1w3rpsZA9XJSkHNjYi5wFPAutywf+hkXjO945pblz7rBcDPAJ/Mj1RmoP6G/eC6cd10y7Xj2umG68Z1M2OoPrSkd0n6pqRDkg5I+rPcrsaZ3vxvSnoC+E4a/l5Jj0t6SdKfS3pA0uW5eX5U0mOSXpR0p6RladS/p38fSz34S6v6XBGxH7gbeLuk7ZI+K+l+4GXgTEk/JelvJU1J2i/pM0q7RiWNSbpR0guSngTe25DZdklX5l5fJelRSYclPSLpAkmbgDOAf0mf9Rq9cVfuUklbU057JF2Vm+d1km6X9OU034clTeTG/0Fq9+GU9UVVZdmM68Z10y3XjmunG66bEaqbiBjIB7AXuLhh2GqyXvQYcBawB/hYGncSEMCdwCnAHOA0YBpYCxwPXAP8GLg8vecDwKPA29L4zwDbGua3vOrPB5wOPAz8CbCd7JvRecB4atc/ATcDbwIWA/8NfDS992PAd9M8FgLbUrvH0/jtwJXp+fuB/SlDAWcDK5rlDaxsmM8O4K9SLucDzwMXpXHXAT8E1qS/zfXAA2ncucA+YGluvme5blw3daob145rx3XjuummbirbIFX9aFa4TabZANzWUGg/mxu/fqYQ0+vjgOdyhbsN+GBu/PGpsE+dpcKdBl4Cvp+KYk4qtE/npjsV+BEwJzfsstwK9m+klTe9fndJ4X4d+N128s4XblopXgPm5cZfD2zMFe69uXGrgFfS87NT5hcDx7tuXDd1rBvXjmvHdeO66aZuBuFYb9skrQI+T3ZceA5ZqPc3TLYv93xp/nVE/ETS/tz4FcAXJd2UG/YqsBw41MOmF7k0Iu7ND5AER3+GFWQr1FQaB9kKODPN0obpv1+yvNOBJ7po51LgxYg43LCcidzrZ3LPXwZOkjQeEXsk/R5ZcZ8n6evAxyPiQBft6IrrxnXTLdeOa6cbrpvRqZuhOicL+BLwLbJdePOBT5PtSsyL3PMpsiIEQNmJecty4/cBH46IU3KPOREx2TCf2ZZf9j6ybweLcm2cHxHnpfFTZAU544yS+e4j23XdapmNDgALJc1rWM7+gumPnnHE5oj4ObKVMIDPtfO+HnLduG665dpx7XTDdTMidTNsnax5wKGImJZ0HnBVi+m3Au+QtCadFPdxYEFu/BeBT0o6F0DSAkm/BhARPyL7hnBmrz9EJyJiCvgG8HlJ8yUdJ+ksSb+QJrkd+B1JyyUtINstXeRvgE9IulCZsyWtSOOepeCzRsQ+4D+B69NJmz8NfARo+SsaSedK+iVJJ5IdC3+FbHfubHLduG665dpx7XTDdTMidTNsnazfB66UNA3cBHy1bOL0R78M+ALwAtk3hYfIettExG3AXwJ3SPo/YBfwy7lZ/DHwNWW/9viVHn+WTnwIOAF4BDgI
/COwJI37Etnx691k35zuKJpJRHwN+CywGThMdpLiwjT6erKV+CVJn2jy9svIjn0fALYAn4qIe9po+4nADWT5P0N2MuQftvG+XnLduG665dpx7XTDdTMidaN0Qpfx+vVIniG75sd/9bs9NhhcN9Yt1451w3UzOIZtT1bHJF2i7NodJwGfIjvRbbLPzbKac91Yt1w71g3XzWCqpJMl6T3KLtS1R1LZcdU6+Hngf8h+mnkR8KsRcaTKBQ5YPrNqgLJx3dTIgGXj2qmRAcrGdTOAen64UNnVW79Hdjz4aeBB4LKIeKSnCxpQzqeYsynmbIo5m3LOp5izKeZseqOKPVmrgT0R8WTqZX8FeF8FyxlUzqeYsynmbIo5m3LOp5izKeZseqCKi5Eu4+gLij0NvKNxIknrya5iy5w5cy5cuXJlBU2ppR+QFeuMN+QzqtksW7aM/fv3/zA3yLWTpGzyhwacTeJsWvI2p0A72xxn8zqvVzmPPvroSxGxoNV0VXSyGi+oBk0uDhYRtwC3AKxatSo2bdpUQVPqZ2Ji4mCTwUflM6rZ3HvvvWzYsGG6YbBrh9ezeblhsLPB2bTibU6xdrY5zuYoXq+SiYmJqXamq+Jw4dMcfdXW5WTXo7DMqzifphYvXgzZNVRmOJskZXN8bpCzSZxNS97mFPA2p5iz6Y0qOlkPAudIequkE8juDr61guUMqldwPk2tWrUKsvtEOZsGKZsTnM0bOZuWvM0p4G1OMWfTGz0/XBgRr0r6LbIrt44BfxcRD/d6OQPO+TQxPj4O8BTO5g1SNs/gbN7A2bTF25wmvM0p5mx6o4pzsoiIu4C7qpj3MHA+pQ5FxETryUbSdESs6ncjeuXCCy8EYHKyJ9dTHKpses3bnFK13+bMrCu90sE6V/ts6q6STpaZDbduNvr5DXv+/fnnPepwmZnVgjtZZtaxxg5TY+coP6zZ+Pw4M7Nh5U6WmZVq1klqNk2rYe10qBqnabbcdtpjZlYH7mSZWUvHusep2/e7Q2Vmg8ydLDOrtcZzttzpMrNB4U6WmZUqOmG9X3r8q0Qzs8pUcTFSMxtCdTp0V5d2mJmV8Z4sM2tbv/dkzSzfl30ws0HgTpaZlep3x6qVurfPzEaXDxeaWamZPUV1O+k83y4zszryniwza6mOFw/NHzqcO3dun1tjVp1R/iIx6KcGuJNlZm2rY2fLzIZPOxc4hvp3vNzJMrOOTU5OuqNl1qFO1pm6dx6q1ElOdfrVczPuZJmZmVWs0y8lZdPXuVNhR3Mny8y6MrM3q9nNoc3MOnUst9+CenY+/etCM+tas41aHTd0ZlZvvfhyVscveO5kmZmZ2VCoW0fLnSwz67m6XVPLzEZHnTpa7mSZWSXqtKEzs3ob1i9mPvHdzCpRdEJ82eUffGkIs9HU6/W+LifDj1wnazY24P3+o5rVUbvrRTudMF8U1Wy4DOs67cOFZtZ3ZR2wmcMIRfcqnJycZHp6utL2mR2rYT0c1mu9zqnfnbaR25NlZv3X6tIP7X6r9X9aZsOnikOH/dpWuJNlZh3p5Qar8TytZnupzGy0HOsFjuu03fDhQjNrWzcdrFa3Byk6DGhmBoO9bfCeLDMzM6u9fGer8ctbXTti7mSZWc807unKbwjbvYSDmVmjQT2VwIcLzaxtrTZsnWz4BmUjaWbWLe/JMrO2dXqBv/xeq1aXaTAzGzbuZJlZpcou1+DOlY0a1/xoaauTJWkvcBh4DXg1IiYkLQS+CqwE9gK/HhEHJQn4C2AN8DLw4Yj4Vu+bXg8rV65k3rx5jI2NMT4+zs6dOzl06BDXXnstU1NTLFmyhBtuuIH58+cTEQCnStrDCGSzbt06Tj75ZMbGxhgbG2PTpk2l2dx4440Ab5f0bYY8G+g8H2pSO7Pxn8SgZjMbnE0xb3PKdVE7p49K7VSlk3OyfjEizo+IifR6A3BfRJwD3JdeA1wCnJMe64G/7lVj62rbtm3s2rWLnTt3ArBx40ZWr17N
li1bWL16NRs3bgTg/vvvBziREcrm5ptvZvPmzWzatAkoz2bfvn0A32FEsoHO8qEGtZM//Ff1lZQHLZvZ5GyKeZtTrsPaOYkRqp0qHMuJ7+8Dbk3PbwUuzQ3/cmQeAE6RtOQYltNT+Vt09OJx5MgRdu/efdSwHTt2sHbtWgDWrl3L9u3bAdixYwfAS3XNZjaUZbNmzRoARjUbKM+HmtROvw71DUI2/eJsinmbU65F7fzvKNdOL7TbyQrgG5ImJa1Pw06NiCmA9O/iNHwZsC/33qfTsKNIWi9pp6SdBw8e7K71NSCJq6++mssvv5w77rgDgBdffJFFixYBsGjRImY+3/PPPw/w49zbnU0um9NOOy3/9qbZpPmOZD64dpwNzqZMFducYckGuqqdI7m3D30+VVA67lo+kbQ0Ig5IWgzcA/w2sDUiTslNczAiFki6E7g+Iv4jDb8PuCYiCr/2SjoMPHaMn6Uqi4AXSsYfT7YRGwfeBjwFnA3syk1zfnp9NhARsQCcTZLP5hngzRHxlnaygVrn0yobGN3acTblvM0p1tdtjqTngR+0aEO/eL0q1k42nVoREW9pNVFbJ75HxIH073OStgCrgWclLYmIqbQL8bk0+dPA6bm3LwcOtFjEY7lzvWpF0s522ybpOmAauApYl8tme/qxwM3A9txbnE1DNhFxW3pLO9lATfPpJJs0/XWMSO04m3Le5hTr9zYndcY6qt/Z4vWqWD//Zi0PF0p6k6R5M8+Bd5OdKLgVuCJNdgXwz+n5VuBDyrwTODRzWHHYOJtizqac8ynmbIo5m2LOppzz6Y929mSdCmyRNDP95oj4V0kPArdL+gjZLsf3p+nvIrt8w8zPPn+j562uD2dTzNmUcz7FnE0xZ1PM2ZRzPn3Q1jlZlTdCWh8Rt/S7Hc30u239Xn6ZOrStDm1opg7tqkMbmqlDu+rQhiL9blu/l1+mDm2rQxuaqUO76tCGZvrZrlp0sszMzMyGjW8QbWZmZlYBd7LMzMzMKtD3Tpak90h6TNIeSRtav6Pny98r6SFJuyTtTMMWSrpH0uPp35nrhEjSF1Jbvy3pgorb5myK29bXbFIbapmPs2nZNq9XxW1zNsVt83pV3C5nUyQi+vYAxoAngDOBE4DdwKpZbsNeYFHDsD8FNqTnG4DPpedrgLsBAe8EvulsRjObuubjbOqfj7NxNsOUj7Mpf/R7T9ZqYE9EPBkRR4CvkN37sN/qcF9GZ1OsrtlA//NxNuXqmo+zKeZsyvU7H2dTot+drLbuc1ixnt+XsUecTbE6ZAP1zMfZlKtDPs6mmLMpV8d8nE2Jtm6rUyE1GTbb15R4V+TuyyjpuyXTzmZ7nU09llWmjvk4m3J1yMfZFHM25eqYj7Mp0e89Wd3c57CnIndfRuCo+zIC6Njvy9gtZ1Os79lAbfNxNuX6no+zKeZsytU0H2dTot+drAeBcyS9VdIJwAfI7pc0K1Tvezk5m2J9zQZqnY+zKef1qpizKeb1qpizKVPVGfXtPsjO8v8e2a8T/miWl30m2S8hdgMPzywfeDNwH/B4+ndhGi7gptTWh4AJZzN62dQ9H2dT33ycjbMZxnycTfHDt9UxMzMzq0C/DxeamZmZDSV3sszMzMwq4E6WmZmZWQXcyTIzMzOrgDtZZmZmZhVwJ8vMzMysAu5kmZmZmVXg/wHSmUmPcBCzsgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 720x576 with 8 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "## visualize predicted masks\n",
    "# Side-by-side comparison of ground-truth and predicted masks for a few\n",
    "# validation batches. Relies on val_dl, BATCH_SIZE, preds_valid,\n",
    "# preds_valid_clf, predict_mask and the best_threshold_* values defined in\n",
    "# earlier cells (not visible here).\n",
    "start = 3\n",
    "rows = 10\n",
    "\n",
    "#cmaps = {0: \"Reds\", 1: \"Blues\", 2: \"Greens\", 3: \"Oranges\"}\n",
    "# NOTE(review): `titles` is defined but never used below — set_title uses\n",
    "# the literals 'Target'/'Predictions' instead.\n",
    "titles = {0: \"Class=1\", 1: \"Class=2\", 2: \"Class=3\", 3: \"Class=4\"}\n",
    "\n",
    "cnt = 0\n",
    "for idx, (img, mask) in enumerate(val_dl):\n",
    "    # idx is the true batch index, so idx*BATCH_SIZE+j stays aligned with\n",
    "    # preds_valid even though the first `start` batches are skipped.\n",
    "    if idx<start:\n",
    "        continue\n",
    "    mask = mask.numpy()\n",
    "    img = img.numpy()\n",
    "    for j in range(BATCH_SIZE):#BATCH_SIZE=8\n",
    "        cnt+=1\n",
    "        # One figure per sample: 8 panels = (target, prediction) per class.\n",
    "        fig, axes = plt.subplots(ncols=8, figsize=(10, 8))\n",
    "        for ch in range(4):\n",
    "            # Optional per-class classifier probability, forwarded to\n",
    "            # predict_mask together with the tuned thresholds.\n",
    "            if preds_valid_clf is not None:\n",
    "                _preds_valid_clf = preds_valid_clf[idx*BATCH_SIZE+j][ch]\n",
    "            else:\n",
    "                _preds_valid_clf = None\n",
    "            pred_mask = predict_mask(preds_valid[idx*BATCH_SIZE+j][ch], best_threshold_EMPTY[ch], \n",
    "                                     best_threshold_MASK[ch], best_threshold_CLF, _preds_valid_clf)\n",
    "            #axes[ch*2].imshow(img[j][0], cmap='gray')\n",
    "            axes[ch*2].imshow(mask[j][ch], cmap='gray', alpha=0.2)\n",
    "            axes[ch*2].set_title('Target')\n",
    "            \n",
    "            #axes[ch*2+1].imshow(img[j][0], cmap='gray')\n",
    "            axes[ch*2+1].imshow(pred_mask, cmap='gray', alpha=0.2)\n",
    "            axes[ch*2+1].set_title('Predictions')\n",
    "        # Stop after `rows` samples; the flag check is repeated in the outer\n",
    "        # loop because `break` only exits one level.\n",
    "        if cnt>rows:\n",
    "            break\n",
    "    if cnt>rows:\n",
    "            break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Predict the test set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "import glob\n",
    "from dataset.dataset import prepare_testset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(3698, '10055ff')"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Collect test image ids: strip the directory and the file extension.\n",
    "# os.path.basename/splitext are robust to the platform path separator and to\n",
    "# extensions that are not exactly 4 characters (the original sliced [:-4]).\n",
    "test_fnames = [os.path.splitext(os.path.basename(f))[0] for f in glob.glob('../data/raw/test/*')]\n",
    "len(test_fnames), test_fnames[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Build the test DataLoader using the batch/worker/image-size settings\n",
    "# defined in earlier configuration cells.\n",
    "test_dl = prepare_testset(BATCH_SIZE, NUM_WORKERS, IMG_SIZE)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Predict with a single model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "use TTA\n",
      "CPU times: user 27min 11s, sys: 7min 48s, total: 35min\n",
      "Wall time: 3min 40s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "# Predict sigmoid mask probabilities and per-class classifier scores for the\n",
    "# whole test set with TTA enabled. `net` and predict_proba come from earlier\n",
    "# cells (not visible here); the logged wall time was ~3-4 min.\n",
    "preds_test, preds_test_clf = predict_proba(net, test_dl, device, multi_gpu=multi_gpu, mode='test', tta=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "((3698, 4, 512, 768), (3698, 4))"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Sanity check: stored output shows (3698, 4, 512, 768) mask probabilities\n",
    "# and (3698, 4) classifier scores — one row per test image, 4 classes.\n",
    "preds_test.shape, preds_test_clf.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "# ## visualize predicted masks\n",
    "# start = 0\n",
    "# total = 19\n",
    "# ch=0\n",
    "\n",
    "# fig=plt.figure(figsize=(15, 20))\n",
    "# cnt = 0\n",
    "# for idx, img in enumerate(test_dl):\n",
    "#     if idx<start:\n",
    "#         continue\n",
    "#     for j in range(BATCH_SIZE):#BATCH_SIZE=8\n",
    "#         cnt+=1\n",
    "#         if preds_test_clf is not None:\n",
    "#             _preds_test_clf = preds_test_clf[idx*BATCH_SIZE+j][ch]\n",
    "#         else:\n",
    "#             _preds_test_clf = None\n",
    "#         pred_mask = predict_mask(preds_test[idx*BATCH_SIZE+j][ch], best_threshold_EMPTY[ch], \n",
    "#                                  best_threshold_MASK[ch], best_threshold_CLF, _preds_test_clf)\n",
    "#         #if pred_mask.float().mean()==0:\n",
    "#         #    continue\n",
    "#         ax = fig.add_subplot(5, 4, cnt)\n",
    "#         plt.imshow(img[j][0].numpy())\n",
    "#         plt.imshow(pred_mask, alpha=0.2, cmap=\"gray\")\n",
    "#         if pred_mask.sum()>0:\n",
    "#             plt.title('Predict Mask')\n",
    "#         else:\n",
    "#             plt.title('Predict Empty')\n",
    "#         if cnt>total:\n",
    "#             break\n",
    "#     if cnt>total:\n",
    "#             break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Ensemble sigmoid predictions from multiple models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "## Configuration for the ensemble/test-prediction section.\n",
    "device = set_n_get_device(\"0,1,2,3\", data_device_id=\"cuda:0\")#0, 1, 2, 3, IMPORTANT: data_device_id is set to free gpu for storing the model, e.g.\"cuda:1\"\n",
    "multi_gpu = [0,1,2,3] #None#[0, 1]#use 2 gpus\n",
    "\n",
    "# NOTE(review): SEED is None and the manual_seed_all call below is commented\n",
    "# out, so any stochastic step in this section is not seeded/reproducible.\n",
    "SEED = None\n",
    "debug = False# if True, load 100 samples\n",
    "IMG_SIZE = (512, 768) #(512, 768)\n",
    "BATCH_SIZE = 64\n",
    "NUM_WORKERS = 24\n",
    "#torch.cuda.manual_seed_all(SEED)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# checkpoint_path_list = ['../checkpoint/deeplabv3plus_resnet_512x768_v6_seed2001/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v6_seed2003/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v6_seed2008/best.pth.tar'\n",
    "#                        ]#[35000, 35000, 35000, 35000], [0.6, 0.6, 0.6, 0.6], pos_ratio=.450, LB=.659\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/deeplabv3plus_resnet_400x600_v7_seed2013/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_400x600_v7_seed2014/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_400x600_v7_seed2015/best.pth.tar'\n",
    "#                        ]#[20000, 20000, 20000, 20000], [0.55, 0.55, 0.55, 0.55], pos_ratio=.465, LB=.656\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/deeplabv3plus_resnet_512x768_v4_seed4567/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v4_seed7890/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v4_seed8901/best.pth.tar'\n",
    "#                        ]#[30000, 30000, 30000, 30000], [0.6, 0.6, 0.6, 0.6], pos_ratio=.455, LB=.658\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/deeplabv3plus_resnet_512x768_v6_seed2001/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v10_seed2021/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v4_seed4567/best.pth.tar'\n",
    "#                        ]#[30000, 30000, 30000, 30000], [0.6, 0.6, 0.6, 0.6], pos_ratio=.444, LB=.662\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/fpn_resnet34_512x768_v1_seed2034/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2035/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2036/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2037/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2038/best.pth.tar'\n",
    "#                        ]#[25000, 25000, 25000, 25000], [0.6, 0.6, 0.6, 0.6], pos_ratio=.412, LB=.655\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/fpn_resnet34_1024x1536_v1_seed2043/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_1024x1536_v1_seed2044/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_1024x1536_v1_seed2045//best.pth.tar', \n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.65, pos_ratio=., LB=.6577\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2047/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2048/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2049/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2050/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2051/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.7, pos_ratio=.411, LB=.6633\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2048/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2050/best.pth.tar', \n",
    "#                         '../checkpoint/deeplabv3plus_resnet_512x768_v13_seed2051/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.72, pos_ratio=.404, LB=.659\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/unet_resnet34_512x768_v1_seed2060/best.pth.tar', \n",
    "#                         '../checkpoint/unet_resnet34_512x768_v1_seed2061/best.pth.tar', \n",
    "#                         '../checkpoint/unet_resnet34_512x768_v1_seed2062/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.4, 0.4, 0.4, 0.4], 0.7, pos_ratio=.403, LB=.6574\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/fpn_resnet34_1024x1536_v1_seed2043/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_1024x1536_v1_seed2044/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_1024x1536_v1_seed2045//best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_1024x1536_v1_seed2052/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_1024x1536_v1_seed2053/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.66, pos_ratio=.417, LB=.6574\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/pspnet_resnet34_512x768_v1_seed2070/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_512x768_v1_seed2071/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_512x768_v1_seed2072/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_512x768_v1_seed2073/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_512x768_v1_seed2074/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.62, pos_ratio=.405, LB=.6561\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/unet_resnet34_384x576_v1_seed2080/best.pth.tar', \n",
    "#                         '../checkpoint/unet_resnet34_384x576_v1_seed2081/best.pth.tar', \n",
    "#                         '../checkpoint/unet_resnet34_384x576_v1_seed2082/best.pth.tar', \n",
    "#                         '../checkpoint/unet_resnet34_384x576_v1_seed2083/best.pth.tar', \n",
    "#                         '../checkpoint/unet_resnet34_384x576_v1_seed2084/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.32, 0.32, 0.32, 0.32], 0.78, pos_ratio=.405, LB=.6637\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/fpn_resnet34_384x576_v1_seed2100/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_384x576_v1_seed2101/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_384x576_v1_seed2102/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_384x576_v1_seed2103/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_384x576_v1_seed2104/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.75, pos_ratio=.417, LB=.6568\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/pspnet_resnet34_384x576_v1_seed2090/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_384x576_v1_seed2091/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_384x576_v1_seed2092/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_384x576_v1_seed2093/best.pth.tar', \n",
    "#                         '../checkpoint/pspnet_resnet34_384x576_v1_seed2094/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.65, pos_ratio=.402, mean_pixel=.171, LB=.6523\n",
    "\n",
    "# checkpoint_path_list = ['../checkpoint/fpn_resnet34_512x768_v1_seed2120/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2121/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2122/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2123/best.pth.tar', \n",
    "#                         '../checkpoint/fpn_resnet34_512x768_v1_seed2124/best.pth.tar'\n",
    "#                        ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.68, pos_ratio=.396, mean_pixel=.152, 8.48MB, LB=.6671\n",
    "\n",
    "checkpoint_path_list = ['../checkpoint/fpn_resnet34_512x768_v1_seed2150/best.pth.tar', \n",
    "                        '../checkpoint/fpn_resnet34_512x768_v1_seed2151/best.pth.tar', \n",
    "                        '../checkpoint/fpn_resnet34_512x768_v1_seed2152/best.pth.tar', \n",
    "                        '../checkpoint/fpn_resnet34_512x768_v1_seed2153/best.pth.tar', \n",
    "                        '../checkpoint/fpn_resnet34_512x768_v1_seed2154/best.pth.tar'\n",
    "                       ]#[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.68, pos_ratio=.390, mean_pixel=.152, 8.35MB, LB=.6650"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "#from model.deeplab_model_kaggler.deeplab import DeepLab, predict_proba\n",
    "from model.model_resnet_fpn import FPNResNet34, predict_proba\n",
    "#from model.model_plain_unet import UNetResNet34, predict_proba\n",
    "#from model.model_pspnet import PSPNet, predict_proba"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Loading checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2150/best.pth.tar\n",
      "use TTA\n",
      "Complete prediction for checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2150/best.pth.tar\n",
      "\n",
      "Loading checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2151/best.pth.tar\n",
      "use TTA\n",
      "Complete prediction for checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2151/best.pth.tar\n",
      "\n",
      "Loading checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2152/best.pth.tar\n",
      "use TTA\n",
      "Complete prediction for checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2152/best.pth.tar\n",
      "\n",
      "Loading checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2153/best.pth.tar\n",
      "use TTA\n",
      "Complete prediction for checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2153/best.pth.tar\n",
      "\n",
      "Loading checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2154/best.pth.tar\n",
      "use TTA\n",
      "Complete prediction for checkpoint_path:  ../checkpoint/fpn_resnet34_512x768_v1_seed2154/best.pth.tar\n",
      "\n",
      "CPU times: user 2h 10min 6s, sys: 40min 49s, total: 2h 50min 56s\n",
      "Wall time: 15min 55s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "## run\n",
    "preds_test = None\n",
    "preds_test_clf = None\n",
    "\n",
    "for checkpoint_path in checkpoint_path_list:\n",
    "    print('Loading checkpoint_path: ', checkpoint_path)\n",
    "    if 'deeplab' in checkpoint_path:\n",
    "        net = DeepLab(num_classes=4,\n",
    "                  backbone='resnet',#resnet34, resnet101\n",
    "                  output_stride=16,#default 16, 8\n",
    "                  sync_bn=None,\n",
    "                  freeze_bn=False,\n",
    "                  debug=False, \n",
    "                  clf_path=True\n",
    "                 ).cuda(device=device)\n",
    "    elif 'fpn' in checkpoint_path:\n",
    "        net = FPNResNet34(debug=False, downscale=False).cuda(device=device)#downscale=False!!!!!!!!!!\n",
    "    elif 'unet' in checkpoint_path:\n",
    "        net = UNetResNet34(debug=False).cuda(device=device)\n",
    "    elif 'psp' in checkpoint_path:\n",
    "        net = PSPNet(debug=False).cuda(device=device)\n",
    "    \n",
    "    net, _ = load_checkpoint(checkpoint_path, net)\n",
    "    if multi_gpu is not None:\n",
    "        net = nn.DataParallel(net, device_ids=multi_gpu)\n",
    "    \n",
    "    ##predict\n",
    "    _preds_test, _preds_test_clf = predict_proba(net, test_dl, device, multi_gpu=multi_gpu, mode='test', tta=True)\n",
    "    _preds_test = sigmoid(_preds_test)\n",
    "    _preds_test_clf = sigmoid(_preds_test_clf)\n",
    "    if preds_test is None:\n",
    "        preds_test = _preds_test\n",
    "        preds_test_clf = _preds_test_clf\n",
    "    else:\n",
    "        preds_test += _preds_test\n",
    "        preds_test_clf += _preds_test_clf\n",
    "    print('Complete prediction for checkpoint_path: ', checkpoint_path)\n",
    "    print()\n",
    "\n",
    "preds_test /= len(checkpoint_path_list)\n",
    "preds_test_clf /= len(checkpoint_path_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [],
   "source": [
    "## for second-stage ensemble of (ensemble of model architecture)\n",
    "\n",
    "#preds_test1 = copy.deepcopy(preds_test)\n",
    "#preds_test2 = copy.deepcopy(preds_test)\n",
    "\n",
    "#preds_test = (preds_test1 + preds_test2) / 2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(3698, 4, 512, 768)"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "preds_test.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# # best_threshold_EMPTY, best_threshold_MASK, best_score = ([35000, 35000, 35000, 35000], \n",
    "# #                                                          [0.6, 0.6, 0.6, 0.6], None)\n",
    "\n",
    "# best_threshold_EMPTY, best_threshold_MASK, best_score = ([25000, 25000, 25000, 25000], \n",
    "#                                                          [0.6, 0.6, 0.6, 0.6], None)\n",
    "# best_threshold_EMPTY, best_threshold_MASK, best_score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "([1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.68)"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "best_threshold_EMPTY, best_threshold_MASK, best_threshold_CLF = \\\n",
    "[1, 1, 1, 1], [0.3, 0.3, 0.3, 0.3], 0.68\n",
    "\n",
    "best_threshold_EMPTY, best_threshold_MASK, best_threshold_CLF"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## build submission"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "import PIL\n",
    "from dataset.mask_functions import mask2rle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "31f55196a5ff4cbc8e74bd1ba9f13831",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(IntProgress(value=0, max=3698), HTML(value='')))"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "mean_pixel:  0.15188780375938457\n",
      "CPU times: user 1min 16s, sys: 5.28 s, total: 1min 21s\n",
      "Wall time: 38.5 s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "\n",
    "ensemble = True #False#True\n",
    "\n",
    "mean_pixel = 0.0\n",
    "\n",
    "#### Generate rle encodings\n",
    "rles = []\n",
    "for idx in tqdm_notebook(range(preds_test.shape[0])):# p holds sigmoid probabilities averaged over models (sigmoid already applied per model above)\n",
    "    p = preds_test[idx]\n",
    "    if preds_test_clf is not None:\n",
    "        p2 = preds_test_clf[idx]\n",
    "    else:\n",
    "        p2 = [None]*4\n",
    "    for ch in range(4):\n",
    "        pred_mask = predict_mask(p[ch], best_threshold_EMPTY[ch], best_threshold_MASK[ch], \n",
    "                                 best_threshold_CLF, p2[ch], use_sigmoid=(not ensemble))# use_sigmoid=False when ensembling: sigmoid was already applied before averaging\n",
    "        if pred_mask.sum()>0:#predicted non-empty mask\n",
    "            mean_pixel += pred_mask.mean()\n",
    "            pred_mask = cv2.resize(pred_mask.astype('float32'), (525, 350))\n",
    "            pred_mask = (pred_mask>0.5).astype(np.uint8)\n",
    "            rles.append(mask2rle(pred_mask))\n",
    "        else:\n",
    "            rles.append('')\n",
    "print('mean_pixel: ', mean_pixel/(preds_test.shape[0]*4))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "14792\n"
     ]
    }
   ],
   "source": [
    "img_id_ch = []\n",
    "for fname in test_fnames:\n",
    "    for name in ['Fish', 'Flower', 'Gravel', 'Sugar']:\n",
    "        img_id_ch.append(fname+'.jpg_%s'%name)\n",
    "\n",
    "sub_df = pd.DataFrame({'Image_Label': img_id_ch, 'EncodedPixels': rles})\n",
    "print(len(sub_df.index))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Image_Label</th>\n",
       "      <th>EncodedPixels</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>10055ff.jpg_Fish</td>\n",
       "      <td></td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>10055ff.jpg_Flower</td>\n",
       "      <td></td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>10055ff.jpg_Gravel</td>\n",
       "      <td>522 33 857 68 1203 90 1550 95 1870 2 1887 114 ...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>10055ff.jpg_Sugar</td>\n",
       "      <td>5 136 242 90 353 150 586 103 702 155 929 113 1...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>6e54759.jpg_Fish</td>\n",
       "      <td></td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          Image_Label                                      EncodedPixels\n",
       "0    10055ff.jpg_Fish                                                   \n",
       "1  10055ff.jpg_Flower                                                   \n",
       "2  10055ff.jpg_Gravel  522 33 857 68 1203 90 1550 95 1870 2 1887 114 ...\n",
       "3   10055ff.jpg_Sugar  5 136 242 90 353 150 586 103 702 155 929 113 1...\n",
       "4    6e54759.jpg_Fish                                                   "
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sub_df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# def summary_test_pred(preds_test, preds_test_clf, \n",
    "#                       best_threshold_MASK, best_threshold_EMPTY, best_threshold_CLF, use_sigmoid=True):\n",
    "#     if use_sigmoid:\n",
    "#         preds_test = sigmoid(preds_test)#.reshape(n, -1)\n",
    "#         if preds_test_clf is not None:\n",
    "#             preds_test_clf = sigmoid(preds_test_clf)\n",
    "#     n, c = preds_test.shape[0], preds_test.shape[1]\n",
    "#     MASK_THRESHOLD = np.concatenate([best_threshold_MASK]*n).reshape(-1, 1)\n",
    "#     pred = (preds_test.reshape(n*c, -1)>MASK_THRESHOLD).astype(np.int)\n",
    "#     EMPTY_THRESHOLD_4ch = np.concatenate([best_threshold_EMPTY]*n)#.reshape(-1, 1)\n",
    "#     pred_clf = (pred.sum(axis=1)<EMPTY_THRESHOLD_4ch).astype(np.int)\n",
    "#     pred[pred_clf==1, ] = 0\n",
    "#     if preds_test_clf is not None:\n",
    "#         pred[preds_test_clf.reshape(n*c, -1).squeeze()<best_threshold_CLF, ] = 0\n",
    "#     print('best_threshold_MASK: ', best_threshold_MASK)\n",
    "#     print('best_threshold_EMPTY: ', best_threshold_EMPTY)\n",
    "#     print('best_threshold_CLF: ', best_threshold_CLF)\n",
    "#     print('pred pos_ratio: ', (pred.sum(axis=1)>0).mean())\n",
    "#     print('pred mean pixel: ', pred.mean())\n",
    "    \n",
    "# summary_test_pred(preds_test, preds_test_clf, \n",
    "#                   best_threshold_MASK, best_threshold_EMPTY, best_threshold_CLF, use_sigmoid=(not ensemble))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "95618"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "idx = 2\n",
    "ch = 0\n",
    "\n",
    "# check the correctness of transformation\n",
    "pred_mask = predict_mask(preds_test[idx][ch], best_threshold_EMPTY[ch], best_threshold_MASK[ch], \n",
    "                         best_threshold_CLF, preds_test_clf[idx][ch], use_sigmoid=(not ensemble))\n",
    "pred_mask.sum()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.image.AxesImage at 0x7faa13e6c898>"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAW4AAAD8CAYAAABXe05zAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAS4UlEQVR4nO3db4xcV3nH8e+D7dgESIwDRI5t4URYCFS1JrUSo1SIxoQkLsK8CDQRAoNcWSoggagETiu1atUX0BcEkFDAJbROBflDgMaK0rqRE4RUFRMnMZBgQgyN8MohLuRvQQ0Ynr6Ys8l4Pd6d3Z07c8/M9yON9t5zz+w8O7PzmzNn7r0TmYkkqR4vGnUBkqT5MbglqTIGtyRVxuCWpMoY3JJUGYNbkirTSHBHxBUR8XBEHImIXU3chiRNqhj0ftwRsQT4EXAZMAXcC1yTmT8Y6A1J0oRqYsR9EXAkM3+Smb8Gbga2NXA7kjSRljbwO9cAR7vWp4CLZ7vCGbE8V/CSBkqRpDr9H7/k1/lc9NrWRHD3uqFT5mMiYiewE2AFZ3JxbGmgFEmq04Hcf9ptTUyVTAHrutbXAsdmdsrM3Zm5KTM3LWN5A2VI0nhqIrjvBTZExPkRcQZwNbC3gduRpIk08KmSzDwRER8C9gFLgC9l5kODvh1JmlRNzHGTmXcCdzbxuyVp0nnkpCRVxuCWpMoY3JJUGYNbkipjcEtSZQxuSaqMwS1JlTG4JakyBrckVcbglqTKGNySVBmDW5IqY3BLUmUMbkmqjMEtSZUxuCWpMga3JFXG4JakyhjcklQZg1uSKmNwS1JlDG5JqozBLUmVWTrqAjQZ9h07BMDl5218fnk+Lj9v46BLkqplcGvR5hPECwnt+V7PkNe4M7i1KAsN4iZ112SIaxwZ3Jq3Nob16XRP0Ujjwg8nNS81hXa3fccOVVu7NJMjbp1inANu5t/mSFw1Mrj1vHEO7NNxKkU1cqpEwGSGdrdJ//tVF4N7wjn3+wLvB9XCqZIJYSj1x6kT1cDgHjMG9GAY4GqzOYM7Ir4EvA04npm/V9pWAbcA64FHgXdl5pMREcBngK3Ar4D3Zeb9zZSuaYZ1czyYR23Uzxz3PwNXzGjbBezPzA3A/rIOcCWwoVx2AtcPpkydjqE9PNOfB3ifa9TmHHFn5rciYv2M5m3Am8vyHuCbwMdL+42ZmcC3I2JlRKzOzMcGVfAkMijax6kUjdJC57jPnQ7jzHwsIl5V2tcAR7v6TZW2U4I7InbSGZWzgjMXWMb4MqzrYIBrFAa9O2D0aMteHTNzd2ZuysxNy1g+4DKk4fKFVsO00OB+PCJWA5Sfx0v7FLCuq99a4NjCy5tMhoCk2Sw0uPcC28vyduD2rvb3Rsdm4Gnnt+fH0K6XH1xqWOYM7oi4Cfgv4LURMRURO4BPAJdFxCPAZWUd4E7gJ8AR4B+BDzRStdRihrea1s9eJdecZtOWHn0T+OBii5IknZ5HTg5BvwdxOFIbH+5toiYZ3A2bGca9ntAG9vgywNUEg7tBswWyYT1Z9h07ZHhrYDyta0MMZklNMbgbYGhLapLBLQ2JL+gaFIN7wHxySmqawS1JlXGvkgFxpC1pWBxxL4AhLWmUHHHP03RoG96SRsXg7pNBLaktnCqRpMoY3JJUGYO7D06TaBA8V4kGxeCWpMoY3JJUGYN7Dk6TSGobg1uSKmNwz8LRtgbFDyY1SAa3JFXG4Jakyhjcs/DtrQbB/yMNmsEtSZUxuCWpMga3JFXG4J6D85NaDP9/1ASDW5IqY3BLUmUM7jl49KQWymkSNcXglqTKGNxSAxxtq0kGtyRVZs7gjoh1EXFPRByOiIci4sOlfVVE3BURj5SfLy/tERGfjYgjEfG9iLiw6T+iSY6cJLVNPyPuE8BfZObrgM3AByPi9cAuYH9mbgD2l3WAK4EN5bITuH7gVUst5ou9mjZncGfm
Y5l5f1l+FjgMrAG2AXtKtz3AO8ryNuDG7Pg2sDIiVg+8ckmaUPOa446I9cAbgAPAuZn5GHTCHXhV6bYGONp1tanSJo09R9sahr6DOyJeCnwN+EhmPjNb1x5t2eP37YyIgxFx8Dc8128ZI+GTUVKb9BXcEbGMTmh/OTO/Xpofn54CKT+Pl/YpYF3X1dcCx2b+zszcnZmbMnPTMpYvtH5Jmjj97FUSwA3A4cz8VNemvcD2srwduL2r/b1l75LNwNPTUyqSpMVb2kefS4D3AN+PiOnjv/8S+ARwa0TsAH4KvLNsuxPYChwBfgW8f6AVS9KEi8xTpp+H7qxYlRfHllGXMSfPW6LZ+FmIBulA7ueZfKLXZ4YeOSlJtTG4JakyBvc8+FZYUhsY3NIA+KKuYTK4JakyBre0SI62NWwGtyRVxuCWFsHRtkbB4Jakyhjc8+QIS9KoGdySVBmDW5Iq08/ZATVDr+kST0A1eZw206g44h4Qn8SShsXgHiDDe3L4WGuUDO4B8wk9/nyMNWoGdwN8YktqksHdEMNbUlMMbmkefEFWG7g7oDQLg1ptZHBLPRjYajODu0HTT34PzqmHga0aOMctFYa2amFwD4GB0H4+RqqJwT0kl5+30XCQNBAG95AZ3pIWy+AeAUff7eJjodoY3CNkYEhaCHcH1ETwRVLjxBH3iDlt0jzvX40bg7slDJdmeL9qHBncLWLIDI7vZDTOnONumcvP2+gh8rMwjCWDu5Um/RwnhrM0uzmDOyJWAN8Clpf+t2Xm30TE+cDNwCrgfuA9mfnriFgO3Aj8IfAL4E8z89GG6h9rkzT6Nqyl/vUzx/0ccGlm/gGwEbgiIjYDnwSuy8wNwJPAjtJ/B/BkZr4GuK700wKN+1ztuP99UhPmDO7s+N+yuqxcErgUuK207wHeUZa3lXXK9i0REQOreEKNY8CN298jDUtfc9wRsQS4D3gN8Dngx8BTmXmidJkC1pTlNcBRgMw8ERFPA+cAP5/xO3cCOwFWcObi/ooJUvP0iUEtDUZfuwNm5m8zcyOwFrgIeF2vbuVnr9F1ntKQuTszN2XmpmUs77deUWcA1liz1Fbz2o87M58CvglsBlZGxPSIfS1wrCxPAesAyvazgScGUazqZGhLgzVncEfEKyNiZVl+MfAW4DBwD3BV6bYduL0s7y3rlO13Z+YpI25J0sL0M8e9GthT5rlfBNyamXdExA+AmyPi74EHgBtK/xuAf4mII3RG2lc3UPdEq2GO21G21Jxow2D4rFiVF8eWUZdRvbYEuqEtLd6B3M8z+UTPPfI8V8kYacMug6O+fWkSeMj7GBr2IfOGtTRcjrjHWNOB2oYRvjSJDO4x10+wzjeADWxptJwqmQDdUyezBa5hLNXBEfcEMZil8WBwS1JlDG5JqozBLUmVMbglqTIGtyRVxuCWpMq4H7ckcfIpItq+66zBLWmi9Tqnz8y2tgW5wS1pLE2H7yC+p3Wuo46HzeCWVLW5QnlQZ8k83e8ZRaD74aSkarXhy0P2HTv0fB3dy00yuCVVZ1gBOR/d9TRdm1MlklqtbQHdryb3UjG4JbVKrUE9TAa3pJEzrOfH4JY0MpMS2N27Jg6CwS1pKCYlpGczqP3B3atEUuMM7cFyxC2pEYZ1b4MYdRvckgbKwG6eUyWSBsbQ7s9i7ydH3JLmzYBevMVMmTjiljQvhvbgLPTQfUfckmZlUDdvvqNvg1tSTwb2cM3nIB2nSiSdwtBuN4Nb0kkM7fbrO7gjYklEPBARd5T18yPiQEQ8EhG3RMQZpX15WT9Stq9vpnRJmkzzGXF/GDjctf5J4LrM3AA8Cewo7TuAJzPzNcB1pZ+kCjjarkNfwR0Ra4E/Ab5Y1gO4FLitdNkDvKMsbyvrlO1bSn9JLWZo16PfEfengY8Bvyvr5wBPZeaJsj4FrCnLa4CjAGX706X/SSJiZ0QcjIiDv+G5BZYvaRAM7brMGdwR8TbgeGbe193co2v2se2Fhszd
mbkpMzctY3lfxUoaPEO7Pv3sx30J8PaI2AqsAM6iMwJfGRFLy6h6LXCs9J8C1gFTEbEUOBt4YuCVS9KEmnPEnZnXZubazFwPXA3cnZnvBu4BrirdtgO3l+W9ZZ2y/e7MPGXELUlamMXsx/1x4KMRcYTOHPYNpf0G4JzS/lFg1+JKlCR1izYMhs+KVXlxbBl1GdJEco67fS4/byMHcj/P5BM998jzyElJqozBLUmVMbglqTLOcUsCnOtum5Wrf+Ect6TZLfabxzU8BrckVcbglqTKGNySVBmDW5IqY3BLAtyrpCYGtyRVxuCWBLg7YE0MbkmqjMEt6XmOutthrsfB4JZ0EsO7/QxuSacwvNvN4JbU0+XnbTTAW8rglqTKGNyS1CL9vMsxuCXNyumS9jG4Jc3J8G4Xg1uSKmNwS+qLo+72MLgl9c3wbla/96/BLUktMJ8XRYNbkipjcEvqm1+20Iz5TkEZ3JL65mHwg7eQ+9PgljRv3WFjkC/MYl4Elw64FknSLAbxQmdwS1oQR9qjY3BL0hAM8oXOOW5Ji+aHlrMb9H1jcEsaGMP7VE3cJ31NlUTEo8CzwG+BE5m5KSJWAbcA64FHgXdl5pMREcBngK3Ar4D3Zeb9A69cUitdft7Gid3fe1gvXPMZcf9xZm7MzE1lfRewPzM3APvLOsCVwIZy2QlcP6hiJdVhkkbe09NEw/ybF/Ph5DbgzWV5D/BN4OOl/cbMTODbEbEyIlZn5mOLKVRSXbqDbNxG4KN+Yeo3uBP4j4hI4AuZuRs4dzqMM/OxiHhV6bsGONp13anSdlJwR8ROOiNyVnDmwv8CSa3XT9DVEu6jDm3oP7gvycxjJZzviogfztI3erTlKQ2d8N8NcFasOmW7pMkyMxDbFuRtCOxpfQV3Zh4rP49HxDeAi4DHp6dAImI1cLx0nwLWdV19LXBsgDVLmgCLCcp+Qr9NQTxfcwZ3RLwEeFFmPluW3wr8HbAX2A58ovy8vVxlL/ChiLgZuBh42vltScNUcyj3o58R97nANzp7+bEU+Epm/ntE3AvcGhE7gJ8C7yz976SzK+AROrsDvn/gVUvSBIvOzh8jLiLiWeDhUdfRp1cAPx91EX2opU6op9Za6oR6aq2lThh+ra/OzFf22tCWc5U83LV/eKtFxMEaaq2lTqin1lrqhHpqraVOaFetHvIuSZUxuCWpMm0J7t2jLmAeaqm1ljqhnlprqRPqqbWWOqFFtbbiw0lJUv/aMuKWJPVp5MEdEVdExMMRcSQids19jUZr+VJEHI+IB7vaVkXEXRHxSPn58tIeEfHZUvf3IuLCIde6LiLuiYjDEfFQRHy4jfVGxIqI+E5EfLfU+bel/fyIOFDqvCUizijty8v6kbJ9/TDq7Kp3SUQ8EBF3tLzORyPi+xFxKCIOlrZWPfbltldGxG0R8cPyv/rGltb52nJfTl+eiYiPtLFWADJzZBdgCfBj4ALgDOC7wOtHWM+bgAuBB7va/gHYVZZ3AZ8sy1uBf6NzbpbNwIEh17oauLAsvwz4EfD6ttVbbu+lZXkZcKDc/q3A1aX988Cfl+UPAJ8vy1cDtwz5fv0o8BXgjrLe1jofBV4xo61Vj3257T3An5XlM4CVbaxzRs1LgJ8Br25rrUO/U2bcQW8E9nWtXwtcO+Ka1s8I7oeB1WV5NZ19zgG+AFzTq9+I6r4duKzN9QJnAvfTORXCz4GlM/8PgH3AG8vy0tIvhlTfWjrnlr8UuKM8KVtXZ7nNXsHdqsceOAv475n3S9vq7FH3W4H/bHOto54qOd0pYNvkpNPXAnOdvnboytv0N9AZzbau3jL9cIjOicjuovMu66nMPNGjlufrLNufBs4ZRp3Ap4GPAb8r6+e0tE544VTL90XnFMnQvsf+AuB/gH8q009fjM75jtpW50xXAzeV5VbWOurg7usUsC3Vitoj4qXA14CPZOYzs3Xt0TaUejPzt5m5kc6I9iLgdbPUMpI6I+JtwPHMvK+7
eZZaRv34X5KZF9L5xqkPRsSbZuk7qlqX0pl6vD4z3wD8khe+KauXUd+nlM8w3g58da6uPdqGVuuog7uGU8A+Hp3T1hItO31tRCyjE9pfzsyvl+bW1puZT9H5pqTNwMqImD7lQnctz9dZtp8NPDGE8i4B3h6d71e9mc50yadbWCdw8qmWgZNOtVxqasNjPwVMZeaBsn4bnSBvW53drgTuz8zHy3orax11cN8LbCif3J9B5y3K3hHXNNP06Wvh1NPXvrd8uryZIZ++NiICuAE4nJmfamu9EfHKiFhZll8MvAU4DNwDXHWaOqfrvwq4O8skYpMy89rMXJuZ6+n8H96dme9uW53QOdVyRLxsepnOnOyDtOyxz8yfAUcj4rWlaQvwg7bVOcM1vDBNMl1T+2od9sR/jw8CttLZI+LHwF+NuJab6HzF2m/ovKLuoDNvuR94pPxcVfoG8LlS9/eBTUOu9Y/ovDX7HnCoXLa2rV7g94EHSp0PAn9d2i8AvkPn9L9fBZaX9hVl/UjZfsEI/g/ezAt7lbSuzlLTd8vloennTdse+3LbG4GD5fH/V+Dlbayz3P6ZwC+As7vaWlmrR05KUmVGPVUiSZong1uSKmNwS1JlDG5JqozBLUmVMbglqTIGtyRVxuCWpMr8P1+QsjcnkyFDAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.imshow(pred_mask)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.39034613304488913"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "(sub_df.EncodedPixels!='').mean()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sub_df.to_csv('../submission/1115_deeplabv3plus_resnet_512x768_v13_seed2048-50-51.csv.gz', \n",
    "#               index=False, compression='gzip')\n",
    "\n",
    "# sub_df.to_csv('../submission/1116_fpn_resnet34_384x576_v1_seed2100-01-02-03-04.csv.gz', \n",
    "#               index=False, compression='gzip')\n",
    "\n",
    "sub_df.to_csv('../submission/1117_fpn_resnet34_576x768_v1_seed2150-51-52-53-54.csv.gz', \n",
    "              index=False, compression='gzip')\n",
    "\n",
    "# sub_df.to_csv('../submission/1115_unet_resnet34_512x768_v1_seed2060-61-62.csv.gz', \n",
    "#               index=False, compression='gzip')\n",
    "\n",
    "# sub_df.to_csv('../submission/1116_unet_resnet34_384x576_v1_seed2080-81-82-83-84.csv.gz', \n",
    "#               index=False, compression='gzip')\n",
    "\n",
    "# sub_df.to_csv('../submission/1116_pspnet_resnet34_512x768_v1_seed2070-71-72-73-74.csv.gz', \n",
    "#               index=False, compression='gzip')\n",
    "\n",
    "# sub_df.to_csv('../submission/1116_pspnet_resnet34_384x576_v1_seed2090-91-92-93-94.csv.gz', \n",
    "#               index=False, compression='gzip')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {},
   "outputs": [],
   "source": [
    "# ### for ensemble\n",
    "# sub_df.to_csv('../submission/sigmoid_ensemble.csv.gz', index=False, compression='gzip')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 105,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.5335376848178868"
      ]
     },
     "execution_count": 105,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "(pd.read_csv('../data/raw/train.csv').fillna('').EncodedPixels!='').mean()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# This records the best thresholds of solutions that have already been submitted\n",
    "sub_df.to_csv('submission/del.csv.gz', index=False, compression='gzip')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.44503785830178477"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sub_kaggler = pd.read_csv('../submission/kaggler0_submission_segmentation_and_classifier.csv').fillna('')\n",
    "(sub_kaggler.EncodedPixels!='').mean()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Possible leak\n",
    "Every test image has at least one label, so we could force the model to predict at least one non-empty mask per image."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Image_Label</th>\n",
       "      <th>EncodedPixels</th>\n",
       "      <th>img_id</th>\n",
       "      <th>has_mask</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>10055ff.jpg_Fish</td>\n",
       "      <td></td>\n",
       "      <td>10055ff</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>10055ff.jpg_Flower</td>\n",
       "      <td></td>\n",
       "      <td>10055ff</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>10055ff.jpg_Gravel</td>\n",
       "      <td>1217 45 1268 7 1560 2 1564 63 1899 3 1903 81 2...</td>\n",
       "      <td>10055ff</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "          Image_Label                                      EncodedPixels  \\\n",
       "0    10055ff.jpg_Fish                                                      \n",
       "1  10055ff.jpg_Flower                                                      \n",
       "2  10055ff.jpg_Gravel  1217 45 1268 7 1560 2 1564 63 1899 3 1903 81 2...   \n",
       "\n",
       "    img_id  has_mask  \n",
       "0  10055ff         0  \n",
       "1  10055ff         0  \n",
       "2  10055ff         1  "
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#sub_df = pd.read_csv('../submission/dirty_ensemble.csv.gz').fillna('')\n",
    "# DataFrame.copy() (deep by default) replaces copy.deepcopy: the `copy` module\n",
    "# is never imported in this notebook, so deepcopy would NameError on a fresh run.\n",
    "sub = sub_df.copy()\n",
    "\n",
    "sub['img_id'] = [name.split('.')[0] for name in sub.Image_Label]\n",
    "# np.int was removed in NumPy 1.24; builtin int is the supported spelling.\n",
    "sub['has_mask'] = (sub.EncodedPixels!='').astype(int)\n",
    "sub.head(3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2    1816\n",
       "1    1410\n",
       "3     404\n",
       "0      56\n",
       "4      12\n",
       "Name: sum, dtype: int64"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Distribution of predicted masks-per-image. The 'leak' says every image has\n",
    "# at least one mask, so images with sum==0 (56 here) are candidates for fixing.\n",
    "agg = sub.groupby(['img_id'])['has_mask'].agg(['sum'])\n",
    "agg['sum'].value_counts()# / 3698"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "f0ac8850ba18419fa098a98cc67226e0",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(IntProgress(value=0, max=3698), HTML(value='')))"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "CPU times: user 13min 26s, sys: 25.1 s, total: 13min 51s\n",
      "Wall time: 50.4 s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "\n",
    "# Post-processing that exploits the 'every image has a mask' observation:\n",
    "# threshold each of the 4 class channels normally; if all 4 come out empty,\n",
    "# re-threshold with a fully relaxed EMPTY threshold and keep the channel\n",
    "# with the largest mask area.\n",
    "\n",
    "ensemble = False#True\n",
    "\n",
    "# Relaxed probability threshold used only in the all-empty fallback pass.\n",
    "relaxed_threshold_EMPTY = 1\n",
    "\n",
    "#### Generate rle encodings\n",
    "rles = []\n",
    "for p in tqdm_notebook(preds_test):#p is logit from model\n",
    "    found = False\n",
    "    for ch in range(4):\n",
    "        pred_mask = predict_mask(p[ch], best_threshold_EMPTY[ch], best_threshold_MASK[ch], use_sigmoid=(not ensemble))#False ensemble\n",
    "        if pred_mask.sum()>0:#predicted non-empty mask\n",
    "            # Resize to the 525x350 submission resolution, re-binarize, encode.\n",
    "            pred_mask = cv2.resize(pred_mask.astype('float32'), (525, 350))\n",
    "            pred_mask = (pred_mask>0.5).astype(np.uint8)\n",
    "            rles.append(mask2rle(pred_mask))\n",
    "            found = True\n",
    "        else:\n",
    "            rles.append('')\n",
    "    ##if predict 4-channel-all-empty\n",
    "    if not found:\n",
    "        pred_mask_4ch = []\n",
    "        pred_mask_sum = []\n",
    "        for ch in range(4):\n",
    "            pred_mask = predict_mask(p[ch], relaxed_threshold_EMPTY, best_threshold_MASK[ch], use_sigmoid=(not ensemble))#False ensemble\n",
    "            pred_mask_sum.append(pred_mask.sum())\n",
    "            pred_mask_4ch.append(pred_mask)\n",
    "        if np.sum(pred_mask_sum)==0:\n",
    "            # Even the relaxed pass found nothing: keep all 4 rles empty.\n",
    "            continue\n",
    "        else:\n",
    "            ch_idx = np.argmax(pred_mask_sum)#pick the channel with maximum mask area\n",
    "            pred_mask = pred_mask_4ch[ch_idx]\n",
    "            ##replace empty mask with new mask\n",
    "            pred_mask = cv2.resize(pred_mask.astype('float32'), (525, 350))\n",
    "            pred_mask = (pred_mask>0.5).astype(np.uint8)\n",
    "            # The last 4 entries of rles belong to the current image;\n",
    "            # ch_idx 0..3 maps to rles[-4]..rles[-1], so overwrite the chosen one.\n",
    "            rles[-(4-ch_idx)] = mask2rle(pred_mask)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "14792\n"
     ]
    }
   ],
   "source": [
    "# One row per (image, cloud class) pair, in the fixed competition class order.\n",
    "CLASS_NAMES = ['Fish', 'Flower', 'Gravel', 'Sugar']\n",
    "img_id_ch = ['%s.jpg_%s' % (fname, cls)\n",
    "             for fname in test_fnames\n",
    "             for cls in CLASS_NAMES]\n",
    "\n",
    "sub_df = pd.DataFrame({'Image_Label': img_id_ch, 'EncodedPixels': rles})\n",
    "print(len(sub_df.index))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.4398323418063818"
      ]
     },
     "execution_count": 33,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Fraction of non-empty masks in the fixed submission (compare against the\n",
    "# ~0.534 positive rate in train.csv computed earlier).\n",
    "(sub_df.EncodedPixels!='').mean()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Save the final leak-fixed submission as a gzipped CSV.\n",
    "sub_df.to_csv('../submission/1109_deeplabv3plus_resnet_512x768_v10_seed2022-4ch-empty-fix.csv.gz', \n",
    "              index=False, compression='gzip')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
