{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "http://medicaldecathlon.com/#tasks\n",
    "- Target: Liver and tumour\n",
    "- Modality: Portal venous phase CT\n",
    "- Size: 201 3D volumes (131 Training + 70 Testing)\n",
    "- Source: IRCAD Hôpitaux Universitaires\n",
     "- Challenge: Label imbalance with a large (liver) and small (tumour) target"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Setup imports"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "id": "ZejZZonxwCMH",
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2024-06-06 14:24:37.491882: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n",
      "2024-06-06 14:24:37.520308: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
      "To enable the following instructions: AVX2 AVX_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
      "2024-06-06 14:24:38.013849: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MONAI version: 1.4.dev2421\n",
      "Numpy version: 1.26.4\n",
      "Pytorch version: 2.3.0+cu121\n",
      "MONAI flags: HAS_EXT = False, USE_COMPILED = False, USE_META_DICT = False\n",
      "MONAI rev id: 1070036ea3c30176fc82cfb15952387bed8b8a90\n",
      "MONAI __file__: /home/<username>/.local/lib/python3.10/site-packages/monai/__init__.py\n",
      "\n",
      "Optional dependencies:\n",
      "Pytorch Ignite version: 0.4.11\n",
      "ITK version: NOT INSTALLED or UNKNOWN VERSION.\n",
      "Nibabel version: 5.2.1\n",
      "scikit-image version: 0.23.2\n",
      "scipy version: 1.13.0\n",
      "Pillow version: 10.3.0\n",
      "Tensorboard version: 2.16.2\n",
      "gdown version: 5.2.0\n",
      "TorchVision version: 0.18.0+cu121\n",
      "tqdm version: 4.66.4\n",
      "lmdb version: NOT INSTALLED or UNKNOWN VERSION.\n",
      "psutil version: 5.9.8\n",
      "pandas version: 2.2.2\n",
      "einops version: 0.8.0\n",
      "transformers version: NOT INSTALLED or UNKNOWN VERSION.\n",
      "mlflow version: NOT INSTALLED or UNKNOWN VERSION.\n",
      "pynrrd version: NOT INSTALLED or UNKNOWN VERSION.\n",
      "clearml version: NOT INSTALLED or UNKNOWN VERSION.\n",
      "\n",
      "For details about installing the optional dependencies, please visit:\n",
      "    https://docs.monai.io/en/latest/installation.html#installing-the-recommended-dependencies\n",
      "\n"
     ]
    }
   ],
   "source": [
    "from monai.utils import first, set_determinism\n",
    "from monai.transforms import (\n",
    "    AsDiscrete,\n",
    "    AsDiscreted,\n",
    "    EnsureChannelFirstd,\n",
    "    Compose,\n",
    "    CropForegroundd,\n",
    "    LoadImaged,\n",
    "    Orientationd,\n",
    "    RandCropByPosNegLabeld,\n",
    "    ScaleIntensityRanged,\n",
    "    Spacingd,\n",
    "    Invertd,\n",
    ")\n",
    "from monai.handlers.utils import from_engine\n",
    "# https://docs.monai.io/en/stable/networks.html#nets\n",
    "from monai.networks.nets import UNet, AttentionUnet, DynUNet, SegResNet, VNet, SegResNetVAE, UNETR\n",
    "from monai.networks.layers import Norm\n",
    "from monai.metrics import DiceMetric\n",
    "from monai.losses import DiceLoss\n",
    "from monai.inferers import sliding_window_inference\n",
    "from monai.data import CacheDataset, DataLoader, Dataset, decollate_batch\n",
    "from monai.config import print_config\n",
    "from monai.apps import download_and_extract\n",
    "import aim\n",
    "from aim.pytorch import track_gradients_dists, track_params_dists\n",
    "import torch\n",
    "import matplotlib.pyplot as plt\n",
    "import tempfile\n",
    "import shutil\n",
    "import os\n",
    "import glob\n",
    "\n",
    "print_config()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import nibabel as nib\n",
    "def get_shape(name):\n",
    "    img = nib.load(name).get_fdata()\n",
    "    label = nib.load(name.replace('_0000','').replace('imagesTr','labelsTr')).get_fdata()\n",
    "    print(img.shape, label.shape, name)\n",
    "    return 0\n",
    "\n",
    "# for name in train_images:\n",
    "#     get_shape(name)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "w3EPRPqBwCMN"
   },
   "source": [
     "## Set MSD Liver dataset path"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "id": "lVZr7-kBwCMO"
   },
   "outputs": [],
   "source": [
    "data_dir = '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/'\n",
    "train_images = sorted(glob.glob(os.path.join(data_dir, \"imagesTr\", \"*.nii.gz\")))\n",
    "# train_labels = sorted(glob.glob(os.path.join(data_dir, \"labelsTr\", \"*.nii.gz\")))\n",
    "train_labels = [i.replace('_0000','').replace('imagesTr','labelsTr') for i in train_images]\n",
    "data_dicts = [{\"image\": image_name, \"label\": label_name, \"name\": image_name} for image_name, label_name in zip(train_images, train_labels)]\n",
    "train_files, val_files = data_dicts[:-9], data_dicts[-9:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_92_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_92.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_92_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_93_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_93.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_93_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_94_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_94.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_94_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_95_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_95.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_95_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_96_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_96.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_96_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_97_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_97.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_97_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_98_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_98.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_98_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_99_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_99.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_99_0000.nii.gz'},\n",
       " {'image': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_9_0000.nii.gz',\n",
       "  'label': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//labelsTr/liver_9.nii.gz',\n",
       "  'name': '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_9_0000.nii.gz'}]"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "val_files"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "Q1Wi6EtAwCMO"
   },
   "source": [
    "## Set deterministic training for reproducibility"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "id": "dr8HRsffwCMO"
   },
   "outputs": [],
   "source": [
    "set_determinism(seed=1645)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "13ZnlKGCwCMO"
   },
   "source": [
    "## Setup transforms for training and validation\n",
    "\n",
    "Here we use several transforms to augment the dataset:\n",
     "1. `LoadImaged` loads the liver CT images and labels from NIfTI format files.\n",
    "1. `EnsureChannelFirstd` ensures the original data to construct \"channel first\" shape.\n",
    "1. `Spacingd` adjusts the spacing by `pixdim=(1.5, 1.5, 2.)` based on the affine matrix.\n",
    "1. `Orientationd` unifies the data orientation based on the affine matrix.\n",
    "1. `ScaleIntensityRanged` extracts intensity range [-57, 164] and scales to [0, 1].\n",
    "1. `CropForegroundd` removes all zero borders to focus on the valid body area of the images and labels.\n",
    "1. `RandCropByPosNegLabeld` randomly crop patch samples from big image based on pos / neg ratio.  \n",
    "The image centers of negative samples must be in valid body area.\n",
     "1. `RandAffined` efficiently performs `rotate`, `scale`, `shear`, `translate`, etc. together based on PyTorch affine transform (commented out in the cell below; enable it for extra augmentation).\n",
     "1. `EnsureTyped` converts the numpy array to PyTorch Tensor for further steps (not applied explicitly in this notebook)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "id": "jf7siKPOwCMO"
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/yawei/.local/lib/python3.10/site-packages/monai/utils/deprecate_utils.py:321: FutureWarning: monai.transforms.croppad.dictionary CropForegroundd.__init__:allow_smaller: Current default value of argument `allow_smaller=True` has been deprecated since version 1.2. It will be changed to `allow_smaller=False` in version 1.5.\n",
      "  warn_deprecated(argname, msg, warning_category)\n"
     ]
    }
   ],
   "source": [
    "roi_size = (96, 96, 32)\n",
    "train_transforms = Compose(\n",
    "    [\n",
    "        LoadImaged(keys=[\"image\", \"label\"]),\n",
    "        EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n",
    "        ScaleIntensityRanged(\n",
    "            keys=[\"image\"],\n",
    "            a_min=-57,\n",
    "            a_max=164,\n",
    "            b_min=0.0,\n",
    "            b_max=1.0,\n",
    "            clip=True,\n",
    "        ),\n",
    "        CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n",
    "        Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n",
    "        Spacingd(keys=[\"image\", \"label\"], pixdim=(1.5, 1.5, 2.0), mode=(\"bilinear\", \"nearest\")),\n",
    "        RandCropByPosNegLabeld(\n",
    "            keys=[\"image\", \"label\"],\n",
    "            label_key=\"label\",\n",
    "            spatial_size=roi_size,\n",
    "            pos=1,\n",
    "            neg=1,\n",
    "            num_samples=2,\n",
    "            image_key=\"image\",\n",
    "            image_threshold=0,\n",
    "        ),\n",
    "        \n",
    "        # user can also add other random transforms\n",
    "        # RandAffined(\n",
    "        #     keys=['image', 'label'],\n",
    "        #     mode=('bilinear', 'nearest'),\n",
    "        #     prob=1.0, spatial_size=(96, 96, 96),\n",
    "        #     rotate_range=(0, 0, np.pi/15),\n",
    "        #     scale_range=(0.1, 0.1, 0.1)),\n",
    "    ]\n",
    ")\n",
    "val_transforms = Compose(\n",
    "    [\n",
    "        LoadImaged(keys=[\"image\", \"label\"]),\n",
    "        EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n",
    "        ScaleIntensityRanged(\n",
    "            keys=[\"image\"],\n",
    "            a_min=-57,\n",
    "            a_max=164,\n",
    "            b_min=0.0,\n",
    "            b_max=1.0,\n",
    "            clip=True,\n",
    "        ),\n",
    "        CropForegroundd(keys=[\"image\", \"label\"], source_key=\"image\"),\n",
    "        Orientationd(keys=[\"image\", \"label\"], axcodes=\"RAS\"),\n",
    "        Spacingd(keys=[\"image\", \"label\"], pixdim=(1.5, 1.5, 2.0), mode=(\"bilinear\", \"nearest\")),\n",
    "    ]\n",
    ")\n",
    "\n",
    "check_transforms = Compose(\n",
    "    [\n",
    "        LoadImaged(keys=[\"image\", \"label\"]),\n",
    "        EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "au4rmQfDwCMP"
   },
   "source": [
    "## Check transforms in DataLoader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 408
    },
    "id": "qqcFPuVkwCMP",
    "outputId": "4189428e-4569-4453-e379-df4466208c85",
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "image shape: torch.Size([96, 96, 32]), label shape: torch.Size([96, 96, 32])\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA9EAAAHoCAYAAABO2mw/AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABnAklEQVR4nO3deXxV9Z34/3cWsu8J2YBACPuiICIEXBGl7lRbx476demobXGf1mpHbbVVbDtVp611m47buFRnXOqKCopFEGRHdoRAWJIQyL4v5/eHPzJ83p+PyQnkcrO8no9HHo+8D+/PuZ977rn38OHyfp8Qz/M8AQAAAAAAHQoN9gQAAAAAAOgpWEQDAAAAAOATi2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPrGIBgAAAADAJxbRAAAAAAD4xCIaAAAAAACfWEQDQfTss89KSEiIFBQUBHsqAADgGDvSvwecfvrpMm7cuC6dy5AhQ+Tqq6/u0n0CvRWLaAAAAAAAfGIRDQTRlVdeKXV1dTJ48OBgTwUAAACAD+HBngDQl4WFhUlYWFiwpwEAAADAJ76JBoJI10INGTJEzj//fPn000/lxBNPlOjoaBk/frx8+umnIiLy+uuvy/jx4yUqKkomTZokq1atMva3du1aufrqq2Xo0KESFRUlmZmZcu2118qBAwesxz70GFFRUZKXlydPPvmk/OpXv5KQkBAr97//+79l0qRJEh0dLSkpKXLZZZdJYWFhlx8PAAD6srfeekvOO+88yc7OlsjISMnLy5Nf//rX0tLS4sxfsWKFTJs2TaKjoyU3N1eeeOIJK6ehoUF++ctfyrBhwyQyMlIGDRokd9xxhzQ0NAT66QC9Ft9EA93Mtm3b5J//+Z/lhhtukCuuuEL+/d//XS644AJ54okn5Be/+IX85Cc/ERGRuXPnyqWXXiqbN2+W0NBv/j3so48+ku3bt8s111wjmZmZsn79ennqqadk/fr18sUXX7QtkFetWiXf+c53JCsrS+677z5paWmR+++/X/r372/N54EHHpB77rlHLr30UvmXf/kX2b9/v/zpT3+SU089VVatWiVJSUnH7NgAANCbPfvssxIXFye33367xMXFyYIFC+Tee++VyspK+f3vf2/klpWVybnnniuXXnqp/OAHP5BXX31VfvzjH0tERIRce+21IiLS2toqF154oSxatEiuv/56GT16tKxbt04eeeQR2bJli7z55ptBeJZAL+ABCJpnnnnGExFvx44dnud53uDBgz0R8RYvXtyWM2/ePE9EvOjoaG/nzp1t25988klPRLxPPvmkbVttba31GC+//LInIt5nn33Wtu2CCy7wYmJivD179rRt27p1qxceHu4d/rFQUFDghYWFeQ888ICxz3Xr1nnh4eHWdgAA4J/+e4DrOn7DDTd4MTExXn19fdu20047zRMR7w9/+EPbtoaGBm/ChAleenq619jY6Hme573wwgteaGio949//MPY5xNPPOGJiPf555+3bRs8eLB31VVXdeGzA3ov/js30M2MGTNG8vPz2+IpU6aIiMiMGTMkJyfH2r59+/a2bdHR0W2/19fXS2lpqUydOlVERFauXCkiIi0tLfLxxx/L7NmzJTs7uy1/2LBhcs455xhzef3116W1tVUuvfRSKS0tbfvJzMyU4cOHyyeffNJVTxsAgD7v8Ot4VVWVlJaWyimnnCK1tbWyadMmIzc8PFxuuOGGtjgiIkJuuOEGKSkpkRUrVoiIyGuvvSajR4+WUaNGGdfxGTNmiIhwHQeOEP+dG+hmDl8oi4gkJiaKiMigQYOc28vKytq2HTx4UO677z555ZVXpKSkxMivqKgQEZGSkhKpq6uTYcOGWY+tt23dulU8z5Phw4c759qvXz8/TwkAAPiwfv16ufvuu2XBggVSWVlp/Nmh6/gh2dnZEhsba2wbMWKEiIgUFBTI1KlTZevWrbJx
40ZnuZaIWH9XAOAPi2igm/m2bt3ftt3zvLbfL730Ulm8eLH87Gc/kwkTJkhcXJy0trbKd77zHWltbe30XFpbWyUkJETef/995+PHxcV1ep8AAMBWXl4up512miQkJMj9998veXl5EhUVJStXrpSf//znR3wdHz9+vDz88MPOP9f/QA/AHxbRQC9RVlYm8+fPl/vuu0/uvffetu1bt2418tLT0yUqKkq2bdtm7UNvy8vLE8/zJDc3t+1ftwEAQNf79NNP5cCBA/L666/Lqaee2rZ9x44dzvy9e/dKTU2N8W30li1bROSbu32IfHMdX7NmjZx55pnOu28AODLURAO9xKFvig//ZlpE5NFHH7XyZs6cKW+++abs3bu3bfu2bdvk/fffN3IvvvhiCQsLk/vuu8/ar+d5zltnAQCAznNdxxsbG+Uvf/mLM7+5uVmefPJJI/fJJ5+U/v37y6RJk0Tkm/+htmfPHnn66aet8XV1dVJTU9OVTwHoM/gmGuglEhIS5NRTT5Xf/e530tTUJAMGDJAPP/zQ+S/Yv/rVr+TDDz+U6dOny49//GNpaWmRP//5zzJu3DhZvXp1W15eXp785je/kbvuuksKCgpk9uzZEh8fLzt27JA33nhDrr/+evnpT396DJ8lAAC907Rp0yQ5OVmuuuoqufnmmyUkJEReeOEF6x+xD8nOzpbf/va3UlBQICNGjJC//e1vsnr1annqqafaepZceeWV8uqrr8qPfvQj+eSTT2T69OnS0tIimzZtkldffVXmzZsnJ5544rF8mkCvwCIa6EVeeukluemmm+Sxxx4Tz/Pk7LPPlvfff9/owi0iMmnSJHn//fflpz/9qdxzzz0yaNAguf/++2Xjxo1W988777xTRowYIY888ojcd999IvJNDdXZZ58tF1544TF7bgAA9GapqanyzjvvyL/+67/K3XffLcnJyXLFFVfImWeeKbNmzbLyk5OT5bnnnpObbrpJnn76acnIyJA///nPct1117XlhIaGyptvvimPPPKIPP/88/LGG29ITEyMDB06VG655RZKtYAjFOJ92z9vAehzZs+eLevXr7fqqAEAAAB8g5pooI+qq6sz4q1bt8p7770np59+enAmBAAAAPQAfBMN9FFZWVly9dVXy9ChQ2Xnzp3y+OOPS0NDg6xatepb7wsNAAAA9HXURAN91He+8x15+eWXpaioSCIjIyU/P18efPBBFtAAAABAO/gmGgAAAAAAn6iJBgAAAADAp4Atoh977DEZMmSIREVFyZQpU2TZsmWBeigAABAEXOsBAH1RQP4799/+9jf5f//v/8kTTzwhU6ZMkUcffVRee+012bx5s6Snp7c7trW1Vfbu3Svx8fESEhLS1VMDAKDTPM+Tqqoqyc7OltBQ/hOXyNFd60W43gMAupdOXeu9ADjppJO8OXPmtMUtLS1edna2N3fu3A7HFhYWeiLCDz/88MMPP93up7CwMBCXzR7paK71nsf1nh9++OGHn+754+da3+XduRsbG2XFihVy1113tW0LDQ2VmTNnypIlS6z8hoYGaWhoaIs9+pwBALqQ61tOP9easLAwI7+1tVXi4+O7dG49VWev9SLffr0/Wc6VcOkX2AkDANCBZmmSRfKer2t9ly+iS0tLpaWlRTIyMoztGRkZsmnTJit/7ty5ct9993X1NAAAEJEjX0S7xvHfjr/R2Wu9yLdf78Oln4SHsIgGAATZ//9XAz/X+qAXdt11111SUVHR9lNYWBjsKQEAerC4uDjjJyIiwvrxo6WlxfjB0eF6DwDoLbr8m+i0tDQJCwuT4uJiY3txcbFkZmZa+ZGRkRIZGdnV0wAAAAHS2Wu9CNd7AEDv0eXfREdERMikSZNk/vz5bdtaW1tl/vz5kp+f39UPBwAAjjGu9QCAvqzLv4kWEbn99tvlqquukhNPPFFOOukkefTRR6WmpkauueaaQDwcAAA4xrjWAwD6qoAsov/pn/5J9u/fL/fee68U
FRXJhAkT5IMPPrAakAAAgJ6Jaz0AoK8K8brZPaUqKyslMTEx2NMAAPQQ4eHmvwfHxsYacVNTkzWmrq7OiP1eCisqKiQhIaGTM4TLoev96XIR3bkBAEHX7DXJp/KWr2t90LtzAwAAAADQU7CIBgAAAADAJxbRAAAAAAD4FJDGYgAAHCstLS1GXFFRYcQhISHWmOjoaCNubGy0cg6vk/Y8T1pbW49mmgAAoJfgm2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPtFYDDgG9A3bKysrgzQToPc5vAGY3z+vr6834pSUFCvn8IZlnudJeXn5kU0QAAD0KnwTDQAAAACATyyiAQAAAADwiUU0AAAAAAA+URMNdLGYmBhrGzXQQNeIiIiwtjU2NnZ6P62trUZcWlpq5cTGxrb93lHdNQAA6Dv4JhoAAAAAAJ9YRAMAAAAA4BOLaAAAAAAAfKImGuhiutYSQNc5kvrnI1VTU3PMHgsAAPQcfBMNAAAAAIBPLKIBAAAAAPCJRTQAAAAAAD6xiAYAAAAAwCcaiwFHKTIy0ojr6+uDNBMAAAAAgcY30QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgEzXRwFHyPC/YUwAQAKmpqW2/t7a2SllZWRBnAwAAugu+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgE43FgE6IiIiwtjU2NgZhJgCORlpamhEff/zxVs7o0aPbfm9sbJSnnnoq4PMCAADdH99EAwAAAADgE4toAAAAAAB8YhENAAAAAIBP1EQDnTB06FBr26ZNm4IwEwCHZGdnG3FOTo4Rjxo1yhozYMAAI25tbbVyCgsL236n9wEAADiEb6IBAAAAAPCJRTQAAAAAAD6xiAYAAAAAwCdqooFO2Lp1a7CnAPQpunY5MzPTyjnjjDOMODY21ojr6+utMUVFRUacnJxs5fTv37/t94aGho4nCwAA+gS+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgE43FgHaMGjXKiDdt2hSkmQC9T1xcnBHr95uISH5+vhFHRUV1uN8hQ4YYcVlZmZVTW1trxAcOHLBycnJy2n6nsRgAADiEb6IBAAAAAPCJRTQAAAAAAD6xiAYAAAAAwCdqooF2bN68OdhTAHqkiIgIa9ugQYOM+LLLLjPilpYWa0xsbKwRDxw40MrRNc/FxcVGHBMTY43R9dj19fVWTlhYWNvvoaH8mzMAAPgGfysAAAAAAMAnFtEAAAAAAPjEIhoAAAAAAJ9YRAMAAAAA4BONxYDD9O/f34j3798fpJkAPUtiYqIRn3vuuR3mDBkyxIjDw+1LUmFhoRG7GpY1NzcbcVJSkhEfPHjQGhMVFWXEra2tVs7hDcsaGhqsPwcAAH0T30QDAAAAAOATi2gAAAAAAHxiEQ0AAAAAgE/URAOHObwGEsA3dK1yTk6OlXPOOecYcVpampWj64pramqMOCQkxBqja5VLSkqsnIyMDCPetGmTEScnJ1tj9HOqra21cg6vrQ4N5d+cAQDAN/hbAQAAAAAAPrGIBgAAAADAJxbRAAAAAAD4RE00+ixdRykiUlxcHISZAN1HdHS0te2ss84y4tjYWCsnJSXFiAcOHGjl1NfXG3FLS4sRp6enW2N0DXRMTIyVo2urIyMjO5yvrnHWY0REEhIS2n7XcwcAAH0X30QDAAAAAOATi2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8InGYuizmpqagj0FIOiGDBlixKeffrqVk5WVZcSuJluHN+ESEUlKSrJyDh48aMR79uwx4szMTGtMcnKyEVdUVFg5mm4kVl1dbeU0NjYacVhYmJVz+PNsaGjo8HEBAEDfwDfRAAAAAAD4xCIaAAAAAACfWEQDAAAAAOATNdHoM9LS0ozYT20l0JO5aox1DfTZZ59txP369bPG6P4B2dnZVo6uO/Y8z8rRdceVlZVWjlZVVWXE8fHx
Vk5iYqIR65ptV010TU2NEdfW1lo51EQDAAAXvokGAAAAAMAnFtEAAAAAAPjEIhoAAAAAAJ9YRAMAAAAA4BONxdBnREdHG3FpaWmQZgIERni4+ZE+YcIEKycpKcmIY2NjjdjV7GvQoEFGvHPnTisnLy/PiJubm60c3SRMP7arAVhWVpYR6yZnIiLr1q0z4oSEBCOuq6uzxmzfvt2IQ0Ptf1M+7rjj2t0HAADom/gmGgAAAAAAn1hEAwAAAADgU6cW0XPnzpXJkydLfHy8pKeny+zZs2Xz5s1GTn19vcyZM0dSU1MlLi5OLrnkEikuLu7SSQMAgMDgWg8AQPs6VRO9cOFCmTNnjkyePFmam5vlF7/4hZx99tmyYcOGttq22267Td5991157bXXJDExUW688Ua5+OKL5fPPPw/IEwBckpOTrW0lJSVBmAkQGCNGjLC26VrgkSNHWjm67ljXAqekpFhjysrKjDg3N9fKiYyMNOIDBw5YOfHx8e2O0bGIyMGDB404IyPDytFzXrlypRGnpqZaY3QNd3p6upVz7rnntv2u67l7M671AAC0r1OL6A8++MCIn332WUlPT5cVK1bIqaeeKhUVFfLXv/5VXnrpJZkxY4aIiDzzzDMyevRo+eKLL2Tq1KldN3MAANDluNYDANC+o6qJrqioEJH/+xZgxYoV0tTUJDNnzmzLGTVqlOTk5MiSJUuc+2hoaJDKykrjBwAAdA9dca0X4XoPAOg9jngR3draKrfeeqtMnz5dxo0bJyIiRUVFEhERYd1CJSMjQ4qKipz7mTt3riQmJrb96FupAACA4Oiqa70I13sAQO9xxIvoOXPmyFdffSWvvPLKUU3grrvukoqKirafwsLCo9ofAADoGl11rRfheg8A6D06VRN9yI033ijvvPOOfPbZZzJw4MC27ZmZmdLY2Cjl5eXGv1AXFxdLZmamc1+RkZHOZjLA0XA1AWpubg7CTICuER5uflxfccUVVo7+LI2IiLBy6uvrjVg36nI14EtMTDTihoYGK0c3DWtpabFydBOzkJAQI25sbLTGREVFdTi/wYMHG/HOnTuN+OKLL7bG6MZnruvQ4Q3L9PHvC7ryWi/C9R4A0Ht06ptoz/PkxhtvlDfeeEMWLFhgdWidNGmS9OvXT+bPn9+2bfPmzbJr1y7Jz8/vmhkDAICA4VoPAED7OvVP63PmzJGXXnpJ3nrrLYmPj2+rfUpMTJTo6GhJTEyUH/7wh3L77bdLSkqKJCQkyE033ST5+fl06wQAoAfgWg8AQPs6tYh+/PHHRUTk9NNPN7Y/88wzcvXVV4uIyCOPPCKhoaFyySWXSENDg8yaNUv+8pe/dMlkAQBAYHGtBwCgfSGe53nBnsThKisrrfo7oCO61rK1tTVIMwG6hq7B/e53v2vEZ511ljVm7969RuzqAzBs2DAj1v0DXHXJ+jO5urraytG11a4uzTExMUZcW1trxGFhYdaYfv36Wds0/Tx13XdcXFyHYy6//HIrJzk5ue33Q9emiooKSUhI6HBO6NihY3q6XCThIR2/zgAABFKz1ySfylu+rvVHdZ9oAAAAAAD6EhbRAAAAAAD4xCIaAAAAAACf+t6NL9Er6XrMffv2BWkmQOe56m4mTZpkxBMmTDDiyspKa4x+H7jul15RUWHEuk7Ztd/09HQjLisrs3J07bLrPtG6BvrwewyLiOzatcsao+d3eJ3yIbq+Wd8DWh8XEZG8vLwO9wsAAODCN9EAAAAAAPjEIhoAAAAAAJ9YRAMAAAAA4BOLaAAAAAAAfKKxGHqksLAwIy4pKQnSTICjd+mll1rb+vfvb8S6wVZcXJw1RjfUCgkJsXIiIiI6FYvYDcr0XEREIiMjO9yPno9uUOZqRqYbljU1NVk5+liceOKJRuxqlpafn29tAwAA8INvogEAAAAA8IlFNAAAAAAAPrGIBgAAAADAJ2qi0SPpmkxdswl0F6665OnTpxvxuHHjrJzy8nIjdtULa6Gh
5r+LRkdHdzgfXS+cmppqjWltbW13HyIiRUVFRhwbG2vl1NTUGLGuida9DkTseueCggIrZ8SIEUacnJxsxH6OAwAAgF98Ew0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPrGIBgAAAADAJxqLoUeikRh6iszMTGvb8ccfb8SupmFZWVlGXF1dbcQRERHWmIqKCiOOioqycnRTPj2mrq7OGqObhLkagO3du9eIExMTrRzdzCs83LwEuear3+u6eZprP/rY5OfnW2P8WLVqVdvv+vgDAIC+i2+iAQAAAADwiUU0AAAAAAA+sYgGAAAAAMAnaqIBoAvFxcUZ8UknnWTlDBgwwIh1Ta+ISHx8vBGXlZUZcXl5eYePnZSUZOXoumRdc9zQ0GCNSUtLM2JXnXdNTY0Rp6SkWDn6saKjo43YVecdGRlpxP3797dydP31kCFD2n0cF1efhQceeKDt96ampg73AQAA+ga+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn6iJPkZ0HaLneUGaSffHsUJPNm3aNCN2nb/6Xsy6/lnErjHW+3Htt7m52Yj93HdZP7brPsy6DnnHjh0d5uj5i4gkJycbcWlpqRG7arhLSkqMeNiwYVbOzp07jfjcc8+1cjry8ccfW9tyc3PbfnfVigMAgL6Jb6IBAAAAAPCJRTQAAAAAAD6xiAYAAAAAwCcW0QAAAAAA+ERjsWNkzJgxRrx+/fogzaT7S0hIMOKKioogzQTomG6Et2XLFiM+5ZRTrDFNTU1GHB5ufxTr8z4rK8uICwsLrTFpaWlG7GrUtWnTJiNOTEw04gEDBlhjdCMx13718x4yZIiV09raasS6OdqePXusMfo59evXz8oZP368ta0ja9euNeIlS5ZYOYMGDWr7vb6+vtOPAQAAeie+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn6iJDgBd/ywiUlNTE4SZ9EzV1dXBngLgm67r1bWzrtrgiIgII66trbVyYmNjjbiystKIdX2xiEhycrIRf/3111aOHqdrjKOjo60xuu67sbHRyunfv78RFxQUWDn6szE1NdWIXTXR8fHxRuz6fDj//POtbYdraGiwti1evNiIzzrrLCtn+vTpbb9XVlbKz3/+83YfBwAA9A18Ew0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPrGIBgAAAADAJxqLBcDYsWOtba+99loQZtL96WZEIiKlpaVBmAlwZCZMmGDEU6dONWLduEtEJCEhwYhbWlqsnLCwMCPWzQn1PkTsBlrNzc1Wjp5PU1NTh/PV28LD7UtHYmKiEbve2ykpKUZcWFhoxK7npBu35efnWzkdWbp0qbXt5JNPNuJx48a1uw/XsQQAAH0T30QDAAAAAOATi2gAAAAAAHxiEQ0AAAAAgE/URAdAZmZmsKfQY0RFRVnbdA0k0F1ERkZa266++mojrq2tNWJdKyxi1y6Hhtr/nllWVmbE5eXlRpyRkWGNiY6ONuL6+norJysry4grKiqMeNOmTdaYwYMHG3FJSYmVo2uGXZ+DCxYsMGJdT56cnGyNaWxsbHcuLvozZOLEiVZOfHy8EevjLSJy8ODBtt+rqqo6fFwAANA38E00AAAAAAA+sYgGAAAAAMAnFtEAAAAAAPhETXQAFBcXB3sK3VZqaqoR7969O0gzAToWFxdnxN/97netnKSkJCPWNby6/lnEvl+y6zNj4MCBRqzrfF31w9XV1R0+tp6fnn9RUZE15sCBA0YcExNj5dTV1Rmxvv+0iMj+/fuNWD8HV13yxRdfbMQhISFWjqZzdP2ziF3X/eGHH1o5h/ds0LXuAACg7+KbaAAAAAAAfGIRDQAAAACATyyiAQAAAADwiUU0AAAAAAA+0VisC5xwwglG/M477wRpJt2fbs6jmyUBx0p4eHi7sYhIfn6+EU+bNs3K0c2wYmNjjdjVYKu8vNyIW1parBzdDEs3CXO9d/Rz0I8jYjdL0/sNDbX/bVXv19UI
TTfz0s3TREQiIiLa3e+MGTOsMa79dFZBQYG1bcmSJUbcUXM312sEAAD6Jr6JBgAAAADAJxbRAAAAAAD4xCIaAAAAAACfqInuArrOT9f99mXJyclG3NzcHKSZAKawsDAjdtUY6/O3qqqqw/20trYasa45FhGJjIw0Yl1HLSLy9ddfG7GuDdafOyJ27a+ev4hd2xsdHW3EdXV11hhdL5yWlmbl6FrqLVu2WDlJSUlGvHHjRiO+6KKLrDFHor6+3ojnzZtn5ejXOz093copLCz81n0CAIC+i2+iAQAAAADwiUU0AAAAAAA+sYgGAAAAAMAnFtEAAAAAAPhEY7EukJeXZ8RffPFFkGbS/eimRUVFRUGaCfo63QBMN8IaOXKkNSY3N9eIw8Ptj8yamhojTkxMNOJ+/fpZY6qrq404IyPDytm9e7cRZ2VlGbGrEVZlZWW7jyNivyf1mNLSUmuMboTmapammwa6jtXQoUON+J/+6Z+snCOhG5Tt2LHDiPVzdG1zvU5lZWVtv7ueMwAA6Jv4JhoAAAAAAJ9YRAMAAAAA4BOLaAAAAAAAfKIm+giEhIQYcVRUVJBm0r3o4yIikpKSYsR79+49VtNBH6brnUVE4uPjjbi+vt6IBwwY0OF+dV21iEhLS4sR69rgxsZGa4yuv3XV7MbFxRnxgQMHjNj1uaP34zoO5eXl7e7HVZ/d1NRkxAcPHrRy9H6GDBli5QwbNsyIs7OzrZyOFBQUWNveffddI87MzDRiXbcuIhIREWHE+/bts3L279/f9rs+BgAAoO/im2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8Ima6COg79f69ttvB2km3UtCQoK1rbi4OAgzQV8XGxtrbdP1zPqexbqOVsQ+f3NycqycmJiYdueia29F7BpdV+1yXV2dEev7O3ueZ43RfQn0GBG7hlvPxc+xc73X9f2lp06dauXommg/9H6XLl1q5ej6dl0b7voc2rNnjxHre0uLmPfqdh1vAADQN/FNNAAAAAAAPrGIBgAAAADAJxbRAAAAAAD4xCIaAAAAAACfaCx2BKKioox4+/btQZpJ9+JqvLN///4gzAS9iW6OpRtuiYiEh5sfZfo9KiJy8OBBI+7Xr58Rr1+/3hpzwQUXGLFu3CUiEhkZacQNDQ3txq75uZqPNTY2GnF1dbURuxqa6TGu+erjp2PdeEzE32swZswYIz6SJmKu/S5btsyI9+3bZ+XoZmh79+414m3btlljvvjiCyPWxxcAAODb8E00AAAAAAA+sYgGAAAAAMCno1pEP/TQQxISEiK33npr27b6+nqZM2eOpKamSlxcnFxyySXcKxgAgB6Kaz0AAKYjron+8ssv5cknn5TjjjvO2H7bbbfJu+++K6+99pokJibKjTfeKBdffLF8/vnnRz3Z7uLKK6804vvuuy9IMwkuXdepa0OBI6Hrm0eMGGHErh4EumY3KSnJytH1+ampqUY8duxYa0xCQkK7cxWx6451zW52drY1Rtf+umqX9WPX19cbsavfgH5P1tbWWjm6friqqsqIS0pKrDH9+/c34uHDh1s5s2fPtrZ1RD+WrlMWETlw4IARu2rM9bHZvHmzES9dutQa4zo2sPXlaz0AAN/miL6Jrq6ulssvv1yefvppSU5ObtteUVEhf/3rX+Xhhx+WGTNmyKRJk+SZZ56RxYsXO/9yBAAAuieu9QAAuB3RInrOnDly3nnnycyZM43tK1askKamJmP7qFGjJCcnR5YsWeLcV0NDg1RWVho/AAAguLryWi/C9R4A0Ht0+r9zv/LKK7Jy5Ur58ssvrT8rKiqSiIgI679SZmRkSFFRkXN/c+fO7bP/HRoAgO6oq6/1IlzvAQC9R6e+iS4sLJRbbrlFXnzxRed9WI/EXXfdJRUVFW0/hYWFXbJfAADQeYG41otwvQcA9B6d+iZ6xYoVUlJSIieccELbtpaWFvnss8/kz3/+s8ybN08aGxulvLzc+Bfq4uJiyczM
dO4zMjKyxzWkGjRoULCn0C0MHDjQiLdt2xakmaCnCgsLs7bpz4rRo0cbsasJV3V1tRGHhtr/Pjh48GAj1g0CS0tLrTFlZWVG3NLSYuXoz4O0tDQjdn2+xcXFGXFjY6OVo49Nc3OzEdfU1FhjEhMTjVg3aROxj41u1OVqLHbWWWcZ8fnnn2/l9OvXz9qm6SZm77zzjhHHx8dbY3TTMFdDML3fzz77rN0/R/sCca0X6ZnXewAAXDq1iD7zzDNl3bp1xrZrrrlGRo0aJT//+c9l0KBB0q9fP5k/f75ccsklIvJNl9Rdu3ZJfn5+180aAAAEBNd6AADa16lFdHx8vIwbN87YFhsbK6mpqW3bf/jDH8rtt98uKSkpkpCQIDfddJPk5+fL1KlTu27WAAAgILjWAwDQviO+T/S3eeSRRyQ0NFQuueQSaWhokFmzZslf/vKXrn4YAAAQJFzrAQB92VEvoj/99FMjjoqKkscee0wee+yxo911txAdHW1te/HFF4Mwk+DyU4cKdJauqxcRGT58uBHr96CuORYROXDggBHrmmMRsf6baVZWlhFXVFRYY3T3YVcN986dO4141KhRRhwREWGN0f0DYmJirBxdJ63rsfXcRMRqAuWq4Y6NjTViPz0e9PH1U//seZ61TXd63r9/vxH7qYn2U9/squvG0ent13oAADrjiO4TDQAAAABAX8QiGgAAAAAAn1hEAwAAAADgU5c3FuttXPWMn3zySRBmElyu+826akiBQ1x1vrq2Xtclu8bpOuTk5GRrzLBhw4zYda9afZ/oyspKI9b1xK75ump2y8vLjVjXC+/Zs6fD/brunavrhXWOa4zepu8tLSLS1NRkxPoz7uuvv7bG/O53v7O2deStt96ytuk+Cvo5pqamWmNaW1uN2PX679u3z4h13bfrntoAAABHim+iAQAAAADwiUU0AAAAAAA+sYgGAAAAAMAnFtEAAAAAAPhEY7EOXHHFFda2xx57LAgzCa7s7Gxr286dO4MwEwRDenq6tU03qKqrqzNi1zmjm0QNHDjQytFNovLy8ow4NNT+t7/S0lIjzsnJsXJaWlqMWDfUcjUjq6+vN2JXMz19HPRcPM+zxujGV7rhlohIRkaGEcfFxRmxqwGYPjau5m67du0yYt3k7JlnnrHGREdHW9u0LVu2GPG6deusnKSkpHbnp88PEZHi4mIjdjVLGz16tBGnpaUZ8ZE2Fjv82LheRwAA0DfxTTQAAAAAAD6xiAYAAAAAwCcW0QAAAAAA+ERNdAdmz55tbeuLNdG7d+8O9hTQRfr162fEkZGRVo6uDw4Ptz8qdL2wrpsePny4NUbvR9c/u/aja5cPHDjQ4RhdTywiUlVVZcSNjY1GrGu6Rexjo5+za5uuMQ4LC7PG6Npf13z189R1va66b11b7aof17W9N954oxG76t87ehwRkQULFhhxWVmZlaOPsa5dds1XvwauWnv9WurjWV5ebo3Rz0HXzIuIJCQktP3ueZ5UVlZaOQAAoO/hm2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPtFYrAOFhYXBnkJQ6KZFe/fuDdJMcDQGDhxobdNNrVwNwHQTLt2MTMQ+R3QDKN00yrUf3SxLxG4k1tDQ0O7jiIhkZWUZsasBVFxcnBHHxsYasatpWGJiohHv2LHDytH0/Gtra62c5ORkI3a9v/Q43Szr4MGD1hh9zF0NwK6++moj9tNITNNNxERENm3aZMSuZnS6KVhpaakRx8fHW2P0OdPc3Gzl6PNVH9/Ro0dbY5YtW2Zt0yoqKjrMAQAAfQ/fRAMAAAAA4BOLaAAAAAAAfGIRDQAAAACAT9REd+BXv/pVsKcQFCkpKUa8a9euIM0E7QkLCzNiXQuqa3pdOa4aU11j3NTUZOWMGTPGiHWtta5lds0nNTW1w8fWtcyumuiCggIjDgkJsXJmzZplxDt37jTi/v37W2P27dtnxK764fLyciPWtdW6RlrE
rmfev3+/laNroHWdd1RUlDVG93D44Q9/aOUcSQ20Pg7btm2zcvRrW1xcbOXo59DY2GjEZWVl1hjP84xY1z+LiIwaNcqIV65cacSu2nC9XwAAAL/4JhoAAAAAAJ9YRAMAAAAA4BOLaAAAAAAAfKImWpk0aZIRr1ixIkgzCS5XPSO6n5iYGCPW94XWdbWuMS0tLVbOkCFDjNh1r2Ndz6xrol33Cdb3BR4xYoSVo/eja211jbSIyKJFi4w4MzPTytF1sfr+w657S+v6cdd9g/Xz1DXmrtpbXUftek56vroGWu9DRGTo0KFGfCT1z65a9sWLF3eYo49NXl5eh/v2U5+v6/6TkpKsnJKSEiPWNfGu/VITDQAAjhTfRAMAAAAA4BOLaAAAAAAAfGIRDQAAAACATyyiAQAAAADwicZiyl//+lcjnjBhQnAmcoyNGzfOiDdt2hSkmeDbxMfHW9t0YybdEEz/uYhITU2NEY8aNcrK0c29dBMxEZHGxkYj1s2cIiIirDG60ZmrOVZkZGS7+yksLLTG6OZoroZlurlUSkqKEevnIyISGmr+O2NdXZ2VEx0dbcS6eZZuCCYisnPnzg7nqxuL6QZmruZeV155pRG7msYdPHjQiPv372/EukmbiEhBQYERu56TbtRWVlZm5eimYPo5ZmVlWWN0MzrdaEzE/ryqqqoyYj1/AACAo8E30QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgEzXRSk5OTrCnEBT19fVGrOtHEXyumuihQ4cacWZmphG7akz1a52enm7l7Nu3r8P96HNE11rrGl4Rkbi4OCN21Rjr/a5fv96IXfW4us5X1ymL2DXQlZWVRqxrel05rsfW9eO6zlvXgYvYz9tVu6xfJ127fPfdd1tjtGeffdbapuumNV2vLSKSkJBgxPr8EBHJy8sz4qamJitH1517ntfuXEREiouL252LiMiSJUuMWL8mruMLAABwpPgmGgAAAAAAn1hEAwAAAADgE4toAAAAAAB8YhENAAAAAIBPNBZT/vGPfwR7CgEXGmr/20lZWVkQZoL2xMbGGnF2draVM378eCPWTblcDZV0gy1Xcy/92Lpxl4hISEiIEe/fv9+IXQ3Ltm/fbsSuZl5ff/21EevmU67mfzfddJMRu85n3SRMx66GVbqp1YgRI6wc3YhLNzUrKiqyxqSlpRlxUlKSlaOf97/+678aseu1/fOf/9zhY0dERBixPlYNDQ3WGH0+6AZ2IvZz2LNnj5WjG37pRnO68Zhr2+rVq60cfR4tXbrUygEAAOgqfBMNAAAAAIBPLKIBAAAAAPCJRTQAAAAAAD5RE63cdtttwZ5CwI0ePdratm3btiDMBO3Jzc014kGDBlk5kZGRRqxroqOioqwxur61sLDQyklNTW13vyIiNTU1RqzrfF2197q+Ve9DRKSgoMCIJ0+ebMSnn366NSY5OdmIS0pKrJxNmza1Oz/XsdJ10vrYidh1vbrG/JRTTrHGlJaWGvHJJ59s5QwbNsyIdc35f/zHf1hjdM3xxIkTrRxN133rWmwR+3XbunWrlaNr1aurq60cfb7q18BVn69fW1d9vmscAABAoPBNNAAAAAAAPrGIBgAAAADAJxbRAAAAAAD4xCIaAAAAAACf+nRjsfPPP9/a9o9//CMIMzm2wsPtl103DkJg6SZWupGXiMioUaOMOCkpycrRjZmys7ONODEx0RpTW1trxBkZGVZOS0uLEVdUVFg50dHR7e43LCzMGqMbibmaWOkmZscdd5wRT58+3RqzZcsWI9ZNxFzz0cfG9R7Qx0E/RxH7+C1btsyIdeMxEfdnj/bVV18Z8YcffmjErsZturnXRRdd1OHjbNiwwYibmpqsnJSUFCN2vbYhISFG7Goap8fpxm36dRSxj8OqVausnPLycmsbAABAoPBNNAAAAAAAPrGIBgAAAADAJxbRAAAAAAD41Kdroq+77jpr20cffRSEmQSWrl09ePCgleOqTUXg6Lrf
4cOHWzm6vrm1tdXK0fW2ut79wIEDHY6pq6uzcmJiYoy4qqrKyomMjDTi/v37G7GrxjgtLc3apul6cV1Hq2t4RUSWL19uxK66f11Trmt2XfXj+tj069fPyhk8eLART5061Yh1bbuI/X578sknrZxPPvnEiAsKCox47Nix1pi8vDwj1q+R67ELCwuNOD4+3hpTVlZmxLGxsVaOrqWOiIiwcvQ5rGPXubhu3Tojpv4ZAAAEG99EAwAAAADgE4toAAAAAAB8YhENAAAAAIBPfbomesCAAda2hoaGIMwksHSt5549e4I0Exyi7wudm5tr5eh76Op7FovY9+atr683Ytc9ivUYV52vrtl27UfPT9eq6nsLi9h1s676fL1N37t937591hj9vk1OTrZydC2wnos+LiIis2bNMmJXHbJr3OH2799vbfvss8+M+O9//7uVo+uQ9T2q9WstIpKfn9/uXEREKisr241d9yzXr4krR9czu2qrdX373r17jVjX1Yu4780NAAAQTHwTDQAAAACATyyiAQAAAADwiUU0AAAAAAA+sYgGAAAAAMCnPt1YrDc2EXMJDzdf5tbW1iDNBIeMHj3aiF1NmHTDL90IS0QkOjraiHUTuYqKCmuMblCmG4SJiOzYscOIMzMzrRzdFEo/h+rqamtMY2Njhzm6OdbAgQONOCcnxxpz4MABI9bNskTsZmNXX321ESclJVljdCMsl6+++sqIt2/fbsRvvPGGNUY3btOvm4jdOGzkyJFG7GpyNmLEiPYnK3YDuNjY2HZjEbu5l56LiMiyZcuMWH/uiNjHWJ9DrkZzNBYDAADdDd9EAwAAAADgE4toAAAAAAB8YhENAAAAAIBPfbom+j//8z+DPYUu56qtdNXF4tiKiIgw4vHjxxtxZGSkNUbX8LrommK9n7q6OmuMrvN11SVnZWV1OD9do61jXacsIlJcXGzEujZcxK7RrqqqancfIiJnn322EbtqdvVr4Me2bduM+Ouvv7Zy1q9fb8S65njnzp3WmEGDBhlxSEiIlRMVFWXEup7cVfftR01NjRHruvp9+/ZZY2JiYoxY12uLiHieZ8Suzx197unnoB9HxK7HLyoqsnIAAACOJb6JBgAAAADAJxbRAAAAAAD4xCIaAAAAAACfWEQDAAAAAOBTn2osFh8fb8Rr1qwJ0kyOrdbW1mBPoc8bN26cEaelpRnxwYMHrTH6dXM1n6qtrW33cXUjJxGR0FDz385c+9XNvFyPoxto6QZQ/fr1s8bk5eUZsW5qJWI3pPrpT39qxLrhll+68VVJSYkRL1y40BozcOBAI961a5eVs3fvXiPev3+/EbsamunXIDU11crRjdlOOOEEIx48eLA1xg/dAC48vOPLgG72po+liH3OxMbGWjn6M1ife67zTM8XAAAg2PgmGgAAAAAAn1hEAwAAAADgE4toAAAAAAB86lM10bfeeqsRv/DCC8GZSAC5akyLioqCMJO+S9e7iti1tbreVdeKitj1oq66WZ2j65Bd50NZWVmHOVlZWUa8adMmK0fXJuv9VFdXW2NGjhxpxDNmzLByXHXcHdH1uFu2bLFydK21rm/esGGDNUa/d5qbm62c7OxsI9a17H5qetPT061tcXFx7T7OGWec0eF+XfTzLi0tNWJXLbN+rV21y5MnTzbiFStWWDm6fryhocGI9ftCRKSmpsba1hX0+eqqtdfvFQAAABG+iQYAAAAAwDcW0QAAAAAA+NTpRfSePXvkiiuukNTUVImOjpbx48fL8uXL2/7c8zy59957JSsrS6Kjo2XmzJmydevWLp00AAAIHK71AAB8u07VRJeVlcn06dPljDPOkPfff1/69+8vW7duleTk5Lac3/3ud/LHP/5RnnvuOcnNzZV77rlHZs2aJRs2bDji+7t2lfHjxxuxrs/riSIjI424sbExSDPBIa77Lvfv39+ID3/PiNivo4hdm6prekVEEhMTjVjX47rOh5SUFCPOyMiwcvRfhouL
i62ciRMnGvH3vvc9I9Z11X7V19cbsa6t/vTTT60xuhZc36tZxL5fc0FBgRHv3r3bGqPvfe2qd9e16roG2nU/Z/0auOrS9b3Dhw4dasSuOno/9Lk3ZMgQI3bVAevn7ec+0a7nvXbtWiPWz9FVR6/PBxf9nnPNT9P1+a5zpq/WRPf0az0AAIHWqUX0b3/7Wxk0aJA888wzbdtyc3Pbfvc8Tx599FG5++675aKLLhIRkeeff14yMjLkzTfflMsuu6yLpg0AAAKBaz0AAO3r1H/n/vvf/y4nnniifP/735f09HSZOHGiPP30021/vmPHDikqKpKZM2e2bUtMTJQpU6bIkiVLnPtsaGiQyspK4wcAAARHIK71IlzvAQC9R6cW0du3b5fHH39chg8fLvPmzZMf//jHcvPNN8tzzz0nIv93Oxj930MzMjK+9TZLc+fOlcTExLafQYMGHcnzAAAAXSAQ13oRrvcAgN6jU4vo1tZWOeGEE+TBBx+UiRMnyvXXXy/XXXedPPHEE0c8gbvuuksqKirafgoLC494XwAA4OgE4lovwvUeANB7dKomOisrS8aMGWNsGz16tPzv//6viPxfE57i4mKjqVBxcbFMmDDBuc/IyEhnU6VA0P9q3huacOmGSrpRDwJPNzVyNdTS2/S5l5aWZo05cOCAEbuaWmnl5eVGHBMTY+U0Nze3+zgi9nl0zTXXWDmnnXZah/PRdOOoffv2WTn6m6xVq1YZ8ZYtW6wxAwYMMGLXX86nTJlixPpY6QZbIvax0o3cROxmbrqp0uTJk60x+rV0fQbq5mM5OTkdjvFDN6zTDcp0wzURkYEDBxpxQ0ODleOnAZg+nvo5HGkjL/05qOnnLCLWt6Cu1/akk05q+72pqUneeeedI5pfTxOIa73Isb3eAwAQSJ36Jnr69OmyefNmY9uWLVvaurDm5uZKZmamzJ8/v+3PKysrZenSpZKfn98F0wUAAIHEtR4AgPZ16pvo2267TaZNmyYPPvigXHrppbJs2TJ56qmn5KmnnhKRb76Ru/XWW+U3v/mNDB8+vO22F9nZ2TJ79uxAzB8AAHQhrvUAALSvU4voyZMnyxtvvCF33XWX3H///ZKbmyuPPvqoXH755W05d9xxh9TU1Mj1118v5eXlcvLJJ8sHH3zAfSMBAOgBuNYDANC+Ti2iRUTOP/98Of/887/1z0NCQuT++++X+++//6gmFgi6dhXoCtHR0UY8fPhwK0fXQOs61ISEBGuMrjutq6uzcvRfWPWYQ7WLh1u7dq0R65peEZFbbrnFiIcOHWrlaLomdvny5VbO+vXrjdhV571mzRoj1vXi4eH2x5au4a6tre1wv0lJSUbsqsfVddKuuln9uaJroL/88ktrjD6eeXl5Vo4+r3Q98ZHSz6G4uLjDMbrm2HXO6OPgqvPXtckVFRVG7Kcm2vU5PmzYMCOeNGlSu48rInL88ccbsa6RF/mmDvqQ+vr6PlMTLdKzr/UAAARap2qiAQAAAADoy1hEAwAAAADgE4toAAAAAAB8YhENAAAAAIBPnW4s1pM988wzwZ7CUdHNfUREIiIigjATHC4sLMyIXQ2VYmJijFg3/CopKbHGVFdXG7GrOdL27duNWDfL+vrrr60xI0eONOKrrrrKytFNrVyWLFlixKWlpUb8xhtvWGP0+epq1FVYWGjEullaTU2NNaaoqMiIXY3adDMsnaObUYnYr5tuCCciMmjQICPWjeX69+9vjYmMjDRiV7M0fR51VWNEvV/dcM3VwCwuLs6I9WskYp8zrtdp//79RqybebnOcc/zjDg7O9vKycrKMmL9Wh44cMAao7nme/hz0u9zAADQd/FNNAAAAAAAPrGIBgAAAADAJxbRAAAAAAD41Ktroo877jgj1jWmPU1ubq61bcuWLUGYCQ6na2Bd9cS6HreioqLdPxex64d1/bOIXUOqx9x+++3WGF3f6scHH3xgbVu6dKkR69plXe8qYteCu2qiU1JS2s3R
jyNi14K3tLRYObrW11Vbq+ljNWrUKCsnJyfHiHXtsqsmWtN1v679dBVdCz5mzBgjLigosMY0NTUZsa7pFrHruvft22fl6PNT15jr+nIRkdBQ8996XbXLus67rKzMiPV5J+Lv+B4+X9d7FAAA9E18Ew0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPvXqmujTTjvNiDds2BCkmXQN171kcWylp6db23QNtOt+srr+Ur+WrjE6x1WHeu655xrxjBkzrBxt8+bNRvzRRx9ZObqmeOfOnVbOl19+acS6dtlVG67vE6xrbUVE6urqjLi2ttaIdb2riH0P9QEDBlg5ut62o/pcEZGBAwe2uw+RrqldDlT9sx/6vuErVqywcnQ9sKvOe/HixUbsqq3WdfL6M9p13/tly5YZsa5/FxGZPHmyEevzytUPQ9e763uCi5g9B3RtNgAA6Lv4WwEAAAAAAD6xiAYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn3p1pyrdXOill14K0ky6xoEDB4I9hT7P1XxIN6RyNSDSTcJ0AyXXmKysLCO+/fbbO5yfbqD04IMPWjnvvvuuEY8dO9bK8TzPiJubm60c3fBLN/dKSUmxxujjMGLECCtn69atRpybm2vEW7ZsscYkJycb8ZAhQ6wc3ZBKz1/vQ0RkzJgxRuxqAKYbobkaqnVn+ng3NDRYOXpbTU2NlaObz+njImK/V/T7affu3daYhIQEIx43bpyVo/ezZ88eI3Y1hNPzS0tLs3KKiorafq+vr7f+HAAA9E18Ew0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPvXqmuicnBwj7mk1xbqGs6KiIjgTQZuDBw9a22JjY43YVX+p62RnzJhhxK56zNTU1A7ns3btWiP+wQ9+YMS6BllEZP/+/Ua8a9cuK0fPt3///lbO+PHjjVjXGGdkZFhj9LEqKyuzcuLi4oy4sLDQiI877jhrzIQJE4w4MjLSytF1vKeccooR6x4KIiI7duww4ry8PCunp9VAa8OHDzdi1/mwbt06I3Z9Fult5eXlVo4+73VvgKioKGuMrqN2vU5NTU1GrM97V02/np/rvXL4NcNVKw4AAPomvokGAAAAAMAnFtEAAAAAAPjEIhoAAAAAAJ9YRAMAAAAA4FOvbizmagLUk4waNcqIv/jiiyDNBIeMGDHC2qYbH+lmSSIiZ599thGPHDmyw8fSzZIefPBBK+fll1824tLSUiN2NUNKTEw04n79+lk5nucZsatZmm4kpps3NTY2drhfV056eroR19fXdzgX3bDKRTcbczVL0xISEjrM6el0Q7D169dbOZs3bzbigoICK2fbtm1G7GoAFxYWZsS6uZd+rUXsRmL6ffFt2w7natKnG+y5zsXDm+O55gYAAPomvokGAAAAAMAnFtEAAAAAAPjEIhoAAAAAAJ96dU10SUlJsKdwVMrKyoI9hT5P13CGh9tvmbi4OCP+wQ9+YOUMGTKk3cf5+uuvrW3vvvuuEf/2t7+1cnQ9s653bm1ttcbo2mVXrafetnXrVitH1xgPHjy4w/0eXmPqmouIyNChQ404OjraiF31z/o16Cq6TtZV736s6DpkVx2wrvOdNm2alaOf08KFC4347bfftsbofgx1dXVWjj7XZs2aZeXo/gF6jKsGPSYmxohjY2OtHF37X1VVZcS6fl/EPq90vb6en/4sAAAAfRffRAMAAAAA4BOLaAAAAAAAfGIRDQAAAACAT726Jnrp0qXBnsJR2b17d7Cn0OfpexbrOkoRkXvuuafdMS47duww4scff9zKWbRokRGHhIRYObqmWNfJ5ubmWmMKCwuNODk52crZs2dPu/MVERk+fHi7c3HNt7i42Ihd98vWdcfnnHOOEbvua30kPv74YyNesmSJlaPvj/xf//VfVs6R1EmvWbOmw8fW913W9wDX9cUi9rHRr5Fr3AcffGDEK1assMbommJXrb2uXXbVN+tter9RUVHWGM3V60K/5/Q9
wHft2mWNSUtLM+K9e/daOdnZ2W2/d9V5BwAAej6+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgU69pLDZp0iRrm26y1N1lZWUZ8f79+4M0ExwyePBgI7711lutHD+NxMrKyoz4D3/4gxEvX77cGuN5nhFPnDjRylm1apURh4ebb2ndPElEpLy83IhdTaJ0E6XU1FQrJycnx4j1+003dxIRaWhoMOKDBw92OD99HI6UbqD14YcfGrGrwdrZZ59txO+9956VM3r0aCPWzdLWrVtnjfnb3/5mxK7GV7qxmOZqdKUbic2fP9/Kufjii41406ZNRlxfX2+NcZ0jWl1dnREvXLjQypk8ebIRDxgwwIh1ozERkZaWFiN2vU4VFRVGrM/Xffv2WWNCQ81/Q9bvHT0f13EBAAB9E99EAwAAAADgE4toAAAAAAB8YhENAAAAAIBPvaYm+oQTTrC27d27NwgzOXK6ttZVx4dj67TTTjPiCy+80MrRtcAHDhywcjZu3GjEBQUFRrx582ZrTFxcnBGHhIRYObo+VI+prq62xujaTl3LKmKfi7r+WcSuZ9Z137rmVMSueY2Pj7dydG+A1atXG/FJJ51kjenocVx0na/rWK1cudKI9fEVEXn99deN+PnnnzfiBQsWWGP0Y7lef318Bw0aZMTr16+3xui6eV0jLWLXGGuu88EPXbu+ZcsWK+eCCy4w4oyMDCN2vQa69jsyMtLKaWxsbHduNTU1HY5x5Rx+frrefwAAoG/im2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPvWaxmKupi+ff/55EGZy5HRjJgTf3XffbcS6eZKISGVlpREnJCRYOc3NzUasG1+5mjlFRUW1uw8RkbS0NCPWzbJ0cyrXY+3atcvK0Q2fqqqqrJzjjz/eiAcMGGDEKSkp1hg9v+OOO87KGTx4sBHr5+1q3JaammrE27dvt3IWLVpkxB999JERu5qnlZaWGrF+TUREli9fbsR/+MMfjFg3BBMR+fLLL43Y9drq80jnDBw40BrzySefGPF1111n5ejGYuHhgbkM9O/f39q2f/9+Iw4LCzNiVzO6mJgYIy4uLrZyMjMzjVifZ7r5n+uxdQMzvR+9TwAA0HfxTTQAAAAAAD6xiAYAAAAAwCcW0QAAAAAA+NRraqJd9YGuukigPbrmVddjuuiaYl1PLCLy/vvvG7GuDc3KyrLG7Ny504jz8vKsHF3rqetmR40aZY3Rj+2q8y0vLzfiadOmWTnDhg0zYl1r6zJy5Egjzs/Pt3Li4uI63I9WXV1txNu2bbNydO36lClTjHj37t0d7nft2rVWjj7m8+bNM+LzzjvPGvPpp58ases569ffVZeu6Zr9kpISK2f8+PGd3u+R0OeZiF3P7uploenz3vU6RUZGtrtffa665qf3ISKSnp7e9rurbwEAAOib+CYaAAAAAACfWEQDAAAAAOATi2gAAAAAAHzqNTXRZ555prXt3nvvDcJM/ImIiLC2UXMXfHPmzDFi131rtSFDhhjx+vXrrRxdY6xrYF01vLq+Vd+zWMQ+j/R8N23aZI3R9y2ur6+3ck4//XQjHjt2bIePrevHGxoarDHjxo0z4iOpf3bdJ1gf86+++srK0bW0ul7Yz/2S9f2IRUSSk5ONWPdiSEpKssbo+3u77iVdUFBgxImJiUZcUVFhjdG9IdasWWPl6M/KU045xYiXLVtmjTmSeyS73jv6nNDHxtXbIjs724jLyso6fOx9+/YZsT7eIvb7yXVf88Nrq/3UbwMAgL6Bb6IBAAAAAPCJRTQAAAAAAD6xiAYAAAAAwCcW0QAAAAAA+NRrGovp5k7dXVZWlrVt7969QZhJ3xUZGWlty8/P7/R+CgsLjXj//v1Wjj4/dRO56upqa4zej6ux2ODBg404Pj7eiKOjo60xo0ePNuKTTz7ZytFNllyNpXSjK900Kicnxxrjasyl6f3oRmILFy60xujG
bbt27bJy+vfv3+6YpqYma4x+n27dutXKmTFjhhHrY+dqSDVgwAAj/uyzz6yc8ePHG7F+Dfr162eNaW5uNmI/nylnn322ET/xxBNWTmxsrBEXFRVZOenp6e3ORURk1KhRRjx16lQj1s30REQOHjxoxMcff7yVs3z5ciPOyMgwYtd7R58PrsZye/bsafvd1YAPAAD0TXwTDQAAAACATyyiAQAAAADwiUU0AAAAAAA+9Zqa6NWrVwd7Cp2SnJxsbXPVcSJwXDWbxx13XKf3M3ToUCPWtbYi9uut64UrKio6fBxXDayuid6xY4cR6/piEZHQUPPfzlx1yjrHVVut6dpgV31rY2OjEb/zzjtWzldffWXE27dvN+Ivv/yyw7m4aoF1Pa6uQ4+Li7PG6Bpz1/HUr/fw4cM7nN+kSZOMeN68eVaOroHWx07XXovYdfQ7d+7scC66Xv873/mOlaPfK67PqlNPPdWIN27caOXoenw9xtVPQL/+rveXfl30sVm/fr01JiYmxogjIiKsnMPPibCwMOvPAQBA38Q30QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgE4toAAAAAAB86rGNxXSTpd27dwdpJkfG1cTI1YgJgeNqFKQbc/lpEqVfS9eYtLQ0Iy4pKTHiyZMnW2Nef/11I/7888+tHN0MSTcsS0xM7HC+LklJSR2OSUhIMOKQkBAjdjVzeu2114x4wYIFVk59fb0RFxQUGHFqaqo1RjcAczWJ0sdcz9/VhEu/lrqRm4hIVFSUEZ9xxhlGrBuCiYjU1tYa8fjx462cbdu2GbFuuJaVldXhfnWDOBGRoqIiI9afpf/yL/9ijXn11VeNuKamxsrRx/ess86ycnRDtfBw8xKUkZFhjdFNzbZu3Wrl7Nu3z4j1ubhp0yZrzPHHH2/EutGYiPn+cZ1TAACgb+KbaAAAAAAAfGIRDQAAAACATyyiAQAAAADwqcfWRA8aNCjYUzgqrvpLHFutra3WtocfftiI8/PzjdhVRx0dHW3ErlrV9PR0I9bnr6vmODIy0ogzMzOtHF0vrOs69T5E7BpTvQ8RkdjYWCOeOnWqlaPP4f/5n/8x4vfee88as3HjRiNeu3atlaPrerVLLrmk3T8XEfnoo4+sbbpu9rTTTjNiV/2wPkcGDBhg5UybNs2IdY3x0qVLrTHFxcVG3L9/fytn165dRqzrhVevXm2N0fXiVVVVVk5FRYUR67r5G264ocO5uOrSdb34+eefb+WMHTvWiCsrK43YVZesH8vVc2Dx4sVGrGug9fEWsXtQ6PpsnUPPCgAAcAjfRAMAAAAA4BOLaAAAAAAAfOrUIrqlpUXuueceyc3NlejoaMnLy5Nf//rX1n95u/feeyUrK0uio6Nl5syZzluSAACA7odrPQAA7etUTfRvf/tbefzxx+W5556TsWPHyvLly+Waa66RxMREufnmm0VE5He/+5388Y9/lOeee05yc3PlnnvukVmzZsmGDRus+6keDX0/3I7qKLsb171jcWzp2mAR+968F110kRHPmDHDGjNmzBgjzsvLs3LGjRtnxPq+xi4XXHCBEbv6ALz//vtGrO+x7DrPdA3sZZddZuWkpKQYses+uz/72c+MWN+rXde7ioiMGDHCiPX7WMSum9V1sq57FOt7H2dnZ3f42Lqme+TIkdYYXQt8zz33WDn63sd79+41Yl2LLWLXWrvORV2Dq+umXTXyTU1NRqzvTy5inxN6P67PaV3n7XofaPo1cdH12a6aaM1Vl67rphctWmTEEydOtMboY6XnIiKyf//+tt8bGho6nFtv0Z2u9QAAdEedWkQvXrxYLrroIjnvvPNERGTIkCHy8ssvy7Jly0Tkm7/0Pfroo3L33Xe3LT6ef/55ycjIkDfffNP5l3UAANB9cK0HAKB9nfrv3NOmTZP58+fLli1bRERkzZo1smjRIjnnnHNERGTHjh1SVFQkM2fObBuTmJgoU6ZMkSVLljj32dDQIJWVlcYP
AAAIjkBc60W43gMAeo9OfRN95513SmVlpYwaNUrCwsKkpaVFHnjgAbn88stF5P/++56+FUtGRsa3/te+uXPnyn333XckcwcAAF0sENd6Ea73AIDeo1PfRL/66qvy4osvyksvvSQrV66U5557Tv793/9dnnvuuSOewF133SUVFRVtP4WFhUe8LwAAcHQCca0X4XoPAOg9OvVN9M9+9jO588472+qdxo8fLzt37pS5c+fKVVdd1dagpri4WLKystrGFRcXy4QJE5z7jIyMlMjIyM5PPNyc+uLFizu9j2MpKSnJiF3NkRB8uvmU7jY7b948a8wXX3xhxEOGDLFydGMj3dyrurraGqObQrkaix36ZuiQzz77zIhnzZpljdENktasWWPlvPzyy0asm3CJiJSWlhpxXFycEbsadenmY7oRmojdkGz9+vVG7Go+FRERYcTJyclWzt13323EuqnZwIEDrTGa63XSx0o3AKurq7PGtLS0dPhY6enpRqybJ4aFhXU4RjfPEhGJjo5u93G/973vWdvee+89Iz5w4ICVoz+TV69ebeVUVVUZsf7sd30ulpWVGXF5ebmVo99z+lzUzepE7IZv+nFERAoKCtp+dx3L3ioQ13qRI7/eAwDQ3XTqm+ja2loJDTWHhIWFtXWazc3NlczMTJk/f37bn1dWVsrSpUslPz+/C6YLAAACiWs9AADt69Q30RdccIE88MADkpOTI2PHjpVVq1bJww8/LNdee62IiISEhMitt94qv/nNb2T48OFtt73Izs6W2bNnB2L+AACgC3GtBwCgfZ1aRP/pT3+Se+65R37yk59ISUmJZGdnyw033CD33ntvW84dd9whNTU1cv3110t5ebmcfPLJ8sEHH3DfSAAAegCu9QAAtC/E8zwv2JM4XGVlpVUv6nL4rTVERFasWGHluGrcgkXX7BUXF1s5rtpJBE5ISIi1Td/fdOXKlUY8bdo0a4zuRuuqCdQ1sOPHjzfiV155xRqja6BdtYQ/+MEPjFj/F0xdgyoi8tBDDxlxc3OzlaPrRXX9sIjI3r17293PJZdcYo05vMZURGT58uVWjp6zroHWdb8iIlOmTDHihx9+2MrRNa36MyM1NdUa89FHHxnx9u3brZyhQ4ca8YABA4x48+bN1hj9seuq8922bZsR6xp5134P1aoeos8zEWn7NvEQ3WH573//uzXmnXfeMeIXXnjBytHHwfUZ9/vf/96IU1JSjNhV767PRX0cROzael33rXtSiIgMHz7ciA/dA/lwmzZtMh7j3/7t36SiokISEhKsXHTeoev96XKRhIf0C/Z0AAB9XLPXJJ/KW76u9Z2qiQYAAAAAoC9jEQ0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPnWqO3d3ohvFdKcmYi66KZRucoRjLzzcPv3j4+ONeMyYMUZcW1trjYmIiDDimpoaK0c3Olq0aFGH8ystLTVi3WhKxG7e1NDQYMRhYWHWmMObJYmIZGVlWTm62ZTreY8aNcqIdVMo3exJRNruM3vI5MmTrRzdHE13+/3e975njTnrrLOMeOfOnVbOnDlzjHjp0qVGrJuTidjNxvRrLSJy4MABI46Li7NytFWrVhmxq7FYR59x+liK2OdvXl6elbNlyxYj1q+1nwaHgwcPtrZNmjTJiF9++WUr58EHHzTicePGGfHFF19sjdHP0/W89eukG5ZVVFRYY2JjY4343HPPtXIOf11c7wEAANA38U00AAAAAAA+sYgGAAAAAMAnFtEAAAAAAPjUY2uiv//97xvx//zP/wRpJv7ous7m5uYgzQSHuGqBR4wYYcTJyclGvH37dmuMrr+srKy0cnR9q67R3Lx5szVG10TrelcRkYULFxqxrktOTEy0xuj95ObmWjklJSVG7HmelTNs2DAj3r9/vxHr2msRkZycHCM+6aSTrJyxY8casa6/TU9Pt8bo9/8XX3xh5eiaVl2HPG/ePGvMwIEDjfjCCy+0cgoLC41Y1xS7
jsPWrVuNeNeuXVaOPhf1fl0157r3wsaNG60c1/E7XHZ2trVN1w8PGTLEytGfcRMmTLByVq9e3e5j//CHP7S29e/f34h1PbmIXVut31+67lvEfs8NHTrUyjm8dwWf2QAA4BC+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgU49tLHbuuecGewqd4moChOA67rjjrG26eVNcXJwRu5ouNTY2GrHrtdaNpHSjMdeY6upqa5sWHm6+hRsaGox437591hid42qYtHv3biN2NZJav369EQ8YMMCIXcd36tSpRqwbt4nYja/0c3jppZesMZ988okR79mzx8rRx1w3YdPHUsR+/auqqqwc3Sxt5cqVRqyPk4jIzp07jdjVuE2/LroRlqtZlm4Ip5upiYiMGTPG2na4U045xdr29ttvG7E+50VEXnjhBSPu16+flaOPp24QqV8j12PNmDHDytmyZYsRh4WFGbHr/NXvA/0eFTFfb/3ZAAAA+i6+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn3psTXRlZWWwp9Ap8fHxwZ4CFFedZFNTkxGHhIQYcUtLS4f7jY2NtbatWLHCiHU9q64VFRHp37+/Eet6YhGR1tZWI9a1tq65REdHG/HatWutnIiICCM+ePCglZOQkGDEuoZ36NCh1hj9vFNSUqyc0tJSI37++ec7HLNhwwYjdtWv6v3qnEmTJlljRo0aZcSRkZFWjq7h3rVrlxG76tL1eaTPMxG7Ll3XTY8YMcIao2uD9fksYtePf+973zNiXU8sYj9HF13X76q11z0Fxo8fb8SuOm/9HFx105qutS8oKLBy9Pnpqs8//FyrqqqSO+64o8PHBgAAvR/fRAMAAAAA4BOLaAAAAAAAfGIRDQAAAACATz22JrqoqCjYU/hWrrpJ7hMdfFFRUUasa4NF7NdO1666akw1V81mWVmZEeuaflfN6ddff23ECxcutHLKy8uNWN+bV98LWcSuzz9w4ICVM3DgwHb3K2LXUusa8x07dlhjRo8ebcS65lhEZOPGjUasa6C/+uora4yuBXc978TERCPWNd2pqanWGF2z+/HHH1s5uoZYv/6u80zfk9pVP6y36XriRYsWWWN0XbfrfNX389bnkOs4jBw50ojvvPNOK0dz9YHYu3evEevzyjVGPwfXazto0CAjLi4uNmLX/b013V9AxLwHuKtuHQAA9E18Ew0AAAAAgE8sogEAAAAA8IlFNAAAAAAAPrGIBgAAAADApx7bWOzgwYPBnsK3cjW+cTVvwrE1btw4I05OTrZydPOgiooKIx42bJg1xs+5qMeVlJQYsauxmG4+5edxGhoa2o1FROrq6oxYN08TsRt1uRqA6X3rZn9Lliyxxuj9zJgxw8qZNWuWES9evNiIXQ2exowZY8T6dRMRmTJlihE3NjYasT4uIiIvv/yyEbuaxunH0k2tdNMrEZHNmzcbsatxm26Ep/c7ePBga0xHcxMRCQ0NbTfH1VjsggsuMGLdIE5EZNOmTUbsek66cZ/+rHQ1QtOvt6th3fHHH2/E7733nhHrpnciIvv37zdi1+fB4eeIn6aCAACgb+CbaAAAAAAAfGIRDQAAAACATyyiAQAAAADwqUfURLtq1bqzhIQEa5urNhXH1vDhw41Y1/CKiGRmZhpxbGysEbvqZvU2V+2krslMSUkxYletbU5OjhHrOtojVV9f3+kxut5VxK7Z1TXbuqZbRGTDhg1G7Oof0NraasS63rmmpsYas2/fPiN21U3r92Bzc7MRb9u2zRqjX0vXc9LnUVhYWIfzjY6ONuKysjIrR79O8fHxRqzri130+SsiEhERYcTV1dUd7kf79a9/bW176623jPj555/vcD7r1q0zYletdWJiohHv2bPHytm4caMRn3nmmUa8evVqa4y+rrjet57nOX8HAAB9G99EAwAAAADgE4toAAAAAAB8YhENAAAAAIBPLKIBAAAAAPCpRzQW0w2Mujvd
WEhEpLy8/NhPBIa0tDQjdr1OurmQbnLlahqnG37p5mQidkMqvZ8tW7ZYY3Tzqe6mowZlJSUl1rYVK1Z0uA/9fh86dKgR68ZYIiK7d+9udy4iIv369Wv3sTMyMqwx+nXT+xCxz6O8vDwjdr22fpp56cZnjY2NRuynYZmrwVppaakRT506tcO5aLNnz7a2rVy50oh1IzTXtoULFxrxiSeeaI1JTU01Yn18RezXPzTU/Pdh3fRORCQrK8uIXU3jDt+v63gDAIC+iW+iAQAAAADwiUU0AAAAAAA+sYgGAAAAAMCnHlET7aoNraurC8JM/KmqqrK26VpbBFZiYqK1TddS6tpQEbsGWteu6lpLEZHs7OwO95uUlGTEuo7WVW+p6zxjYmKsHH1edffzTNcHu+ara6InT55sxCeffLI1pqyszIh1nbqIfYz1a+Kqm9Xng2u+urZa19q6asN1TfyBAwesHF1/reeia6RF7GPnqh/Xj93S0mLlaPo5uvpUXHXVVUZcWVlp5Xz++edG/OGHHxrx4MGDrTE333yzEbv6S+htup48NzfXGqPfg666+sOPjZ/jBAAA+ga+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn3pETbSLrq3rTlz1uBUVFUZMfV1g6XvLith19Lp2VcSui9Svpa61FLFrXuPi4qwcXc8aGRlpxDt37rTG6Pta792718qpra21tnUFXUvruj/2hAkTjPjjjz82Ytd9d6+99lojdt1LWNPHyuWGG24w4nXr1lk569evN2Lda+Htt9+2xhQVFRmxqyZa70c/b9c5oz8PXM/RVX9/OFe9s67H17Xirvl4ntfu44iI7Nq1y4hHjBhh5eieA+ecc46Vo+8TrmuVP/roI2uMvk/4aaedZuXofgH6dXL1KUhPTzdi13E4vFY9UO81AADQ8/BNNAAAAAAAPrGIBgAAAADAJxbRAAAAAAD4xCIaAAAAAACfemxjsVWrVgV7Cp1CI7FjSzcjEhGpr683YlcjId0ULCwszIhdDYoaGhqM2NUsSzcx0s3IiouLrTFTpkwx4oyMDCunoKDAiKurq424srLSGqObLoWEhFg5KSkpRnzhhRdaOYMGDTLiX/7yl0Y8duxYa0xycrK1rSO6KduGDRusHN3M6/jjj7dyxo0bZ8Rr16414qioKGtMUlKSEetzSERkz549Rrxv3z4jXr16tTUmNjbWiGtqaqwcfX5OnTrViF2v7eTJk41YN9MTEdm6dasRDxkyxMrRGhsb252biH0eTZ8+3cqZNm2aEetmdJs3b7bG3HXXXUZ86aWXWjn//M//bMS6IaCr2Z9+LNexOrzRoOu1BwAAfRPfRAMAAAAA4BOLaAAAAAAAfGIRDQAAAACATz22Jvrzzz8P9hS+VVpamrVt27ZtQZhJ7zB48GBr286dO9sdk5eX1+F+db2ziF27rmtt/dSCpqamWjm6plTXMrtq5pubm43YVZOpz7X8/HwjHjBggDVG16GuWbPGytE15a4ac13zfNJJJxlxRESENcaPwsJCIz548KARl5WVWWMOHDhgxK4a2ISEBCOeOHGiEf/85z+3xqxcudKIFy5c2OFj69df13SL2HXJukZaxK7rjYyMbDcWsZ+jK0ef0/qzyfX5pevJ9fxF7Jp9PRcRkRtvvNGIdT25632ga5dffvllK0f3yPjRj35kxPq9JCISExNjxK7ndHh9u+4lAAAA+i6+iQYAAAAAwCcW0QAAAAAA+MQiGgAAAAAAn1hEAwAAAADgU49oLDZmzBhr2/bt24MwE39cDatw5HTjJhfdJCgzM9PK0U2WdEMwEbuxkW5iVVlZaY0ZPny4EbuaWukGSnq+eh8idrOp9PR0K0fPTzexcjUjS05ONuJ+/fpZOXo/dXV1Vk5XNRLTOpqfq1HX3r17jfiNN96wchYsWGDEuoHWTTfdZI0544wzjPi73/2ulTN//nwjnjdvnhG7zkX9nFxNuA5vauXaj2u/ugGYbiIm
InLqqacasW4s52raeNZZZxmxq2GdPqddBg0aZMQjR440YleDNd0UbNeuXVbOpk2bjPhXv/qVEeuGeyIi8fHxRuy6pkRFRX3rPAAAQN/FN9EAAAAAAPjEIhoAAAAAAJ+63X/ndt2Ht6fdn5P/9te1XOdERzkNDQ0d5rj+q6sep2N9v2dXjuu/R+scfU77uU+0633QUY5rvnqM6/jqHNfxrKqqMuLD/+vr0aiurjbimpqadv9cRKS2ttaIXf/9vKNj49qv/u/7oaH2vzvq+fk5Z/TxdX1m6HPCtR9NP2/XOa5fN33sXOeiPg56jMiRfe7pcgPXc9TzOZLPA9d7p7PvwUO/+3l8+HPoWDZLkwiHFQAQZM3yzd8N/FzrQ7xu9jeC3bt3W3VzAAB0B4WFhTJw4MBgT6NX4HoPAOiO/Fzru90iurW1Vfbu3Svx8fFSVVUlgwYNksLCQmfjHRydyspKjm8AcXwDi+MbWBxfk+d5UlVVJdnZ2c7/kYDOO3S99zxPcnJyONcChPdyYHF8A4vjG1gcX1NnrvXd7r9zh4aGtq38D3VPTkhI4IUNII5vYHF8A4vjG1gc3/+TmJgY7Cn0Koeu94fKBTjXAovjG1gc38Di+AYWx/f/+L3W88/pAAAAAAD4xCIaAAAAAACfuvUiOjIyUn75y19KZGRksKfSK3F8A4vjG1gc38Di+OJY4VwLLI5vYHF8A4vjG1gc3yPX7RqLAQAAAADQXXXrb6IBAAAAAOhOWEQDAAAAAOATi2gAAAAAAHxiEQ0AAAAAgE8sogEAAAAA8KnbLqIfe+wxGTJkiERFRcmUKVNk2bJlwZ5SjzR37lyZPHmyxMfHS3p6usyePVs2b95s5NTX18ucOXMkNTVV4uLi5JJLLpHi4uIgzbhne+ihhyQkJERuvfXWtm0c36OzZ88eueKKKyQ1NVWio6Nl/Pjxsnz58rY/9zxP7r33XsnKypLo6GiZOXOmbN26NYgz7jlaWlrknnvukdzcXImOjpa8vDz59a9/LYfftIHji0DiWt81uNYfW1zrux7X+sDhWh8gXjf0yiuveBEREd5//dd/eevXr/euu+46LykpySsuLg721HqcWbNmec8884z31VdfeatXr/bOPfdcLycnx6uurm7L+dGPfuQNGjTImz9/vrd8+XJv6tSp3rRp04I4655p2bJl3pAhQ7zjjjvOu+WWW9q2c3yP3MGDB73Bgwd7V199tbd06VJv+/bt3rx587xt27a15Tz00ENeYmKi9+abb3pr1qzxLrzwQi83N9erq6sL4sx7hgceeMBLTU313nnnHW/Hjh3ea6+95sXFxXn/8R//0ZbD8UWgcK3vOlzrjx2u9V2Pa31gca0PjG65iD7ppJO8OXPmtMUtLS1edna2N3fu3CDOqncoKSnxRMRbuHCh53meV15e7vXr18977bXX2nI2btzoiYi3ZMmSYE2zx6mqqvKGDx/uffTRR95pp53WdmHl+B6dn//8597JJ5/8rX/e2trqZWZmer///e/btpWXl3uRkZHeyy+/fCym2KOdd9553rXXXmtsu/jii73LL7/c8zyOLwKLa33gcK0PDK71gcG1PrC41gdGt/vv3I2NjbJixQqZOXNm27bQ0FCZOXOmLFmyJIgz6x0qKipERCQlJUVERFasWCFNTU3G8R41apTk5ORwvDthzpw5ct555xnHUYTje7T+/ve/y4knnijf//73JT09XSZOnChPP/1025/v2LFDioqKjOObmJgoU6ZM4fj6MG3aNJk/f75s2bJFRETWrFkjixYtknPOOUdEOL4IHK71gcW1PjC41gcG1/rA4lofGOHBnoBWWloqLS0tkpGRYWzPyMiQTZs2BWlWvUNra6vceuutMn36dBk3bpyIiBQVFUlERIQkJSUZuRkZGVJUVBSEWfY8r7zyiqxcuVK+/PJL6884vkdn+/bt8vjjj8vtt98uv/jFL+TLL7+Um2++WSIiIuSqq65qO4auzwuO
b8fuvPNOqayslFGjRklYWJi0tLTIAw88IJdffrmICMcXAcO1PnC41gcG1/rA4VofWFzrA6PbLaIROHPmzJGvvvpKFi1aFOyp9BqFhYVyyy23yEcffSRRUVHBnk6v09raKieeeKI8+OCDIiIyceJE+eqrr+SJJ56Qq666Ksiz6/leffVVefHFF+Wll16SsWPHyurVq+XWW2+V7Oxsji/QQ3Gt73pc6wOLa31gca0PjG7337nT0tIkLCzM6mhYXFwsmZmZQZpVz3fjjTfKO++8I5988okMHDiwbXtmZqY0NjZKeXm5kc/x9mfFihVSUlIiJ5xwgoSHh0t4eLgsXLhQ/vjHP0p4eLhkZGRwfI9CVlaWjBkzxtg2evRo2bVrl4hI2zHk8+LI/OxnP5M777xTLrvsMhk/frxceeWVctttt8ncuXNFhOOLwOFaHxhc6wODa31gca0PLK71gdHtFtEREREyadIkmT9/ftu21tZWmT9/vuTn5wdxZj2T53ly4403yhtvvCELFiyQ3Nxc488nTZok/fr1M4735s2bZdeuXRxvH84880xZt26drF69uu3nxBNPlMsvv7ztd47vkZs+fbp1m5YtW7bI4MGDRUQkNzdXMjMzjeNbWVkpS5cu5fj6UFtbK6Gh5mUgLCxMWltbRYTji8DhWt+1uNYHFtf6wOJaH1hc6wMk2J3NXF555RUvMjLSe/bZZ70NGzZ4119/vZeUlOQVFRUFe2o9zo9//GMvMTHR+/TTT719+/a1/dTW1rbl/OhHP/JycnK8BQsWeMuXL/fy8/O9/Pz8IM66Zzu8Y6fncXyPxrJly7zw8HDvgQce8LZu3eq9+OKLXkxMjPff//3fbTkPPfSQl5SU5L311lve2rVrvYsuuojbMvh01VVXeQMGDGi77cXrr7/upaWleXfccUdbDscXgcK1vutwrT/2uNZ3Ha71gcW1PjC65SLa8zzvT3/6k5eTk+NFRER4J510kvfFF18Ee0o9kog4f5555pm2nLq6Ou8nP/mJl5yc7MXExHjf/e53vX379gVv0j2cvrByfI/O22+/7Y0bN86LjIz0Ro0a5T311FPGn7e2tnr33HOPl5GR4UVGRnpnnnmmt3nz5iDNtmeprKz0brnlFi8nJ8eLioryhg4d6v3bv/2b19DQ0JbD8UUgca3vGlzrjz2u9V2La33gcK0PjBDP87zgfAcOAAAAAEDP0u1qogEAAAAA6K5YRAMAAAAA4BOLaAAAAAAAfGIRDQAAAACATyyiAQAAAADwiUU0AAAAAAA+sYgGAAAAAMAnFtEAAAAAAPjEIhoAAAAAAJ9YRAMAAAAA4BOLaAAAAAAAfPr/ADGKDrhAHArWAAAAAElFTkSuQmCC",
      "text/plain": [
       "<Figure size 1200x600 with 2 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "check_ds = Dataset(data=train_files, transform=train_transforms)\n",
    "check_loader = DataLoader(check_ds, batch_size=1)\n",
    "check_data = first(check_loader)\n",
    "image, label = (check_data[\"image\"][0][0], check_data[\"label\"][0][0])\n",
    "print(f\"image shape: {image.shape}, label shape: {label.shape}\")\n",
    "# plot the slice [:, :, 30]\n",
    "\n",
    "plt.figure(\"check\", (12, 6))\n",
    "plt.subplot(1, 2, 1)\n",
    "plt.title(\"image\")\n",
    "plt.imshow(image[:, :, 30], cmap=\"gray\")\n",
    "plt.subplot(1, 2, 2)\n",
    "plt.title(\"label\")\n",
    "plt.imshow(label[:, :, 30])\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([2, 1, 96, 96, 32]) torch.Size([2, 1, 96, 96, 32]) ['/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_0_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver//imagesTr/liver_0_0000.nii.gz']\n"
     ]
    }
   ],
   "source": [
    "for data in check_loader:\n",
    "    x,y = data['image'], data['label']\n",
    "    print(x.shape, y.shape, data['name'])\n",
    "    break"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "D0_EHJ7FwCMQ"
   },
   "source": [
    "## Define CacheDataset and DataLoader for training and validation\n",
    "\n",
    "Here we use CacheDataset to accelerate the training and validation process; it's 10x faster than the regular Dataset.  \n",
    "To achieve the best performance, set `cache_rate=1.0` to cache all the data; if memory is not enough, set a lower value.  \n",
    "Users can also set `cache_num` instead of `cache_rate`; the minimum of the two settings will be used.  \n",
    "Set `num_workers` to enable multi-threaded caching.  \n",
    "If you want to try the regular Dataset, just switch to the commented code below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "kKA4gboPwCMQ",
    "outputId": "2496df99-8445-4c70-a3b1-721f9e552b34",
    "tags": []
   },
   "outputs": [],
   "source": [
    "# train_ds = CacheDataset(data=train_files, transform=train_transforms, cache_rate=1.0, num_workers=4)\n",
    "train_ds = Dataset(data=train_files, transform=train_transforms)\n",
    "# use batch_size=2 to load images and use RandCropByPosNegLabeld\n",
    "# to generate 2 x 4 images for network training\n",
    "train_loader = DataLoader(train_ds, batch_size=2, shuffle=False, num_workers=4)\n",
    "\n",
    "# val_ds = CacheDataset(data=val_files, transform=val_transforms, cache_rate=1.0, num_workers=4)\n",
    "val_ds = Dataset(data=val_files, transform=val_transforms)\n",
    "val_loader = DataLoader(val_ds, batch_size=1, num_workers=4)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "id": "d1WMn7DFKkbV"
   },
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'aim_run' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[8], line 2\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;66;03m# finalize Aim Run\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43maim_run\u001b[49m\u001b[38;5;241m.\u001b[39mclose()\n",
      "\u001b[0;31mNameError\u001b[0m: name 'aim_run' is not defined"
     ]
    }
   ],
   "source": [
    "# finalize Aim Run\n",
    "aim_run.close()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "nOgy1x1BwCMQ"
   },
   "source": [
    "## Create Model, Loss, Optimizer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "dataset_name = 'Task003_Liver'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "id": "VM-5g2bmwCMQ"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Task003_Liver_UNet\n"
     ]
    }
   ],
   "source": [
    "# standard PyTorch program style: create UNet, DiceLoss and Adam optimizer\n",
    "device = torch.device(\"cuda:0\")\n",
    "# device = torch.device(\"cpu\")\n",
    "\n",
    "UNet_metadata = {\n",
    "    \"spatial_dims\": 3,\n",
    "    \"in_channels\": 1,\n",
    "    \"out_channels\": 3,\n",
    "    \"strides\": (2, 2, 2, 2),\n",
    "    \"num_res_units\": 2,\n",
    "    \"channels\": (4, 8, 16, 32, 64),\n",
    "    \"norm\": Norm.BATCH,\n",
    "}\n",
    "\n",
    "model = UNet(**UNet_metadata).to(device)\n",
    "loss_function = DiceLoss(to_onehot_y=True, softmax=True)\n",
    "loss_type = \"DiceLoss\"\n",
    "optimizer = torch.optim.Adam(model.parameters(), 1e-4)\n",
    "dice_metric = DiceMetric(include_background=False, reduction=\"mean\")\n",
    "\n",
    "Optimizer_metadata = {}\n",
    "for ind, param_group in enumerate(optimizer.param_groups):\n",
    "    optim_meta_keys = list(param_group.keys())\n",
    "    Optimizer_metadata[f\"param_group_{ind}\"] = {\n",
    "        key: value for (key, value) in param_group.items() if \"params\" not in key\n",
    "    }\n",
    "aim_run = aim.Run()\n",
    "aim_run.name = f'{dataset_name}_{model.__class__.__name__}'\n",
    "# log model metadata\n",
    "aim_run[f\"{model.__class__.__name__}_metadata\"] = UNet_metadata\n",
    "# log optimizer metadata\n",
    "aim_run[\"Optimizer_metadata\"] = Optimizer_metadata\n",
    "print(aim_run.name)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "id": "VM-5g2bmwCMQ"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Task003_Liver_UNETR\n"
     ]
    }
   ],
   "source": [
    "# standard PyTorch program style: create UNETR, DiceLoss and Adam optimizer\n",
    "device = torch.device(\"cuda:0\")\n",
    "\n",
    "net_metadata = {\n",
    "    \"spatial_dims\": 3,\n",
    "    \"in_channels\": 1,\n",
    "    \"out_channels\": 2,\n",
    "    \"img_size\": (96, 96, 32),\n",
    "    \"mlp_dim\": 3072\n",
    "    #  \"strides\": (2, 2, 2, 2),\n",
    "    # \"num_res_units\": 2,\n",
    "    # \"channels\":(4, 8, 16, 32, 64),\n",
    "    # \"norm\": Norm.BATCH,\n",
    "}\n",
    "\n",
    "model = UNETR(**net_metadata).to(device)\n",
    "loss_function = DiceLoss(to_onehot_y=True, softmax=True)\n",
    "loss_type = \"DiceLoss\"\n",
    "optimizer = torch.optim.Adam(model.parameters(), 1e-4)\n",
    "dice_metric = DiceMetric(include_background=False, reduction=\"mean\")\n",
    "\n",
    "Optimizer_metadata = {}\n",
    "for ind, param_group in enumerate(optimizer.param_groups):\n",
    "    optim_meta_keys = list(param_group.keys())\n",
    "    Optimizer_metadata[f\"param_group_{ind}\"] = {\n",
    "        key: value for (key, value) in param_group.items() if \"params\" not in key\n",
    "    }\n",
    "aim_run = aim.Run()\n",
    "aim_run.name = f'{dataset_name}_{model.__class__.__name__}'\n",
    "# log model metadata\n",
    "aim_run[f\"{model.__class__.__name__}_metadata\"] = net_metadata\n",
    "# log optimizer metadata\n",
    "aim_run[\"Optimizer_metadata\"] = Optimizer_metadata\n",
    "print(aim_run.name)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "4nD1pAY-wCMR"
   },
   "source": [
    "## Execute a typical PyTorch training process"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0., 1., 2.], dtype=float32)"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import numpy as np\n",
    "np.unique(batch_data[\"label\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([4, 1, 96, 96, 32]) torch.Size([4, 1, 96, 96, 32]) ['/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_0_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_0_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_100_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_100_0000.nii.gz']\n",
      "torch.Size([4, 1, 96, 96, 32]) torch.Size([4, 1, 96, 96, 32]) ['/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_101_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_101_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_102_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_102_0000.nii.gz']\n",
      "torch.Size([4, 1, 96, 96, 32]) torch.Size([4, 1, 96, 96, 32]) ['/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_103_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_103_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_104_0000.nii.gz', '/mnt/datawow/lyl/models/nnUNet-master/nnUNetFrame/DATASET/nnUNet_raw/nnUNet_raw_data/Task003_Liver/imagesTr/liver_104_0000.nii.gz']\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[13], line 2\u001b[0m\n\u001b[1;32m      1\u001b[0m step\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m\n\u001b[0;32m----> 2\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m batch_data \u001b[38;5;129;01min\u001b[39;00m train_loader:\n\u001b[1;32m      3\u001b[0m     step\u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m\n\u001b[1;32m      4\u001b[0m     inputs, labels \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m      5\u001b[0m         batch_data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mimage\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m      6\u001b[0m         batch_data[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlabel\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m      7\u001b[0m     )\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:631\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    628\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    629\u001b[0m     \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[1;32m    630\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset()  \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[0;32m--> 631\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    632\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m    633\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    634\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    635\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called:\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1329\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1326\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_process_data(data)\n\u001b[1;32m   1328\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_shutdown \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_tasks_outstanding \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m-> 1329\u001b[0m idx, data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1330\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_tasks_outstanding \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m   1331\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable:\n\u001b[1;32m   1332\u001b[0m     \u001b[38;5;66;03m# Check for _IterableDatasetStopIteration\u001b[39;00m\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1295\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._get_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1291\u001b[0m     \u001b[38;5;66;03m# In this case, `self._data_queue` is a `queue.Queue`,. But we don't\u001b[39;00m\n\u001b[1;32m   1292\u001b[0m     \u001b[38;5;66;03m# need to call `.task_done()` because we don't use `.join()`.\u001b[39;00m\n\u001b[1;32m   1293\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m   1294\u001b[0m     \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m-> 1295\u001b[0m         success, data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_try_get_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1296\u001b[0m         \u001b[38;5;28;01mif\u001b[39;00m success:\n\u001b[1;32m   1297\u001b[0m             \u001b[38;5;28;01mreturn\u001b[39;00m data\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1133\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._try_get_data\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m   1120\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_try_get_data\u001b[39m(\u001b[38;5;28mself\u001b[39m, timeout\u001b[38;5;241m=\u001b[39m_utils\u001b[38;5;241m.\u001b[39mMP_STATUS_CHECK_INTERVAL):\n\u001b[1;32m   1121\u001b[0m     \u001b[38;5;66;03m# Tries to fetch data from `self._data_queue` once for a given timeout.\u001b[39;00m\n\u001b[1;32m   1122\u001b[0m     \u001b[38;5;66;03m# This can also be used as inner loop of fetching without timeout, with\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m   1130\u001b[0m     \u001b[38;5;66;03m# Returns a 2-tuple:\u001b[39;00m\n\u001b[1;32m   1131\u001b[0m     \u001b[38;5;66;03m#   (bool: whether successfully get data, any: data if successful else None)\u001b[39;00m\n\u001b[1;32m   1132\u001b[0m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1133\u001b[0m         data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_data_queue\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1134\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m (\u001b[38;5;28;01mTrue\u001b[39;00m, data)\n\u001b[1;32m   1135\u001b[0m     \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m   1136\u001b[0m         \u001b[38;5;66;03m# At timeout and error, we manually check whether any worker has\u001b[39;00m\n\u001b[1;32m   1137\u001b[0m         \u001b[38;5;66;03m# failed. Note that this is the only mechanism for Windows to detect\u001b[39;00m\n\u001b[1;32m   1138\u001b[0m         \u001b[38;5;66;03m# worker failures.\u001b[39;00m\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/queues.py:113\u001b[0m, in \u001b[0;36mQueue.get\u001b[0;34m(self, block, timeout)\u001b[0m\n\u001b[1;32m    111\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m block:\n\u001b[1;32m    112\u001b[0m     timeout \u001b[38;5;241m=\u001b[39m deadline \u001b[38;5;241m-\u001b[39m time\u001b[38;5;241m.\u001b[39mmonotonic()\n\u001b[0;32m--> 113\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_poll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m:\n\u001b[1;32m    114\u001b[0m         \u001b[38;5;28;01mraise\u001b[39;00m Empty\n\u001b[1;32m    115\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_poll():\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/connection.py:257\u001b[0m, in \u001b[0;36m_ConnectionBase.poll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    255\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_closed()\n\u001b[1;32m    256\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_readable()\n\u001b[0;32m--> 257\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_poll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/connection.py:424\u001b[0m, in \u001b[0;36mConnection._poll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    423\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_poll\u001b[39m(\u001b[38;5;28mself\u001b[39m, timeout):\n\u001b[0;32m--> 424\u001b[0m     r \u001b[38;5;241m=\u001b[39m \u001b[43mwait\u001b[49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    425\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mbool\u001b[39m(r)\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/connection.py:931\u001b[0m, in \u001b[0;36mwait\u001b[0;34m(object_list, timeout)\u001b[0m\n\u001b[1;32m    928\u001b[0m     deadline \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mmonotonic() \u001b[38;5;241m+\u001b[39m timeout\n\u001b[1;32m    930\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m--> 931\u001b[0m     ready \u001b[38;5;241m=\u001b[39m \u001b[43mselector\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mselect\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    932\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m ready:\n\u001b[1;32m    933\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m [key\u001b[38;5;241m.\u001b[39mfileobj \u001b[38;5;28;01mfor\u001b[39;00m (key, events) \u001b[38;5;129;01min\u001b[39;00m ready]\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/selectors.py:416\u001b[0m, in \u001b[0;36m_PollLikeSelector.select\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    414\u001b[0m ready \u001b[38;5;241m=\u001b[39m []\n\u001b[1;32m    415\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 416\u001b[0m     fd_event_list \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_selector\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpoll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    417\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mInterruptedError\u001b[39;00m:\n\u001b[1;32m    418\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m ready\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "# Sanity-check one full pass over the training loader:\n",
    "# print tensor shapes and the sample names carried in each batch.\n",
    "step = 0\n",
    "for batch_data in train_loader:\n",
    "    step += 1\n",
    "    inputs = batch_data[\"image\"]\n",
    "    labels = batch_data[\"label\"]\n",
    "    print(inputs.shape, labels.shape, batch_data[\"name\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "id": "KayxFseYwCMR",
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "----------\n",
      "epoch 1/600\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/yawei/.local/lib/python3.10/site-packages/torch/autograd/graph.py:744: UserWarning: Plan failed with a cudnnException: CUDNN_BACKEND_EXECUTION_PLAN_DESCRIPTOR: cudnnFinalize Descriptor Failed cudnn_status: CUDNN_STATUS_NOT_SUPPORTED (Triggered internally at ../aten/src/ATen/native/cudnn/Conv_v8.cpp:919.)\n",
      "  return Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1/61, train_loss: 0.7735\n",
      "2/61, train_loss: 0.7544\n",
      "3/61, train_loss: 0.8031\n",
      "4/61, train_loss: 0.8349\n",
      "5/61, train_loss: 0.7878\n",
      "6/61, train_loss: 0.7853\n",
      "7/61, train_loss: 0.7935\n",
      "8/61, train_loss: 0.7484\n",
      "9/61, train_loss: 0.7560\n",
      "10/61, train_loss: 0.7665\n",
      "11/61, train_loss: 0.7601\n",
      "12/61, train_loss: 0.7958\n",
      "13/61, train_loss: 0.8060\n",
      "14/61, train_loss: 0.8178\n",
      "15/61, train_loss: 0.7450\n",
      "16/61, train_loss: 0.7365\n",
      "17/61, train_loss: 0.6966\n",
      "18/61, train_loss: 0.7441\n",
      "19/61, train_loss: 0.7723\n",
      "20/61, train_loss: 0.8350\n",
      "21/61, train_loss: 0.7571\n",
      "22/61, train_loss: 0.8035\n",
      "23/61, train_loss: 0.8044\n",
      "24/61, train_loss: 0.7094\n",
      "25/61, train_loss: 0.7698\n",
      "26/61, train_loss: 0.7297\n",
      "27/61, train_loss: 0.7724\n",
      "28/61, train_loss: 0.7689\n",
      "29/61, train_loss: 0.7978\n",
      "30/61, train_loss: 0.7158\n",
      "31/61, train_loss: 0.8110\n",
      "32/61, train_loss: 0.7373\n",
      "33/61, train_loss: 0.7939\n",
      "34/61, train_loss: 0.7601\n",
      "35/61, train_loss: 0.7532\n",
      "36/61, train_loss: 0.7411\n",
      "37/61, train_loss: 0.7152\n",
      "38/61, train_loss: 0.7883\n",
      "39/61, train_loss: 0.7182\n",
      "40/61, train_loss: 0.7885\n",
      "41/61, train_loss: 0.7451\n",
      "42/61, train_loss: 0.7670\n",
      "43/61, train_loss: 0.7276\n",
      "44/61, train_loss: 0.7095\n",
      "45/61, train_loss: 0.8160\n",
      "46/61, train_loss: 0.6924\n",
      "47/61, train_loss: 0.7881\n",
      "48/61, train_loss: 0.7224\n",
      "49/61, train_loss: 0.7440\n",
      "50/61, train_loss: 0.8040\n",
      "51/61, train_loss: 0.7834\n",
      "52/61, train_loss: 0.6570\n",
      "53/61, train_loss: 0.6930\n",
      "54/61, train_loss: 0.7374\n",
      "55/61, train_loss: 0.7388\n",
      "56/61, train_loss: 0.7006\n",
      "57/61, train_loss: 0.7347\n",
      "58/61, train_loss: 0.6418\n",
      "59/61, train_loss: 0.7301\n",
      "60/61, train_loss: 0.7351\n",
      "61/61, train_loss: 0.7704\n",
      "epoch 1 average loss: 0.7572\n",
      "saved new best metric model at the 1th epoch\n",
      "current epoch: 1 current mean dice: 0.1042 \n",
      "best mean dice: 0.1042  at epoch: 1\n",
      "----------\n",
      "epoch 2/600\n",
      "1/61, train_loss: 0.7302\n",
      "2/61, train_loss: 0.6432\n",
      "3/61, train_loss: 0.6512\n",
      "4/61, train_loss: 0.6825\n",
      "5/61, train_loss: 0.6042\n",
      "6/61, train_loss: 0.6879\n",
      "7/61, train_loss: 0.8147\n",
      "8/61, train_loss: 0.7282\n",
      "9/61, train_loss: 0.8094\n",
      "10/61, train_loss: 0.7109\n",
      "11/61, train_loss: 0.7226\n",
      "12/61, train_loss: 0.7278\n",
      "13/61, train_loss: 0.6669\n",
      "14/61, train_loss: 0.7114\n",
      "15/61, train_loss: 0.7691\n",
      "16/61, train_loss: 0.7080\n",
      "17/61, train_loss: 0.6235\n",
      "18/61, train_loss: 0.8018\n",
      "19/61, train_loss: 0.7199\n",
      "20/61, train_loss: 0.6460\n",
      "21/61, train_loss: 0.7662\n",
      "22/61, train_loss: 0.6094\n",
      "23/61, train_loss: 0.6799\n",
      "24/61, train_loss: 0.7678\n",
      "25/61, train_loss: 0.6557\n",
      "26/61, train_loss: 0.6105\n",
      "27/61, train_loss: 0.6966\n",
      "28/61, train_loss: 0.6393\n",
      "29/61, train_loss: 0.6657\n",
      "30/61, train_loss: 0.6910\n",
      "31/61, train_loss: 0.6998\n",
      "32/61, train_loss: 0.7122\n",
      "33/61, train_loss: 0.7346\n",
      "34/61, train_loss: 0.6878\n",
      "35/61, train_loss: 0.6530\n",
      "36/61, train_loss: 0.7308\n",
      "37/61, train_loss: 0.6687\n",
      "38/61, train_loss: 0.7150\n",
      "39/61, train_loss: 0.6503\n",
      "40/61, train_loss: 0.7276\n",
      "41/61, train_loss: 0.7608\n",
      "42/61, train_loss: 0.7704\n",
      "43/61, train_loss: 0.7543\n",
      "44/61, train_loss: 0.7287\n",
      "45/61, train_loss: 0.6572\n",
      "46/61, train_loss: 0.7176\n",
      "47/61, train_loss: 0.7608\n",
      "48/61, train_loss: 0.7421\n",
      "49/61, train_loss: 0.6231\n",
      "50/61, train_loss: 0.6814\n",
      "51/61, train_loss: 0.6323\n",
      "52/61, train_loss: 0.6966\n",
      "53/61, train_loss: 0.6736\n",
      "54/61, train_loss: 0.7132\n",
      "55/61, train_loss: 0.6015\n",
      "56/61, train_loss: 0.5838\n",
      "57/61, train_loss: 0.5616\n",
      "58/61, train_loss: 0.6791\n",
      "59/61, train_loss: 0.5622\n",
      "60/61, train_loss: 0.6619\n",
      "61/61, train_loss: 0.5426\n",
      "epoch 2 average loss: 0.6889\n",
      "saved new best metric model at the 2th epoch\n",
      "current epoch: 2 current mean dice: 0.1134 \n",
      "best mean dice: 0.1134  at epoch: 2\n",
      "----------\n",
      "epoch 3/600\n",
      "1/61, train_loss: 0.6742\n",
      "2/61, train_loss: 0.5791\n",
      "3/61, train_loss: 0.6796\n",
      "4/61, train_loss: 0.6275\n",
      "5/61, train_loss: 0.6589\n",
      "6/61, train_loss: 0.6572\n",
      "7/61, train_loss: 0.7749\n",
      "8/61, train_loss: 0.6527\n",
      "9/61, train_loss: 0.6174\n",
      "10/61, train_loss: 0.6970\n",
      "11/61, train_loss: 0.7579\n",
      "12/61, train_loss: 0.6420\n",
      "13/61, train_loss: 0.5352\n",
      "14/61, train_loss: 0.5962\n",
      "15/61, train_loss: 0.6569\n",
      "16/61, train_loss: 0.6449\n",
      "17/61, train_loss: 0.5536\n",
      "18/61, train_loss: 0.6849\n",
      "19/61, train_loss: 0.6658\n",
      "20/61, train_loss: 0.5857\n",
      "21/61, train_loss: 0.5236\n",
      "22/61, train_loss: 0.7306\n",
      "23/61, train_loss: 0.6473\n",
      "24/61, train_loss: 0.5971\n",
      "25/61, train_loss: 0.6382\n",
      "26/61, train_loss: 0.5825\n",
      "27/61, train_loss: 0.6060\n",
      "28/61, train_loss: 0.7675\n",
      "29/61, train_loss: 0.5845\n",
      "30/61, train_loss: 0.5371\n",
      "31/61, train_loss: 0.6855\n",
      "32/61, train_loss: 0.6090\n",
      "33/61, train_loss: 0.6604\n",
      "34/61, train_loss: 0.6014\n",
      "35/61, train_loss: 0.7231\n",
      "36/61, train_loss: 0.7198\n",
      "37/61, train_loss: 0.5836\n",
      "38/61, train_loss: 0.6631\n",
      "39/61, train_loss: 0.6645\n",
      "40/61, train_loss: 0.5856\n",
      "41/61, train_loss: 0.6696\n",
      "42/61, train_loss: 0.6367\n",
      "43/61, train_loss: 0.5690\n",
      "44/61, train_loss: 0.7990\n",
      "45/61, train_loss: 0.6254\n",
      "46/61, train_loss: 0.6267\n",
      "47/61, train_loss: 0.6316\n",
      "48/61, train_loss: 0.7356\n",
      "49/61, train_loss: 0.5058\n",
      "50/61, train_loss: 0.7134\n",
      "51/61, train_loss: 0.5651\n",
      "52/61, train_loss: 0.5985\n",
      "53/61, train_loss: 0.5726\n",
      "54/61, train_loss: 0.6374\n",
      "55/61, train_loss: 0.7091\n",
      "56/61, train_loss: 0.5747\n",
      "57/61, train_loss: 0.6469\n",
      "58/61, train_loss: 0.7001\n",
      "59/61, train_loss: 0.6424\n",
      "60/61, train_loss: 0.6362\n",
      "61/61, train_loss: 0.6318\n",
      "epoch 3 average loss: 0.6407\n",
      "saved new best metric model at the 3th epoch\n",
      "current epoch: 3 current mean dice: 0.1454 \n",
      "best mean dice: 0.1454  at epoch: 3\n",
      "----------\n",
      "epoch 4/600\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[10], line 20\u001b[0m\n\u001b[1;32m     18\u001b[0m epoch_loss \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[1;32m     19\u001b[0m step \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m---> 20\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m batch_data \u001b[38;5;129;01min\u001b[39;00m train_loader:\n\u001b[1;32m     21\u001b[0m     step \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m     22\u001b[0m     \u001b[38;5;66;03m# print(1, batch_data[\"image\"].shape, batch_data[\"label\"].shape, batch_data['name'])\u001b[39;00m\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:631\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    628\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    629\u001b[0m     \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[1;32m    630\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset()  \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[0;32m--> 631\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    632\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m    633\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    634\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    635\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called:\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1329\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1326\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_process_data(data)\n\u001b[1;32m   1328\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_shutdown \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_tasks_outstanding \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m-> 1329\u001b[0m idx, data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1330\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_tasks_outstanding \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m   1331\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable:\n\u001b[1;32m   1332\u001b[0m     \u001b[38;5;66;03m# Check for _IterableDatasetStopIteration\u001b[39;00m\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1295\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._get_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1291\u001b[0m     \u001b[38;5;66;03m# In this case, `self._data_queue` is a `queue.Queue`,. But we don't\u001b[39;00m\n\u001b[1;32m   1292\u001b[0m     \u001b[38;5;66;03m# need to call `.task_done()` because we don't use `.join()`.\u001b[39;00m\n\u001b[1;32m   1293\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m   1294\u001b[0m     \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m-> 1295\u001b[0m         success, data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_try_get_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1296\u001b[0m         \u001b[38;5;28;01mif\u001b[39;00m success:\n\u001b[1;32m   1297\u001b[0m             \u001b[38;5;28;01mreturn\u001b[39;00m data\n",
      "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1133\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._try_get_data\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m   1120\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_try_get_data\u001b[39m(\u001b[38;5;28mself\u001b[39m, timeout\u001b[38;5;241m=\u001b[39m_utils\u001b[38;5;241m.\u001b[39mMP_STATUS_CHECK_INTERVAL):\n\u001b[1;32m   1121\u001b[0m     \u001b[38;5;66;03m# Tries to fetch data from `self._data_queue` once for a given timeout.\u001b[39;00m\n\u001b[1;32m   1122\u001b[0m     \u001b[38;5;66;03m# This can also be used as inner loop of fetching without timeout, with\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m   1130\u001b[0m     \u001b[38;5;66;03m# Returns a 2-tuple:\u001b[39;00m\n\u001b[1;32m   1131\u001b[0m     \u001b[38;5;66;03m#   (bool: whether successfully get data, any: data if successful else None)\u001b[39;00m\n\u001b[1;32m   1132\u001b[0m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1133\u001b[0m         data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_data_queue\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1134\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m (\u001b[38;5;28;01mTrue\u001b[39;00m, data)\n\u001b[1;32m   1135\u001b[0m     \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m   1136\u001b[0m         \u001b[38;5;66;03m# At timeout and error, we manually check whether any worker has\u001b[39;00m\n\u001b[1;32m   1137\u001b[0m         \u001b[38;5;66;03m# failed. Note that this is the only mechanism for Windows to detect\u001b[39;00m\n\u001b[1;32m   1138\u001b[0m         \u001b[38;5;66;03m# worker failures.\u001b[39;00m\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/queues.py:113\u001b[0m, in \u001b[0;36mQueue.get\u001b[0;34m(self, block, timeout)\u001b[0m\n\u001b[1;32m    111\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m block:\n\u001b[1;32m    112\u001b[0m     timeout \u001b[38;5;241m=\u001b[39m deadline \u001b[38;5;241m-\u001b[39m time\u001b[38;5;241m.\u001b[39mmonotonic()\n\u001b[0;32m--> 113\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_poll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m:\n\u001b[1;32m    114\u001b[0m         \u001b[38;5;28;01mraise\u001b[39;00m Empty\n\u001b[1;32m    115\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_poll():\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/connection.py:257\u001b[0m, in \u001b[0;36m_ConnectionBase.poll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    255\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_closed()\n\u001b[1;32m    256\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_check_readable()\n\u001b[0;32m--> 257\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_poll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/connection.py:424\u001b[0m, in \u001b[0;36mConnection._poll\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    423\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_poll\u001b[39m(\u001b[38;5;28mself\u001b[39m, timeout):\n\u001b[0;32m--> 424\u001b[0m     r \u001b[38;5;241m=\u001b[39m \u001b[43mwait\u001b[49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    425\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mbool\u001b[39m(r)\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/multiprocessing/connection.py:931\u001b[0m, in \u001b[0;36mwait\u001b[0;34m(object_list, timeout)\u001b[0m\n\u001b[1;32m    928\u001b[0m     deadline \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mmonotonic() \u001b[38;5;241m+\u001b[39m timeout\n\u001b[1;32m    930\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m--> 931\u001b[0m     ready \u001b[38;5;241m=\u001b[39m \u001b[43mselector\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mselect\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    932\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m ready:\n\u001b[1;32m    933\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m [key\u001b[38;5;241m.\u001b[39mfileobj \u001b[38;5;28;01mfor\u001b[39;00m (key, events) \u001b[38;5;129;01min\u001b[39;00m ready]\n",
      "File \u001b[0;32m/opt/miniconda3/lib/python3.10/selectors.py:416\u001b[0m, in \u001b[0;36m_PollLikeSelector.select\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m    414\u001b[0m ready \u001b[38;5;241m=\u001b[39m []\n\u001b[1;32m    415\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 416\u001b[0m     fd_event_list \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_selector\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpoll\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    417\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mInterruptedError\u001b[39;00m:\n\u001b[1;32m    418\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m ready\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "# Training loop with Aim experiment tracking: train for max_epochs,\n",
    "# validate every val_interval epochs with sliding-window inference,\n",
    "# and checkpoint the model whenever mean Dice improves.\n",
    "# NOTE(review): hardcoded absolute path -- consider a configurable model dir.\n",
    "model_dir = '/mnt/datawow/lyq/medical_image_model'\n",
    "\n",
    "max_epochs = 600\n",
    "val_interval = 1  # run validation every epoch\n",
    "best_metric = -1\n",
    "best_metric_epoch = -1\n",
    "epoch_loss_values = []\n",
    "metric_values = []\n",
    "# post-processing to one-hot over 3 classes (background / liver / tumour)\n",
    "post_pred = Compose([AsDiscrete(argmax=True, to_onehot=3)])\n",
    "post_label = Compose([AsDiscrete(to_onehot=3)])\n",
    "\n",
    "# fixed axial slice index used when logging example images to Aim\n",
    "slice_to_track = 30\n",
    "\n",
    "for epoch in range(max_epochs):\n",
    "    print(\"-\" * 10)\n",
    "    print(f\"epoch {epoch + 1}/{max_epochs}\")\n",
    "    model.train()\n",
    "    epoch_loss = 0\n",
    "    step = 0\n",
    "    for batch_data in train_loader:\n",
    "        step += 1\n",
    "        inputs, labels = (\n",
    "            batch_data[\"image\"].to(device),\n",
    "            batch_data[\"label\"].to(device),\n",
    "        )\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        outputs = model(inputs)\n",
    "        loss = loss_function(outputs, labels)\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        epoch_loss += loss.item()\n",
    "        print(f\"{step}/{len(train_ds) // train_loader.batch_size}, \" f\"train_loss: {loss.item():.4f}\")\n",
    "        # track batch loss metric\n",
    "        aim_run.track(loss.item(), name=\"batch_loss\", context={\"type\": loss_type})\n",
    "\n",
    "    epoch_loss /= step\n",
    "    epoch_loss_values.append(epoch_loss)\n",
    "\n",
    "    # track epoch loss metric\n",
    "    aim_run.track(epoch_loss, name=\"epoch_loss\", context={\"type\": loss_type})\n",
    "\n",
    "    print(f\"epoch {epoch + 1} average loss: {epoch_loss:.4f}\")\n",
    "\n",
    "    if (epoch + 1) % val_interval == 0:\n",
    "        # BUGFIX: was `(epoch + 1) % val_interval * 2 == 0`, which parses as\n",
    "        # `((epoch + 1) % val_interval) * 2 == 0` -- always true when\n",
    "        # val_interval == 1, so the slow distribution tracking below ran on\n",
    "        # EVERY epoch. Parenthesize so it runs every 2nd interval as intended.\n",
    "        if (epoch + 1) % (val_interval * 2) == 0:\n",
    "            # track model params and gradients\n",
    "            track_params_dists(model, aim_run)\n",
    "            # THIS SEGMENT TAKES RELATIVELY LONG (Advise Against it)\n",
    "            track_gradients_dists(model, aim_run)\n",
    "\n",
    "        model.eval()\n",
    "        with torch.no_grad():\n",
    "            for index, val_data in enumerate(val_loader):\n",
    "                val_inputs, val_labels = (\n",
    "                    val_data[\"image\"].to(device),\n",
    "                    val_data[\"label\"].to(device),\n",
    "                )\n",
    "\n",
    "                # roi_size is defined earlier in the notebook -- TODO confirm\n",
    "                sw_batch_size = 4\n",
    "                val_outputs = sliding_window_inference(val_inputs, roi_size, sw_batch_size, model)\n",
    "\n",
    "                # tracking input, label and output images with Aim\n",
    "                output = torch.argmax(val_outputs, dim=1)[0, :, :, slice_to_track].float()\n",
    "\n",
    "                aim_run.track(\n",
    "                    aim.Image(val_inputs[0, 0, :, :, slice_to_track], caption=f\"Input Image: {index}\"),\n",
    "                    name=\"validation\",\n",
    "                    context={\"type\": \"input\"},\n",
    "                )\n",
    "                aim_run.track(\n",
    "                    aim.Image(val_labels[0, 0, :, :, slice_to_track], caption=f\"Label Image: {index}\"),\n",
    "                    name=\"validation\",\n",
    "                    context={\"type\": \"label\"},\n",
    "                )\n",
    "                aim_run.track(\n",
    "                    aim.Image(output, caption=f\"Predicted Label: {index}\"),\n",
    "                    name=\"predictions\",\n",
    "                    context={\"type\": \"labels\"},\n",
    "                )\n",
    "\n",
    "                val_outputs = [post_pred(i) for i in decollate_batch(val_outputs)]\n",
    "                val_labels = [post_label(i) for i in decollate_batch(val_labels)]\n",
    "                # compute metric for current iteration\n",
    "                dice_metric(y_pred=val_outputs, y=val_labels)\n",
    "\n",
    "            # aggregate the final mean dice result\n",
    "            metric = dice_metric.aggregate().item()\n",
    "            # track val metric\n",
    "            aim_run.track(metric, name=\"val_metric\", context={\"type\": loss_type})\n",
    "\n",
    "            # reset the status for next validation round\n",
    "            dice_metric.reset()\n",
    "\n",
    "            metric_values.append(metric)\n",
    "            if metric > best_metric:\n",
    "                best_metric = metric\n",
    "                best_metric_epoch = epoch + 1\n",
    "                torch.save(model.state_dict(), os.path.join(model_dir, f\"{aim_run.name}_best_metric_model.pth\"))\n",
    "\n",
    "                best_model_log_message = f\"saved new best metric model at the {epoch+1}th epoch\"\n",
    "                aim_run.track(aim.Text(best_model_log_message), name=\"best_model_log_message\", epoch=epoch + 1)\n",
    "                print(best_model_log_message)\n",
    "\n",
    "            message1 = f\"current epoch: {epoch + 1} current mean dice: {metric:.4f}\"\n",
    "            message2 = f\"\\nbest mean dice: {best_metric:.4f} \"\n",
    "            message3 = f\"at epoch: {best_metric_epoch}\"\n",
    "\n",
    "            aim_run.track(aim.Text(message1 + \"\\n\" + message2 + message3), name=\"epoch_summary\", epoch=epoch + 1)\n",
    "            print(message1, message2, message3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "id": "d1WMn7DFKkbV"
   },
   "outputs": [],
   "source": [
    "# Finalize the Aim run: flushes pending tracked metrics/images so the\n",
    "# experiment is complete and queryable in the Aim UI.\n",
    "aim_run.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "id": "ygo9hrWswCMR",
    "scrolled": true,
    "tags": []
   },
   "outputs": [],
   "source": [
    "print(f\"train completed, best_metric: {best_metric:.4f} \" f\"at epoch: {best_metric_epoch}\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'VNet'"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "run = aim.Run('dc7f4adf500345a78a890961')\n",
    "run.name"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "run.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "id": "jGhCvBg-wCMS"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Launching Aim ..."
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Error: No such option: --force-init\n",
      "\n"
     ]
    }
   ],
   "source": [
    "%load_ext aim\n",
    "%aim up"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "zV7fV0CIwCMS"
   },
   "source": [
    "Once the above cell is executed, you will see the Aim UI running in the output cell.\n",
    "\n",
    "![Aim UI](https://user-images.githubusercontent.com/13848158/156644374-ba04963f-4f63-4fb9-b3ef-4d4e1ae521cc.jpg)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "KRw5pgLiwCMS"
   },
   "source": [
    "## Explore the loss and metric"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "MeX1wBjXwCMS"
   },
   "source": [
    "Compare metric curves with the Metrics Explorer - group and aggregate by any hyperparameter to easily compare training runs\n",
    "\n",
    "![Metrics Explorer](https://user-images.githubusercontent.com/13848158/156642623-8cf4911d-bed2-42b8-9f39-374f8d31def8.jpg)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "zGZ5vozGwCMS"
   },
   "source": [
    "## Compare and analyze model outputs"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "mZIUa0aNwCMS"
   },
   "source": [
    "Compare models from different runs together with their input images and labels\n",
    "\n",
    "![Images Explorer](https://user-images.githubusercontent.com/13848158/156642615-c003fb3c-9f37-40f4-b499-ee6623db59ef.jpg)\n",
    "\n",
    "![Images Explorer](https://user-images.githubusercontent.com/13848158/156642618-0c0c380a-75aa-45b1-b431-149f735b3fde.jpg)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "uZKhs2DFwCMS"
   },
   "source": [
    "## Evaluation on original image spacings"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "id": "Ws5wpqPlwCMT"
   },
   "outputs": [],
   "source": [
    "val_org_transforms = Compose(\n",
    "    [\n",
    "        LoadImaged(keys=[\"image\", \"label\"]),\n",
    "        EnsureChannelFirstd(keys=[\"image\", \"label\"]),\n",
    "        Spacingd(keys=[\"image\"], pixdim=(1.5, 1.5, 2.0), mode=\"bilinear\"),\n",
    "        Orientationd(keys=[\"image\"], axcodes=\"RAS\"),\n",
    "        ScaleIntensityRanged(\n",
    "            keys=[\"image\"],\n",
    "            a_min=-57,\n",
    "            a_max=164,\n",
    "            b_min=0.0,\n",
    "            b_max=1.0,\n",
    "            clip=True,\n",
    "        ),\n",
    "        CropForegroundd(keys=[\"image\"], source_key=\"image\"),\n",
    "    ]\n",
    ")\n",
    "\n",
    "val_org_ds = Dataset(data=val_files, transform=val_org_transforms)\n",
    "val_org_loader = DataLoader(val_org_ds, batch_size=1, num_workers=4)\n",
    "\n",
    "post_transforms = Compose(\n",
    "    [\n",
    "        Invertd(\n",
    "            keys=\"pred\",\n",
    "            transform=val_org_transforms,\n",
    "            orig_keys=\"image\",\n",
    "            meta_keys=\"pred_meta_dict\",\n",
    "            orig_meta_keys=\"image_meta_dict\",\n",
    "            meta_key_postfix=\"meta_dict\",\n",
    "            nearest_interp=False,\n",
    "            to_tensor=True,\n",
    "        ),\n",
    "        AsDiscreted(keys=\"pred\", argmax=True, to_onehot=2),\n",
    "        AsDiscreted(keys=\"label\", to_onehot=2),\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "id": "JTkKUwRGwCMT"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Metric on original image spacing:  0.9597647190093994\n"
     ]
    }
   ],
   "source": [
    "model.load_state_dict(torch.load(os.path.join(model_dir, f\"{aim_run.name}_best_metric_model.pth\")))\n",
    "model.eval()\n",
    "\n",
    "with torch.no_grad():\n",
    "    for val_data in val_org_loader:\n",
    "        val_data[\"image\"] = val_data[\"image\"].to(device)\n",
    "        val_data[\"label\"] = val_data[\"label\"].to(device)\n",
    "        roi_size = (160, 160, 160)\n",
    "        sw_batch_size = 4\n",
    "        val_data[\"pred\"] = sliding_window_inference(val_data[\"image\"], roi_size, sw_batch_size, model)\n",
    "        val_data = [post_transforms(i) for i in decollate_batch(val_data)]\n",
    "        val_outputs, val_labels = from_engine([\"pred\", \"label\"])(val_data)\n",
    "        # compute metric for current iteration\n",
    "        dice_metric(y_pred=val_outputs, y=val_labels)\n",
    "\n",
    "    # aggregate the final mean dice result\n",
    "    metric_org = dice_metric.aggregate().item()\n",
    "    # reset the status for next validation round\n",
    "    dice_metric.reset()\n",
    "\n",
    "print(\"Metric on original image spacing: \", metric_org)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "chILRaduwCMT"
   },
   "source": [
    "## Clean up data directory\n",
    "\n",
    "Remove the data directory if a temporary one was used."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "id": "yuCFCxOcwCMT"
   },
   "outputs": [],
   "source": [
    "if directory is None:\n",
    "    shutil.rmtree(root_dir)"
   ]
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "colab": {
   "name": "spleen_segmentation_3d_visualization.ipynb",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.14"
  },
  "vscode": {
   "interpreter": {
    "hash": "916dbcbb3f70747c44a77c7bcd40155683ae19c65e1c03b4aa3499c5328201f1"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
