{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "3d46e829-3d0d-4cd0-a6a3-3ee6a6ad60d0",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Tensorflow version 2.16.1\n",
      "GPU is OFF\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "import glob\n",
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from skimage import measure\n",
    "from skimage.io import imread, imsave\n",
    "from skimage.transform import resize\n",
    "from skimage.morphology import dilation, disk\n",
    "from skimage.draw import polygon_perimeter\n",
    "\n",
    "# Report the TensorFlow version and whether TF can see a GPU.\n",
    "print(f'Tensorflow version {tf.__version__}')\n",
    "print(f'GPU is {\"ON\" if tf.config.list_physical_devices(\"GPU\") else \"OFF\" }')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "6cd78b30-ff55-4b5b-8aea-783a8026ba48",
   "metadata": {},
   "outputs": [],
   "source": [
    "CLASSES = 8  # number of segmentation classes (mask pixel values 0..7)\n",
    "\n",
    "# One colour per class index -- presumably for mask visualisation (verify\n",
    "# against the plotting cells).\n",
    "COLORS = ['black', 'red', 'lime',\n",
    "          'blue', 'orange', 'pink',\n",
    "          'cyan', 'magenta']\n",
    "\n",
    "SAMPLE_SIZE = (256, 256)  # (height, width) after augmentation/downsampling\n",
    "\n",
    "OUTPUT_SIZE = (1080, 1920)  # (height, width) images/masks are resized to on load"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "64dbfc6a",
   "metadata": {},
   "outputs": [],
   "source": [
    "def load_images(image, mask):\n",
    "    \"\"\"Load an (image path, mask path) pair and one-hot encode the mask.\n",
    "\n",
    "    Args:\n",
    "        image: scalar string tensor -- path to a JPEG image.\n",
    "        mask: scalar string tensor -- path to a PNG mask whose grey value\n",
    "            encodes the integer class index (0..CLASSES-1).\n",
    "\n",
    "    Returns:\n",
    "        Tuple of (image, masks): float32 image in [0, 1] resized to\n",
    "        OUTPUT_SIZE, and float32 one-hot masks of shape\n",
    "        OUTPUT_SIZE + (CLASSES,).\n",
    "    \"\"\"\n",
    "    image = tf.io.read_file(image)\n",
    "    image = tf.io.decode_jpeg(image)\n",
    "    # resize() already returns float32 in [0, 255]; just rescale to [0, 1]\n",
    "    # (convert_image_dtype on a float tensor is a no-op, so it is dropped).\n",
    "    image = tf.image.resize(image, OUTPUT_SIZE)\n",
    "    image = image / 255.0\n",
    "\n",
    "    mask = tf.io.read_file(mask)\n",
    "    mask = tf.io.decode_png(mask)\n",
    "    mask = tf.image.rgb_to_grayscale(mask)\n",
    "    # Use nearest-neighbour when resizing a label map: bilinear would blend\n",
    "    # neighbouring class indices into fractional grey values that never\n",
    "    # match tf.equal(mask, float(i)) below.\n",
    "    mask = tf.image.resize(mask, OUTPUT_SIZE, method='nearest')\n",
    "    # Nearest resize preserves the uint8 dtype; cast (not convert_image_dtype,\n",
    "    # which would rescale by 1/255) so the integer comparison still holds.\n",
    "    mask = tf.cast(mask, tf.float32)\n",
    "\n",
    "    # Build one binary channel per class index.\n",
    "    masks = []\n",
    "    for i in range(CLASSES):\n",
    "        masks.append(tf.where(tf.equal(mask, float(i)), 1.0, 0.0))\n",
    "\n",
    "    masks = tf.stack(masks, axis=2)\n",
    "    masks = tf.reshape(masks, OUTPUT_SIZE + (CLASSES,))\n",
    "\n",
    "    return image, masks\n",
    "\n",
    "def augmentate_images(image, masks):\n",
    "    \"\"\"Randomly central-crop and maybe flip an (image, masks) pair,\n",
    "    then resize both to SAMPLE_SIZE.\n",
    "\n",
    "    The same crop fraction and flip decision are applied to image and\n",
    "    masks so they stay spatially aligned.\n",
    "    \"\"\"\n",
    "    random_crop = tf.random.uniform((), 0.3, 1)\n",
    "    image = tf.image.central_crop(image, random_crop)\n",
    "    masks = tf.image.central_crop(masks, random_crop)\n",
    "\n",
    "    # Flip both tensors together with probability 0.5 (AutoGraph turns\n",
    "    # this tensor-dependent `if` into a tf.cond inside dataset.map).\n",
    "    random_flip = tf.random.uniform((), 0, 1)\n",
    "    if random_flip >= 0.5:\n",
    "        image = tf.image.flip_left_right(image)\n",
    "        masks = tf.image.flip_left_right(masks)\n",
    "\n",
    "    image = tf.image.resize(image, SAMPLE_SIZE)\n",
    "    # Nearest-neighbour keeps the one-hot mask channels binary; bilinear\n",
    "    # would produce fractional class memberships at object boundaries.\n",
    "    masks = tf.image.resize(masks, SAMPLE_SIZE, method='nearest')\n",
    "\n",
    "    return image, masks"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "60058e49",
   "metadata": {},
   "outputs": [
    {
     "ename": "TypeError",
     "evalue": "in user code:\n\n    File \"/var/folders/pr/9p05mn9d2s9d7y5bw252nbqr0000gn/T/ipykernel_2380/827882302.py\", line 2, in load_images  *\n        image = tf.io.read_file(image)\n\n    TypeError: Input 'filename' of 'ReadFile' Op has type float32 that does not match expected type of string.\n",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[7], line 9\u001b[0m\n\u001b[1;32m      5\u001b[0m masks_dataset \u001b[38;5;241m=\u001b[39m tf\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mDataset\u001b[38;5;241m.\u001b[39mfrom_tensor_slices(masks)\n\u001b[1;32m      7\u001b[0m dataset \u001b[38;5;241m=\u001b[39m tf\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mDataset\u001b[38;5;241m.\u001b[39mzip((images_dataset, masks_dataset))\n\u001b[0;32m----> 9\u001b[0m dataset \u001b[38;5;241m=\u001b[39m dataset\u001b[38;5;241m.\u001b[39mmap(load_images, num_parallel_calls\u001b[38;5;241m=\u001b[39mtf\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mAUTOTUNE)\n\u001b[1;32m     10\u001b[0m dataset \u001b[38;5;241m=\u001b[39m dataset\u001b[38;5;241m.\u001b[39mrepeat(\u001b[38;5;241m60\u001b[39m)\n\u001b[1;32m     11\u001b[0m dataset \u001b[38;5;241m=\u001b[39m dataset\u001b[38;5;241m.\u001b[39mmap(augmentate_images, num_parallel_calls\u001b[38;5;241m=\u001b[39mtf\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mAUTOTUNE)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/data/ops/dataset_ops.py:2299\u001b[0m, in \u001b[0;36mDatasetV2.map\u001b[0;34m(self, map_func, num_parallel_calls, deterministic, name)\u001b[0m\n\u001b[1;32m   2295\u001b[0m \u001b[38;5;66;03m# Loaded lazily due to a circular dependency (dataset_ops -> map_op ->\u001b[39;00m\n\u001b[1;32m   2296\u001b[0m \u001b[38;5;66;03m# dataset_ops).\u001b[39;00m\n\u001b[1;32m   2297\u001b[0m \u001b[38;5;66;03m# pylint: disable=g-import-not-at-top,protected-access\u001b[39;00m\n\u001b[1;32m   2298\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtensorflow\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mpython\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdata\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mops\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m map_op\n\u001b[0;32m-> 2299\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m map_op\u001b[38;5;241m.\u001b[39m_map_v2(\n\u001b[1;32m   2300\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m   2301\u001b[0m     map_func,\n\u001b[1;32m   2302\u001b[0m     num_parallel_calls\u001b[38;5;241m=\u001b[39mnum_parallel_calls,\n\u001b[1;32m   2303\u001b[0m     deterministic\u001b[38;5;241m=\u001b[39mdeterministic,\n\u001b[1;32m   2304\u001b[0m     name\u001b[38;5;241m=\u001b[39mname)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/data/ops/map_op.py:40\u001b[0m, in \u001b[0;36m_map_v2\u001b[0;34m(input_dataset, map_func, num_parallel_calls, deterministic, name)\u001b[0m\n\u001b[1;32m     37\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m _MapDataset(\n\u001b[1;32m     38\u001b[0m       input_dataset, map_func, preserve_cardinality\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, name\u001b[38;5;241m=\u001b[39mname)\n\u001b[1;32m     39\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m---> 40\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m _ParallelMapDataset(\n\u001b[1;32m     41\u001b[0m       input_dataset,\n\u001b[1;32m     42\u001b[0m       map_func,\n\u001b[1;32m     43\u001b[0m       num_parallel_calls\u001b[38;5;241m=\u001b[39mnum_parallel_calls,\n\u001b[1;32m     44\u001b[0m       deterministic\u001b[38;5;241m=\u001b[39mdeterministic,\n\u001b[1;32m     45\u001b[0m       preserve_cardinality\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m     46\u001b[0m       name\u001b[38;5;241m=\u001b[39mname)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/data/ops/map_op.py:148\u001b[0m, in \u001b[0;36m_ParallelMapDataset.__init__\u001b[0;34m(self, input_dataset, map_func, num_parallel_calls, deterministic, use_inter_op_parallelism, preserve_cardinality, use_legacy_function, name)\u001b[0m\n\u001b[1;32m    146\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_input_dataset \u001b[38;5;241m=\u001b[39m input_dataset\n\u001b[1;32m    147\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_use_inter_op_parallelism \u001b[38;5;241m=\u001b[39m use_inter_op_parallelism\n\u001b[0;32m--> 148\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_map_func \u001b[38;5;241m=\u001b[39m structured_function\u001b[38;5;241m.\u001b[39mStructuredFunctionWrapper(\n\u001b[1;32m    149\u001b[0m     map_func,\n\u001b[1;32m    150\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_transformation_name(),\n\u001b[1;32m    151\u001b[0m     dataset\u001b[38;5;241m=\u001b[39minput_dataset,\n\u001b[1;32m    152\u001b[0m     use_legacy_function\u001b[38;5;241m=\u001b[39muse_legacy_function)\n\u001b[1;32m    153\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m deterministic \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    154\u001b[0m   \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_deterministic \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdefault\u001b[39m\u001b[38;5;124m\"\u001b[39m\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/data/ops/structured_function.py:265\u001b[0m, in \u001b[0;36mStructuredFunctionWrapper.__init__\u001b[0;34m(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)\u001b[0m\n\u001b[1;32m    258\u001b[0m       warnings\u001b[38;5;241m.\u001b[39mwarn(\n\u001b[1;32m    259\u001b[0m           \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mEven though the `tf.config.experimental_run_functions_eagerly` \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m    260\u001b[0m           \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moption is set, this option does not apply to tf.data functions. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m    261\u001b[0m           \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTo force eager execution of tf.data functions, please use \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m    262\u001b[0m           \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m`tf.data.experimental.enable_debug_mode()`.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m    263\u001b[0m     fn_factory \u001b[38;5;241m=\u001b[39m trace_tf_function(defun_kwargs)\n\u001b[0;32m--> 265\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_function \u001b[38;5;241m=\u001b[39m fn_factory()\n\u001b[1;32m    266\u001b[0m \u001b[38;5;66;03m# There is no graph to add in eager mode.\u001b[39;00m\n\u001b[1;32m    267\u001b[0m add_to_graph \u001b[38;5;241m&\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m context\u001b[38;5;241m.\u001b[39mexecuting_eagerly()\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:1251\u001b[0m, in \u001b[0;36mFunction.get_concrete_function\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1249\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_concrete_function\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m   1250\u001b[0m   \u001b[38;5;66;03m# Implements PolymorphicFunction.get_concrete_function.\u001b[39;00m\n\u001b[0;32m-> 1251\u001b[0m   concrete \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_concrete_function_garbage_collected(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m   1252\u001b[0m   concrete\u001b[38;5;241m.\u001b[39m_garbage_collector\u001b[38;5;241m.\u001b[39mrelease()  \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n\u001b[1;32m   1253\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m concrete\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:1221\u001b[0m, in \u001b[0;36mFunction._get_concrete_function_garbage_collected\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1219\u001b[0m   \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_variable_creation_config \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m   1220\u001b[0m     initializers \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m-> 1221\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_initialize(args, kwargs, add_initializers_to\u001b[38;5;241m=\u001b[39minitializers)\n\u001b[1;32m   1222\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_initialize_uninitialized_variables(initializers)\n\u001b[1;32m   1224\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_created_variables:\n\u001b[1;32m   1225\u001b[0m   \u001b[38;5;66;03m# In this case we have created variables on the first call, so we run the\u001b[39;00m\n\u001b[1;32m   1226\u001b[0m   \u001b[38;5;66;03m# version which is guaranteed to never create variables.\u001b[39;00m\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:696\u001b[0m, in \u001b[0;36mFunction._initialize\u001b[0;34m(self, args, kwds, add_initializers_to)\u001b[0m\n\u001b[1;32m    691\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_variable_creation_config \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate_scoped_tracing_options(\n\u001b[1;32m    692\u001b[0m     variable_capturing_scope,\n\u001b[1;32m    693\u001b[0m     tracing_compilation\u001b[38;5;241m.\u001b[39mScopeType\u001b[38;5;241m.\u001b[39mVARIABLE_CREATION,\n\u001b[1;32m    694\u001b[0m )\n\u001b[1;32m    695\u001b[0m \u001b[38;5;66;03m# Force the definition of the function for these arguments\u001b[39;00m\n\u001b[0;32m--> 696\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_concrete_variable_creation_fn \u001b[38;5;241m=\u001b[39m tracing_compilation\u001b[38;5;241m.\u001b[39mtrace_function(\n\u001b[1;32m    697\u001b[0m     args, kwds, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_variable_creation_config\n\u001b[1;32m    698\u001b[0m )\n\u001b[1;32m    700\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21minvalid_creator_scope\u001b[39m(\u001b[38;5;241m*\u001b[39munused_args, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39munused_kwds):\n\u001b[1;32m    701\u001b[0m \u001b[38;5;250m  \u001b[39m\u001b[38;5;124;03m\"\"\"Disables variable creation.\"\"\"\u001b[39;00m\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py:178\u001b[0m, in \u001b[0;36mtrace_function\u001b[0;34m(args, kwargs, tracing_options)\u001b[0m\n\u001b[1;32m    175\u001b[0m     args \u001b[38;5;241m=\u001b[39m tracing_options\u001b[38;5;241m.\u001b[39minput_signature\n\u001b[1;32m    176\u001b[0m     kwargs \u001b[38;5;241m=\u001b[39m {}\n\u001b[0;32m--> 178\u001b[0m   concrete_function \u001b[38;5;241m=\u001b[39m _maybe_define_function(\n\u001b[1;32m    179\u001b[0m       args, kwargs, tracing_options\n\u001b[1;32m    180\u001b[0m   )\n\u001b[1;32m    182\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m tracing_options\u001b[38;5;241m.\u001b[39mbind_graph_to_function:\n\u001b[1;32m    183\u001b[0m   concrete_function\u001b[38;5;241m.\u001b[39m_garbage_collector\u001b[38;5;241m.\u001b[39mrelease()  \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py:283\u001b[0m, in \u001b[0;36m_maybe_define_function\u001b[0;34m(args, kwargs, tracing_options)\u001b[0m\n\u001b[1;32m    281\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    282\u001b[0m   target_func_type \u001b[38;5;241m=\u001b[39m lookup_func_type\n\u001b[0;32m--> 283\u001b[0m concrete_function \u001b[38;5;241m=\u001b[39m _create_concrete_function(\n\u001b[1;32m    284\u001b[0m     target_func_type, lookup_func_context, func_graph, tracing_options\n\u001b[1;32m    285\u001b[0m )\n\u001b[1;32m    287\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m tracing_options\u001b[38;5;241m.\u001b[39mfunction_cache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    288\u001b[0m   tracing_options\u001b[38;5;241m.\u001b[39mfunction_cache\u001b[38;5;241m.\u001b[39madd(\n\u001b[1;32m    289\u001b[0m       concrete_function, current_func_context\n\u001b[1;32m    290\u001b[0m   )\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py:310\u001b[0m, in \u001b[0;36m_create_concrete_function\u001b[0;34m(function_type, type_context, func_graph, tracing_options)\u001b[0m\n\u001b[1;32m    303\u001b[0m   placeholder_bound_args \u001b[38;5;241m=\u001b[39m function_type\u001b[38;5;241m.\u001b[39mplaceholder_arguments(\n\u001b[1;32m    304\u001b[0m       placeholder_context\n\u001b[1;32m    305\u001b[0m   )\n\u001b[1;32m    307\u001b[0m disable_acd \u001b[38;5;241m=\u001b[39m tracing_options\u001b[38;5;241m.\u001b[39mattributes \u001b[38;5;129;01mand\u001b[39;00m tracing_options\u001b[38;5;241m.\u001b[39mattributes\u001b[38;5;241m.\u001b[39mget(\n\u001b[1;32m    308\u001b[0m     attributes_lib\u001b[38;5;241m.\u001b[39mDISABLE_ACD, \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[1;32m    309\u001b[0m )\n\u001b[0;32m--> 310\u001b[0m traced_func_graph \u001b[38;5;241m=\u001b[39m func_graph_module\u001b[38;5;241m.\u001b[39mfunc_graph_from_py_func(\n\u001b[1;32m    311\u001b[0m     tracing_options\u001b[38;5;241m.\u001b[39mname,\n\u001b[1;32m    312\u001b[0m     tracing_options\u001b[38;5;241m.\u001b[39mpython_function,\n\u001b[1;32m    313\u001b[0m     placeholder_bound_args\u001b[38;5;241m.\u001b[39margs,\n\u001b[1;32m    314\u001b[0m     placeholder_bound_args\u001b[38;5;241m.\u001b[39mkwargs,\n\u001b[1;32m    315\u001b[0m     \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m    316\u001b[0m     func_graph\u001b[38;5;241m=\u001b[39mfunc_graph,\n\u001b[1;32m    317\u001b[0m     add_control_dependencies\u001b[38;5;241m=\u001b[39m\u001b[38;5;129;01mnot\u001b[39;00m disable_acd,\n\u001b[1;32m    318\u001b[0m     arg_names\u001b[38;5;241m=\u001b[39mfunction_type_utils\u001b[38;5;241m.\u001b[39mto_arg_names(function_type),\n\u001b[1;32m    319\u001b[0m     create_placeholders\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m    320\u001b[0m 
)\n\u001b[1;32m    322\u001b[0m transform\u001b[38;5;241m.\u001b[39mapply_func_graph_transforms(traced_func_graph)\n\u001b[1;32m    324\u001b[0m graph_capture_container \u001b[38;5;241m=\u001b[39m traced_func_graph\u001b[38;5;241m.\u001b[39mfunction_captures\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/framework/func_graph.py:1059\u001b[0m, in \u001b[0;36mfunc_graph_from_py_func\u001b[0;34m(name, python_func, args, kwargs, signature, func_graph, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, create_placeholders)\u001b[0m\n\u001b[1;32m   1056\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m x\n\u001b[1;32m   1058\u001b[0m _, original_func \u001b[38;5;241m=\u001b[39m tf_decorator\u001b[38;5;241m.\u001b[39munwrap(python_func)\n\u001b[0;32m-> 1059\u001b[0m func_outputs \u001b[38;5;241m=\u001b[39m python_func(\u001b[38;5;241m*\u001b[39mfunc_args, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mfunc_kwargs)\n\u001b[1;32m   1061\u001b[0m \u001b[38;5;66;03m# invariant: `func_outputs` contains only Tensors, CompositeTensors,\u001b[39;00m\n\u001b[1;32m   1062\u001b[0m \u001b[38;5;66;03m# TensorArrays and `None`s.\u001b[39;00m\n\u001b[1;32m   1063\u001b[0m func_outputs \u001b[38;5;241m=\u001b[39m variable_utils\u001b[38;5;241m.\u001b[39mconvert_variables_to_tensors(func_outputs)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py:599\u001b[0m, in \u001b[0;36mFunction._generate_scoped_tracing_options.<locals>.wrapped_fn\u001b[0;34m(*args, **kwds)\u001b[0m\n\u001b[1;32m    595\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m default_graph\u001b[38;5;241m.\u001b[39m_variable_creator_scope(scope, priority\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m50\u001b[39m):  \u001b[38;5;66;03m# pylint: disable=protected-access\u001b[39;00m\n\u001b[1;32m    596\u001b[0m   \u001b[38;5;66;03m# __wrapped__ allows AutoGraph to swap in a converted function. We give\u001b[39;00m\n\u001b[1;32m    597\u001b[0m   \u001b[38;5;66;03m# the function a weak reference to itself to avoid a reference cycle.\u001b[39;00m\n\u001b[1;32m    598\u001b[0m   \u001b[38;5;28;01mwith\u001b[39;00m OptionalXlaContext(compile_with_xla):\n\u001b[0;32m--> 599\u001b[0m     out \u001b[38;5;241m=\u001b[39m weak_wrapped_fn()\u001b[38;5;241m.\u001b[39m__wrapped__(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwds)\n\u001b[1;32m    600\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m out\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/data/ops/structured_function.py:231\u001b[0m, in \u001b[0;36mStructuredFunctionWrapper.__init__.<locals>.trace_tf_function.<locals>.wrapped_fn\u001b[0;34m(*args)\u001b[0m\n\u001b[1;32m    230\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapped_fn\u001b[39m(\u001b[38;5;241m*\u001b[39margs):  \u001b[38;5;66;03m# pylint: disable=missing-docstring\u001b[39;00m\n\u001b[0;32m--> 231\u001b[0m   ret \u001b[38;5;241m=\u001b[39m wrapper_helper(\u001b[38;5;241m*\u001b[39margs)\n\u001b[1;32m    232\u001b[0m   ret \u001b[38;5;241m=\u001b[39m structure\u001b[38;5;241m.\u001b[39mto_tensor_list(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_output_structure, ret)\n\u001b[1;32m    233\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m [ops\u001b[38;5;241m.\u001b[39mconvert_to_tensor(t) \u001b[38;5;28;01mfor\u001b[39;00m t \u001b[38;5;129;01min\u001b[39;00m ret]\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/data/ops/structured_function.py:161\u001b[0m, in \u001b[0;36mStructuredFunctionWrapper.__init__.<locals>.wrapper_helper\u001b[0;34m(*args)\u001b[0m\n\u001b[1;32m    159\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m _should_unpack(nested_args):\n\u001b[1;32m    160\u001b[0m   nested_args \u001b[38;5;241m=\u001b[39m (nested_args,)\n\u001b[0;32m--> 161\u001b[0m ret \u001b[38;5;241m=\u001b[39m autograph\u001b[38;5;241m.\u001b[39mtf_convert(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_func, ag_ctx)(\u001b[38;5;241m*\u001b[39mnested_args)\n\u001b[1;32m    162\u001b[0m ret \u001b[38;5;241m=\u001b[39m variable_utils\u001b[38;5;241m.\u001b[39mconvert_variables_to_tensors(ret)\n\u001b[1;32m    163\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _should_pack(ret):\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/autograph/impl/api.py:693\u001b[0m, in \u001b[0;36mconvert.<locals>.decorator.<locals>.wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m    691\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:  \u001b[38;5;66;03m# pylint:disable=broad-except\u001b[39;00m\n\u001b[1;32m    692\u001b[0m   \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(e, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mag_error_metadata\u001b[39m\u001b[38;5;124m'\u001b[39m):\n\u001b[0;32m--> 693\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m e\u001b[38;5;241m.\u001b[39mag_error_metadata\u001b[38;5;241m.\u001b[39mto_exception(e)\n\u001b[1;32m    694\u001b[0m   \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    695\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/autograph/impl/api.py:690\u001b[0m, in \u001b[0;36mconvert.<locals>.decorator.<locals>.wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m    688\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    689\u001b[0m   \u001b[38;5;28;01mwith\u001b[39;00m conversion_ctx:\n\u001b[0;32m--> 690\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m converted_call(f, args, kwargs, options\u001b[38;5;241m=\u001b[39moptions)\n\u001b[1;32m    691\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:  \u001b[38;5;66;03m# pylint:disable=broad-except\u001b[39;00m\n\u001b[1;32m    692\u001b[0m   \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(e, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mag_error_metadata\u001b[39m\u001b[38;5;124m'\u001b[39m):\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/autograph/impl/api.py:439\u001b[0m, in \u001b[0;36mconverted_call\u001b[0;34m(f, args, kwargs, caller_fn_scope, options)\u001b[0m\n\u001b[1;32m    437\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    438\u001b[0m   \u001b[38;5;28;01mif\u001b[39;00m kwargs \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 439\u001b[0m     result \u001b[38;5;241m=\u001b[39m converted_f(\u001b[38;5;241m*\u001b[39meffective_args, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m    440\u001b[0m   \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    441\u001b[0m     result \u001b[38;5;241m=\u001b[39m converted_f(\u001b[38;5;241m*\u001b[39meffective_args)\n",
      "File \u001b[0;32m/var/folders/pr/9p05mn9d2s9d7y5bw252nbqr0000gn/T/__autograph_generated_file4aq8ha9l.py:10\u001b[0m, in \u001b[0;36mouter_factory.<locals>.inner_factory.<locals>.tf__load_images\u001b[0;34m(image, mask)\u001b[0m\n\u001b[1;32m      8\u001b[0m do_return \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[1;32m      9\u001b[0m retval_ \u001b[38;5;241m=\u001b[39m ag__\u001b[38;5;241m.\u001b[39mUndefinedReturnValue()\n\u001b[0;32m---> 10\u001b[0m image \u001b[38;5;241m=\u001b[39m ag__\u001b[38;5;241m.\u001b[39mconverted_call(ag__\u001b[38;5;241m.\u001b[39mld(tf)\u001b[38;5;241m.\u001b[39mio\u001b[38;5;241m.\u001b[39mread_file, (ag__\u001b[38;5;241m.\u001b[39mld(image),), \u001b[38;5;28;01mNone\u001b[39;00m, fscope)\n\u001b[1;32m     11\u001b[0m image \u001b[38;5;241m=\u001b[39m ag__\u001b[38;5;241m.\u001b[39mconverted_call(ag__\u001b[38;5;241m.\u001b[39mld(tf)\u001b[38;5;241m.\u001b[39mio\u001b[38;5;241m.\u001b[39mdecode_jpeg, (ag__\u001b[38;5;241m.\u001b[39mld(image),), \u001b[38;5;28;01mNone\u001b[39;00m, fscope)\n\u001b[1;32m     12\u001b[0m image \u001b[38;5;241m=\u001b[39m ag__\u001b[38;5;241m.\u001b[39mconverted_call(ag__\u001b[38;5;241m.\u001b[39mld(tf)\u001b[38;5;241m.\u001b[39mimage\u001b[38;5;241m.\u001b[39mresize, (ag__\u001b[38;5;241m.\u001b[39mld(image), ag__\u001b[38;5;241m.\u001b[39mld(OUTPUT_SIZE)), \u001b[38;5;28;01mNone\u001b[39;00m, fscope)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/autograph/impl/api.py:331\u001b[0m, in \u001b[0;36mconverted_call\u001b[0;34m(f, args, kwargs, caller_fn_scope, options)\u001b[0m\n\u001b[1;32m    329\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m conversion\u001b[38;5;241m.\u001b[39mis_in_allowlist_cache(f, options):\n\u001b[1;32m    330\u001b[0m   logging\u001b[38;5;241m.\u001b[39mlog(\u001b[38;5;241m2\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mAllowlisted \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m: from cache\u001b[39m\u001b[38;5;124m'\u001b[39m, f)\n\u001b[0;32m--> 331\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m _call_unconverted(f, args, kwargs, options, \u001b[38;5;28;01mFalse\u001b[39;00m)\n\u001b[1;32m    333\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m ag_ctx\u001b[38;5;241m.\u001b[39mcontrol_status_ctx()\u001b[38;5;241m.\u001b[39mstatus \u001b[38;5;241m==\u001b[39m ag_ctx\u001b[38;5;241m.\u001b[39mStatus\u001b[38;5;241m.\u001b[39mDISABLED:\n\u001b[1;32m    334\u001b[0m   logging\u001b[38;5;241m.\u001b[39mlog(\u001b[38;5;241m2\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mAllowlisted: \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m: AutoGraph is disabled in context\u001b[39m\u001b[38;5;124m'\u001b[39m, f)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/autograph/impl/api.py:460\u001b[0m, in \u001b[0;36m_call_unconverted\u001b[0;34m(f, args, kwargs, options, update_cache)\u001b[0m\n\u001b[1;32m    458\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m kwargs \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    459\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m f(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m--> 460\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m f(\u001b[38;5;241m*\u001b[39margs)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/ops/io_ops.py:134\u001b[0m, in \u001b[0;36mread_file\u001b[0;34m(filename, name)\u001b[0m\n\u001b[1;32m     97\u001b[0m \u001b[38;5;129m@tf_export\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mio.read_file\u001b[39m\u001b[38;5;124m\"\u001b[39m, v1\u001b[38;5;241m=\u001b[39m[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mio.read_file\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mread_file\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[1;32m     98\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mread_file\u001b[39m(filename, name\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[1;32m     99\u001b[0m \u001b[38;5;250m  \u001b[39m\u001b[38;5;124;03m\"\"\"Reads the contents of file.\u001b[39;00m\n\u001b[1;32m    100\u001b[0m \n\u001b[1;32m    101\u001b[0m \u001b[38;5;124;03m  This operation returns a tensor with the entire contents of the input\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    132\u001b[0m \u001b[38;5;124;03m    A tensor of dtype \"string\", with the file contents.\u001b[39;00m\n\u001b[1;32m    133\u001b[0m \u001b[38;5;124;03m  \"\"\"\u001b[39;00m\n\u001b[0;32m--> 134\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m gen_io_ops\u001b[38;5;241m.\u001b[39mread_file(filename, name)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/ops/gen_io_ops.py:588\u001b[0m, in \u001b[0;36mread_file\u001b[0;34m(filename, name)\u001b[0m\n\u001b[1;32m    586\u001b[0m     \u001b[38;5;28;01mpass\u001b[39;00m  \u001b[38;5;66;03m# Add nodes to the TensorFlow graph.\u001b[39;00m\n\u001b[1;32m    587\u001b[0m \u001b[38;5;66;03m# Add nodes to the TensorFlow graph.\u001b[39;00m\n\u001b[0;32m--> 588\u001b[0m _, _, _op, _outputs \u001b[38;5;241m=\u001b[39m _op_def_library\u001b[38;5;241m.\u001b[39m_apply_op_helper(\n\u001b[1;32m    589\u001b[0m       \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mReadFile\u001b[39m\u001b[38;5;124m\"\u001b[39m, filename\u001b[38;5;241m=\u001b[39mfilename, name\u001b[38;5;241m=\u001b[39mname)\n\u001b[1;32m    590\u001b[0m _result \u001b[38;5;241m=\u001b[39m _outputs[:]\n\u001b[1;32m    591\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _execute\u001b[38;5;241m.\u001b[39mmust_record_gradient():\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/framework/op_def_library.py:778\u001b[0m, in \u001b[0;36m_apply_op_helper\u001b[0;34m(op_type_name, name, **keywords)\u001b[0m\n\u001b[1;32m    776\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m g\u001b[38;5;241m.\u001b[39mas_default(), ops\u001b[38;5;241m.\u001b[39mname_scope(name) \u001b[38;5;28;01mas\u001b[39;00m scope:\n\u001b[1;32m    777\u001b[0m   \u001b[38;5;28;01mif\u001b[39;00m fallback:\n\u001b[0;32m--> 778\u001b[0m     _ExtractInputsAndAttrs(op_type_name, op_def, allowed_list_attr_map,\n\u001b[1;32m    779\u001b[0m                            keywords, default_type_attr_map, attrs, inputs,\n\u001b[1;32m    780\u001b[0m                            input_types)\n\u001b[1;32m    781\u001b[0m     _ExtractRemainingAttrs(op_type_name, op_def, keywords,\n\u001b[1;32m    782\u001b[0m                            default_type_attr_map, attrs)\n\u001b[1;32m    783\u001b[0m     _ExtractAttrProto(op_type_name, op_def, attrs, attr_protos)\n",
      "File \u001b[0;32m/opt/anaconda3/lib/python3.11/site-packages/tensorflow/python/framework/op_def_library.py:578\u001b[0m, in \u001b[0;36m_ExtractInputsAndAttrs\u001b[0;34m(op_type_name, op_def, allowed_list_attr_map, keywords, default_type_attr_map, attrs, inputs, input_types)\u001b[0m\n\u001b[1;32m    575\u001b[0m prefix \u001b[38;5;241m=\u001b[39m (\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mInput \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m of \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m Op has type \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m that does not match\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m\n\u001b[1;32m    576\u001b[0m           (input_name, op_type_name, observed))\n\u001b[1;32m    577\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m input_arg\u001b[38;5;241m.\u001b[39mtype \u001b[38;5;241m!=\u001b[39m types_pb2\u001b[38;5;241m.\u001b[39mDT_INVALID:\n\u001b[0;32m--> 578\u001b[0m   \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mprefix\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m expected type of \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m    579\u001b[0m                   \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdtypes\u001b[38;5;241m.\u001b[39mas_dtype(input_arg\u001b[38;5;241m.\u001b[39mtype)\u001b[38;5;241m.\u001b[39mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m    580\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    581\u001b[0m   \u001b[38;5;66;03m# Update the maps with the default, if needed.\u001b[39;00m\n\u001b[1;32m    582\u001b[0m   k \u001b[38;5;241m=\u001b[39m input_arg\u001b[38;5;241m.\u001b[39mtype_attr\n",
      "\u001b[0;31mTypeError\u001b[0m: in user code:\n\n    File \"/var/folders/pr/9p05mn9d2s9d7y5bw252nbqr0000gn/T/ipykernel_2380/827882302.py\", line 2, in load_images  *\n        image = tf.io.read_file(image)\n\n    TypeError: Input 'filename' of 'ReadFile' Op has type float32 that does not match expected type of string.\n"
     ]
    }
   ],
   "source": [
    "images = sorted(glob.glob('UNet/Dataset/images/*.jpg'))\n",
    "masks = sorted(glob.glob('UNet/Dataset/masks/*.png'))\n",
    "\n",
    "images_dataset = tf.data.Dataset.from_tensor_slices(images)\n",
    "masks_dataset = tf.data.Dataset.from_tensor_slices(masks)\n",
    "\n",
    "dataset = tf.data.Dataset.zip((images_dataset, masks_dataset))\n",
    "\n",
    "dataset = dataset.map(load_images, num_parallel_calls=tf.data.AUTOTUNE)\n",
    "dataset = dataset.repeat(60)\n",
    "dataset = dataset.map(augmentate_images, num_parallel_calls=tf.data.AUTOTUNE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "0069c1a9",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2024-03-24 23:53:25.753468: W tensorflow/core/framework/local_rendezvous.cc:404] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAABRYAAAH/CAYAAAAi34GHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABFl0lEQVR4nO3df4xV9Z0//tfAMDPq7kwj1BEFEbpaaU2tDIGCIWRdHaPGhj820rgRdG2yk6aryOoWykaLMZm0TU1qK/hpBZsm6BJ/xj9Yy/xREYXsruzQNIXERlwHW5AMxjuoFRTe3z9c5us4M8j7OpeZw3k8kvvHHM+Z+0R4vnLyunfm1qWUUgAAAAAAZBg32gEAAAAAgOKxWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAsmUvFl988cW44YYb4rzzzou6urp49tlnP/OaLVu2RFtbWzQ1NcWMGTPi4YcfriYrMMr0H8rNDIDy0n8oNzMAGE72YvG9996Lyy67LH7+85+f1Pmvv/56XHfddbFgwYLo7u6O73//+3H77bfHU089lR0WGF36D+VmBkB56T+UmxkADKcupZSqvriuLp555plYtGjRsOd873vfi+eeey52797df6yjoyN+97vfxfbt26t9amCU6T+UmxkA5aX/UG5mAPBJ9bV+gu3bt0d7e/uAY9dcc02sW7cuPvzww5gwYcKgaw4fPhyHDx/u//rYsWPx9ttvx8SJE6Ourq7WkaEUUkpx6NChOO+882LcuNr8ulX9h7HpVPQ/wgyAsWqs3gNEmAFQa+4BoNxqMQNqvljcv39/tLa2DjjW2toaH330UfT29sbkyZMHXdPZ2RmrV6+udTQgIvbu3RtTpkypyffWfxjbatn/CDMAxrqxdg8QYQbAqeIeAMptJGdAzReLETHo1YXjP3093KsOK1eujOXLl/d/XalU4oILLoi9e/dGc3Nz7YJCifT19cXUqVPjr//6r2v6PPoPY8+p6n+EGQBj0Vi9B4gwA6DW3ANAudViBtR8sXjuuefG/v37Bxw7cOBA1NfXx8SJE4e8prGxMRobGwcdb25uNlBghNXyxwr0H8a2Wv9YkRkAY9tYuweIMAPgVHEPAOU2kjOgdr9U4f/Mmzcvurq6BhzbvHlzzJ49e9jfrQKcHvQfys0MgPLSfyg3MwDKI3ux+O6778bOnTtj586dEfHxx8jv3Lkzenp6IuLjty8vWbKk//yOjo544403Yvny5bF79+5Yv359rFu3Lu66666R+RMAp4z+Q7mZAVBe+g/lZgYAw0qZfvvb36aIGPRYunRpSimlpUuXpoULFw645oUXXkiXX355amhoSBdeeGFau3Zt1nNWKpUUEalSqeTGBYZRTa/0H04P1fbKDIDTQ1HuAarNCgzPPQCUWy16VZfS//0G1TGsr68vWlpaolKp+N0KMEKK0qui5IQiKVKvipQViqJIvSpSViiCInWqSFmhKGrRq5r/jkUAAAAA4PRjsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIFtVi8U1a9bE9OnTo6mpKdra2mLr1q0nPH/Dhg1x2WWXxZlnnhmTJ0+OW2+9NQ4ePFhVYGB06T+UmxkA5WYGQHnpPzCU7MXixo0bY9myZbFq1aro
7u6OBQsWxLXXXhs9PT1Dnv/SSy/FkiVL4rbbbos//OEP8cQTT8R///d/x7e//e3PHR44tfQfys0MgHIzA6C89B8YVso0Z86c1NHRMeDYJZdcklasWDHk+T/+8Y/TjBkzBhx78MEH05QpU076OSuVSoqIVKlUcuMCw6imV/oPp4dqe2UGwOnBDIDy0n8ot1r0Kusdi0eOHIkdO3ZEe3v7gOPt7e2xbdu2Ia+ZP39+vPnmm7Fp06ZIKcVbb70VTz75ZFx//fXDPs/hw4ejr69vwAMYXfoP5WYGQLmZAVBe+g+cSNZisbe3N44ePRqtra0Djre2tsb+/fuHvGb+/PmxYcOGWLx4cTQ0NMS5554bX/jCF+JnP/vZsM/T2dkZLS0t/Y+pU6fmxARqQP+h3MwAKDczAMpL/4ETqerDW+rq6gZ8nVIadOy4Xbt2xe233x733HNP7NixI55//vl4/fXXo6OjY9jvv3LlyqhUKv2PvXv3VhMTqAH9h3IzA6DczAAoL/0HhlKfc/KkSZNi/Pjxg16VOHDgwKBXL47r7OyMK664Iu6+++6IiPja174WZ511VixYsCDuv//+mDx58qBrGhsbo7GxMScaUGP6D+VmBkC5mQFQXvoPnEjWOxYbGhqira0turq6Bhzv6uqK+fPnD3nN+++/H+PGDXya8ePHR8THr3AAxaD/UG5mAJSbGQDlpf/AiWT/KPTy5cvjkUceifXr18fu3bvjzjvvjJ6env63NK9cuTKWLFnSf/4NN9wQTz/9dKxduzb27NkTL7/8ctx+++0xZ86cOO+880buTwLUnP5DuZkBUG5mAJSX/gPDyfpR6IiIxYsXx8GDB+O+++6Lffv2xaWXXhqbNm2KadOmRUTEvn37oqenp//8W265JQ4dOhQ///nP41/+5V/iC1/4Qlx55ZXxwx/+cOT+FMApof9QbmYAlJsZAOWl/8Bw6lIB3ofc19cXLS0tUalUorm5ebTjwGmhKL0qSk4okiL1qkhZoSiK1KsiZYUiKFKnipQViqIWvarqU6EBAAAAgHKzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkK2qxeKaNWti+vTp0dTUFG1tbbF169YTnn/48OFYtWpVTJs2LRobG+NLX/pSrF+/vqrAwOjSfyg3MwDKzQyA8tJ/YCj1uRds3Lgxli1bFmvWrIkrrrgi/t//+39x7bXXxq5du+KCCy4Y8pobb7wx3nrrrVi3bl38zd/8TRw4cCA++uijzx0eOLX0H8rNDIByMwOgvPQfGE5dSinlXDB37tyYNWtWrF27tv/YzJkzY9GiRdHZ2Tno/Oeffz6+9a1vxZ49e+Lss8+uKmRfX1+0tLREpVKJ5ubmqr4HMFA1vdJ/OD1U2yszAE4PZgCUl/5DudWiV1k/Cn3kyJHYsWNHtLe3Dzje3t4e27ZtG/Ka5557LmbPnh0/+tGP4vzzz4+LL7447rrrrvjLX/4y7PMcPnw4+vr6BjyA0aX/UG5mAJSbGQDlpf/AiWT9KHRvb28cPXo0WltbBxxvbW2N/fv3D3nNnj174qWXXoqmpqZ45plnore3N77zne/E22+/PezvV+js7IzVq1fnRANqTP+h3MwAKDczAMpL/4ETqerDW+rq6gZ8nVIadOy4Y8eORV1dXWzYsCHmzJkT1113XTzwwAPxq1/9athXK1auXBmVSqX/sXfv3mpiAjWg/1BuZgCU
mxkA5aX/wFCy3rE4adKkGD9+/KBXJQ4cODDo1YvjJk+eHOeff360tLT0H5s5c2aklOLNN9+Miy66aNA1jY2N0djYmBMNqDH9h3IzA6DczAAoL/0HTiTrHYsNDQ3R1tYWXV1dA453dXXF/Pnzh7zmiiuuiD//+c/x7rvv9h979dVXY9y4cTFlypQqIgOjQf+h3MwAKDczAMpL/4ETyf5R6OXLl8cjjzwS69evj927d8edd94ZPT090dHREREfv315yZIl/effdNNNMXHixLj11ltj165d8eKLL8bdd98d//iP/xhnnHHGyP1JgJrTfyg3MwDKzQyA8tJ/YDhZPwodEbF48eI4ePBg3HfffbFv37649NJLY9OmTTFt2rSIiNi3b1/09PT0n/9Xf/VX0dXVFf/8z/8cs2fPjokTJ8aNN94Y999//8j9KYBTQv+h3MwAKDczAMpL/4Hh1KWU0miH+Cx9fX3R0tISlUolmpubRzsOnBaK0qui5IQiKVKvipQViqJIvSpSViiCInWqSFmhKGrRq6o+FRoAAAAAKDeLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2apaLK5ZsyamT58eTU1N0dbWFlu3bj2p615++eWor6+Pr3/969U8LTAG6D+UmxkA5WYGQHnpPzCU7MXixo0bY9myZbFq1aro7u6OBQsWxLXXXhs9PT0nvK5SqcSSJUvi7/7u76oOC4wu/YdyMwOg3MwAKC/9B4ZTl1JKORfMnTs3Zs2aFWvXru0/NnPmzFi0aFF0dnYOe923vvWtuOiii2L8+PHx7LPPxs6dO0/6Ofv6+qKlpSUqlUo0NzfnxAWGUU2v9B9OD9X2ygyA04MZAOWl/1ButehV1jsWjxw5Ejt27Ij29vYBx9vb22Pbtm3DXvfoo4/Ga6+9Fvfee+9JPc/hw4ejr69vwAMYXfoP5WYGQLmZAVBe+g+cSNZisbe3N44ePRqtra0Djre2tsb+/fuHvOaPf/xjrFixIjZs2BD19fUn9TydnZ3R0tLS/5g6dWpOTKAG9B/KzQyAcjMDoLz0HziRqj68pa6ubsDXKaVBxyIijh49GjfddFOsXr06Lr744pP+/itXroxKpdL/2Lt3bzUxgRrQfyg3MwDKzQyA8tJ/YCgn99LB/5k0aVKMHz9+0KsSBw4cGPTqRUTEoUOH4pVXXonu7u747ne/GxERx44di5RS1NfXx+bNm+PKK68cdF1jY2M0NjbmRANqTP+h3MwAKDczAMpL/4ETyXrHYkNDQ7S1tUVXV9eA411dXTF//vxB5zc3N8fvf//72LlzZ/+jo6MjvvzlL8fOnTtj7ty5ny89cMroP5SbGQDlZgZAeek/cCJZ71iMiFi+fHncfPPNMXv27Jg3b1784he/iJ6enujo6IiIj9++/Kc//Sl+/etfx7hx4+LSSy8dcP0555wTTU1Ng44DY5/+Q7mZAVBuZgCUl/4Dw8leLC5evDgOHjwY9913X+zbty8uvfTS2LRpU0ybNi0iIvbt2xc9PT0jHhQYffoP5WYGQLmZAVBe+g8Mpy6llEY7xGfp6+uLlpaWqFQq0dzcPNpx4LRQlF4VJScUSZF6VaSsUBRF6lWRskIRFKlTRcoKRVGLXlX1qdAAAAAAQLlZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gE
AAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyFbVYnHNmjUxffr0aGpqira2tti6deuw5z799NNx9dVXxxe/+MVobm6OefPmxW9+85uqAwOjS/+h3MwAKDczAMpL/4GhZC8WN27cGMuWLYtVq1ZFd3d3LFiwIK699tro6ekZ8vwXX3wxrr766ti0aVPs2LEj/vZv/zZuuOGG6O7u/tzhgVNL/6HczAAoNzMAykv/geHUpZRSzgVz586NWbNmxdq1a/uPzZw5MxYtWhSdnZ0n9T2++tWvxuLFi+Oee+45qfP7+vqipaUlKpVKNDc358QFhlFNr/QfTg/V9soMgNODGQDlpf9QbrXoVdY7Fo8cORI7duyI9vb2Acfb29tj27ZtJ/U9jh07FocOHYqzzz572HMOHz4cfX19Ax7A6NJ/KDczAMrNDIDy0n/gRLIWi729vXH06NFobW0dcLy1tTX2799/Ut/jJz/5Sbz33ntx4403DntOZ2dntLS09D+mTp2aExOoAf2HcjMDoNzMACgv/QdOpKoPb6mrqxvwdUpp0LGhPP744/GDH/wgNm7cGOecc86w561cuTIqlUr/Y+/evdXEBGpA/6HczAAoNzMAykv/gaHU55w8adKkGD9+/KBXJQ4cODDo1YtP27hxY9x2223xxBNPxFVXXXXCcxsbG6OxsTEnGlBj+g/lZgZAuZkBUF76D5xI1jsWGxoaoq2tLbq6ugYc7+rqivnz5w973eOPPx633HJLPPbYY3H99ddXlxQYVfoP5WYGQLmZAVBe+g+cSNY7FiMili9fHjfffHPMnj075s2bF7/4xS+ip6cnOjo6IuLjty//6U9/il//+tcR8fEwWbJkSfz0pz+Nb3zjG/2vcpxxxhnR0tIygn8UoNb0H8rNDIByMwOgvPQfGFaqwkMPPZSmTZuWGhoa0qxZs9KWLVv6/9vSpUvTwoUL+79euHBhiohBj6VLl57081UqlRQRqVKpVBMXGEK1vdJ/KL7P0yszAIrPDIDy0n8ot1r0qi6llGq8u/zc+vr6oqWlJSqVSjQ3N492HDgtFKVXRckJRVKkXhUpKxRFkXpVpKxQBEXqVJGyQlHUoldVfSo0AAAAAFBuFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJVtVhcs2ZNTJ8+PZqamqKtrS22bt16wvO3bNkSbW1t0dTUFDNmzIiHH364qrDA6NN/KDczAMrNDIDy0n9gKNmLxY0bN8ayZcti1apV0d3dHQsWLIhrr702enp6hjz/9ddfj+uuuy4WLFgQ3d3d8f3vfz9uv/32eOqppz53eODU0n8oNzMAys0MgPLSf2A4dSmllHPB3LlzY9asWbF27dr+YzNnzoxFixZFZ2fnoPO/973vxXPPPRe7d+/uP9bR0RG/+93vYvv27Sf1nH19fdHS0hKVSiWam5tz4gLDqKZX+g+nh2p7ZQbA6cEMgPLSfyi3WvSqPufkI0eOxI4dO2LFihUDjre3t8e2bduGvGb79u3R
3t4+4Ng111wT69atiw8//DAmTJgw6JrDhw/H4cOH+7+uVCoR8fH/AGBkHO/Tyb62oP9w+sjtf4QZAKcTMwDKS/+h3KqZAZ8la7HY29sbR48ejdbW1gHHW1tbY//+/UNes3///iHP/+ijj6K3tzcmT5486JrOzs5YvXr1oONTp07NiQuchIMHD0ZLS8tnnqf/cPo52f5HmAFwOjIDoLz0H8otZwZ8lqzF4nF1dXUDvk4pDTr2WecPdfy4lStXxvLly/u/fuedd2LatGnR09MzYn/wWunr64upU6fG3r17x/TbtYuSM0LWWqlUKnHBBRfE2WefnXWd/g+vSH//so68ouSMqL7/EWbAiRTl30BRckbIWitmwMgr0t+/rLVRlKz6XxtF+fuPKE7WouSMKFbWzzMDhpO1WJw0aVKMHz9+0KsSBw4cGPRqxHHnnnvukOfX19fHxIkTh7ymsbExGhsbBx1vaWkZ839JxzU3Nxcia1FyRshaK+PGndxnOOn/ySvS37+sI68oOSNOvv8RZkCOovwbKErOCFlrxQwYeUX6+5e1NoqSVf9royh//xHFyVqUnBHFypozAz7ze+Wc3NDQEG1tbdHV1TXgeFdXV8yfP3/Ia+bNmzfo/M2bN8fs2bOH/L0KwNik/1BuZgCUmxkA5aX/wIlkryiXL18ejzzySKxfvz52794dd955Z/T09ERHR0dEfPz25SVLlvSf39HREW+88UYsX748du/eHevXr49169bFXXfdNXJ/CuCU0H8oNzMAys0MgPLSf2BYqQoPPfRQmjZtWmpoaEizZs1KW7Zs6f9vS5cuTQsXLhxw/gsvvJAuv/zy1NDQkC688MK0du3arOf74IMP0r333ps++OCDauKeUkXJWpScKclaK9Vm1f/hyVobRclalJwpfb6sZsDwipK1KDlTkrVWzICRV5ScKclaK0XJqv+1IevIK0rOlGStS2kEP2MaAAAAACiFkfttjQAAAABAaVgsAgAAAADZLBYBAAAAgGwWiwAAAABAtjGzWFyzZk1Mnz49mpqaoq2tLbZu3XrC87ds2RJtbW3R1NQUM2bMiIcffnjM5Xz66afj6quvji9+8YvR3Nwc8+bNi9/85jenJGdu1k96+eWXo76+Pr7+9a/XNuAn5GY9fPhwrFq1KqZNmxaNjY3xpS99KdavXz8ms27YsCEuu+yyOPPMM2Py5Mlx6623xsGDB2ua8cUXX4wbbrghzjvvvKirq4tnn332M68ZrU5FFKf/EWZALej/yDMDaqcoM6Ao/Y8wA0aa/tdOUfqfm/WTzICRy+ke4OSYAaOb85P0f2SzluoeYMQ+X/pz+Pd///c0YcKE9Mtf/jLt2rUr3XHHHemss85Kb7zxxpDn79mzJ5155pnpjjvuSLt27Uq//OUv04QJE9KTTz45pnLecccd6Yc//GH6r//6r/Tqq6+mlStXpgkTJqT/+Z//qWnOarIe984776QZM2ak9vb2dNlll9U8Z7VZv/nNb6a5c+emrq6u9Prrr6f//M//TC+//PKYy7p169Y0bty49NOf/jTt2bMnbd26NX31q19NixYtqmnOTZs2pVWrVqWnnnoqRUR65plnTnj+aHUqpeL0v5qsZkBtcur/ZzMDxkbW0ZoBRel/tVnNgBPT/7GR1T1A7bKOxgwoSv9TMgPGSlb3ALXJ6h7gxEar/2NisThnzpzU0dEx4Ngll1ySVqxYMeT5//qv/5ouueSSAcf+6Z/+KX3jG9+oWcaU8nMO5Stf+UpavXr1SEcbpNqsixcvTv/2b/+W7r333lM2UHKz/sd//EdqaWlJBw8ePBXxBsjN+uMf/zjNmDFjwLEHH3wwTZkypWYZP+1kBspodSql4vQ/JTOgFvS/9syAkVOUGVCU/qdkBtSa/o+covQ/JTOgForY/5TMgJFUlBmg/7VRxBlwKvs/6j8KfeTIkdixY0e0t7cPON7e3h7btm0b8prt27cPOv+aa66JV155JT788MMx
k/PTjh07FocOHYqzzz67FhH7VZv10Ucfjddeey3uvffemub7pGqyPvfcczF79uz40Y9+FOeff35cfPHFcdddd8Vf/vKXMZd1/vz58eabb8amTZsipRRvvfVWPPnkk3H99dfXNGuu0ehURHH6X23WTzMDBtL/saNIvSpS1k87FTOgKP2PMAPGiiJ1qkhZP809wGBFmQGnc/8jitWrImX9NPcAAxWl/9VmLcoMGKlO1Y90sFy9vb1x9OjRaG1tHXC8tbU19u/fP+Q1+/fvH/L8jz76KHp7e2Py5MljIuen/eQnP4n33nsvbrzxxhHP90nVZP3jH/8YK1asiK1bt0Z9/an7Z1FN1j179sRLL70UTU1N8cwzz0Rvb2985zvfibfffrumv1+hmqzz58+PDRs2xOLFi+ODDz6Ijz76KL75zW/Gz372s5rlrMZodCqiOP2vNuunmQED6f/YUaReFSnrp52KGVCU/keYAWNFkTpVpKyf5h5gsKLMgNO5/xHF6lWRsn6ae4CBitL/arMWZQaMVKdG/R2Lx9XV1Q34OqU06NhnnT/U8ZGWm/O4xx9/PH7wgx/Exo0b45xzzqlVvAFONuvRo0fjpptuitWrV8fFF198SrJ9Ws7/12PHjkVdXV1s2LAh5syZE9ddd1088MAD8atf/armr1bkZt21a1fcfvvtcc8998SOHTvi+eefj9dffz06OjpqnjPXaHVquOcei/0f7rnNgM9H/8eGIvWqSFmPO9UzoCj9jzADxoIidapIWY9zD3BiRZkBp2v/I4rVqyJlPc49wPCK0v/crEWaASPRqVF/x+KkSZNi/Pjxgza9Bw4cGLQ5Pe7cc88d8vz6+vqYOHHimMl53MaNG+O2226LJ554Iq666qqa5Puk3KyHDh2KV155Jbq7u+O73/1uRHxc2pRS1NfXx+bNm+PKK68cE1kjIiZPnhznn39+tLS09B+bOXNmpJTizTffjIsuumjMZO3s7Iwrrrgi7r777oiI+NrXvhZnnXVWLFiwIO6///6avaqWazQ6FVGc/leb9TgzYGRyRuh/rRSpV0XKetypnAFF6X81WSPMgFooUqeKlPU49wAjlzVidGbA6dz/iGL1qkhZj3MPMDJZI9wD1MJIdWrU37HY0NAQbW1t0dXVNeB4V1dXzJ8/f8hr5s2bN+j8zZs3x+zZs2PChAljJmfEx69O3HLLLfHYY4+dsp+nz83a3Nwcv//972Pnzp39j46Ojvjyl78cO3fujLlz546ZrBERV1xxRfz5z3+Od999t//Yq6++GuPGjYspU6aMqazvv/9+jBs3sGbjx4+PiP//lYCxYDQ6FVGc/lebNcIMGMmcEfpfK0XqVZGyRpz6GVCU/leTNcIMqIUidapIWSPcA4x01ojRmQGnc/8jitWrImWNcA8wklkj3APUwoh1KuujXmrk+Ed3r1u3Lu3atSstW7YsnXXWWel///d/U0oprVixIt1888395x//SOw777wz7dq1K61bt+6UfMx8bs7HHnss1dfXp4ceeijt27ev//HOO+/UNGc1WT/tVH4aVG7WQ4cOpSlTpqS///u/T3/4wx/Sli1b0kUXXZS+/e1vj7msjz76aKqvr09r1qxJr732WnrppZfS7Nmz05w5c2qa89ChQ6m7uzt1d3eniEgPPPBA6u7uTm+88caQOUerUykVp//VZDUDRj6n/p8cM2BsZB2tGVCU/leT1Qz4bPo/NrK6B6hN1tGaAUXpf0pmwFjJ6h5g5LO6B/hso9X/MbFYTCmlhx56KE2bNi01NDSkWbNmpS1btvT/t6VLl6aFCxcOOP+FF15Il19+eWpoaEgXXnhhWrt27ZjLuXDhwhQRgx5Lly4dc1k/7VQOlJTys+7evTtdddVV6YwzzkhTpkxJy5cvT++///6YzPrggw+mr3zlK+mMM85IkydPTv/wD/+Q3nzzzZpm/O1vf3vCf3tjqVMpFaf/uVnNgJOj/yPPDBgbWUdzBhSl/ymZ
ASNN/8dGVvcAJ68oM6AI/U/JDBgrWd0DnJyi9L+arGW6B6hLaQy9DxMAAAAAKIRR/x2LAAAAAEDxWCwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZMteLL744otxww03xHnnnRd1dXXx7LPPfuY1W7Zsiba2tmhqaooZM2bEww8/XE1WYJTpP5SbGQDlpf9QbmYAMJzsxeJ7770Xl112Wfz85z8/qfNff/31uO6662LBggXR3d0d3//+9+P222+Pp556KjssMLr0H8rNDIDy0n8oNzMAGE5dSilVfXFdXTzzzDOxaNGiYc/53ve+F88991zs3r27/1hHR0f87ne/i+3bt1f71MAo038oNzMAykv/odzMAOCT6mv9BNu3b4/29vYBx6655ppYt25dfPjhhzFhwoRB1xw+fDgOHz7c//WxY8fi7bffjokTJ0ZdXV2tI0MppJTi0KFDcd5558W4cbX5dav6D2PTqeh/hBkAY9VYvQeIMAOg1twDQLnVYgbUfLG4f//+aG1tHXCstbU1Pvroo+jt7Y3JkycPuqazszNWr15d62hAROzduzemTJlSk++t/zC21bL/EWYAjHVj7R4gwgyAU8U9AJTbSM6Ami8WI2LQqwvHf/p6uFcdVq5cGcuXL+//ulKpxAUXXBB79+6N5ubm2gWFEunr64upU6fGX//1X9f0efQfxp5T1f8IMwDGorF6DxBhBkCtuQeAcqvFDKj5YvHcc8+N/fv3Dzh24MCBqK+vj4kTJw55TWNjYzQ2Ng463tzcbKDACKvljxXoP4xttf6xIjMAxraxdg8QYQbAqeIeAMptJGdA7X6pwv+ZN29edHV1DTi2efPmmD179rC/WwU4Peg/lJsZAOWl/1BuZgCUR/Zi8d13342dO3fGzp07I+Ljj5HfuXNn9PT0RMTHb19esmRJ//kdHR3xxhtvxPLly2P37t2xfv36WLduXdx1110j8ycAThn9h3IzA6C89B/KzQwAhpUy/fa3v00RMeixdOnSlFJKS5cuTQsXLhxwzQsvvJAuv/zy1NDQkC688MK0du3arOesVCopIlKlUsmNCwyjml7pP5wequ2VGQCnh6LcA1SbFRieewAot1r0qi6l//sNqmNYX19ftLS0RKVS8bsVYIQUpVdFyQlFUqReFSkrFEWRelWkrFAERepUkbJCUdSiVzX/HYsAAAAAwOnHYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQLaqFotr1qyJ6dOnR1NTU7S1tcXWrVtPeP6GDRvisssuizPPPDMmT54ct956axw8eLCqwMDo0n8oNzMAys0MgPLSf2Ao2YvFjRs3xrJly2LVqlXR3d0dCxYsiGuvvTZ6enqGPP+ll16KJUuWxG233RZ/+MMf4oknnoj//u//jm9/+9ufOzxwauk/lJsZAOVmBkB56T8wrJRpzpw5qaOjY8CxSy65JK1YsWLI83/84x+nGTNmDDj24IMPpilTppz0c1YqlRQRqVKp5MYFhlFNr/QfTg/V9soMgNODGQDlpf9QbrXoVdY7Fo8cORI7duyI9vb2Acfb29tj27ZtQ14zf/78ePPNN2PTpk2RUoq33nor
nnzyybj++uuHfZ7Dhw9HX1/fgAcwuvQfys0MgHIzA6C89B84kazFYm9vbxw9ejRaW1sHHG9tbY39+/cPec38+fNjw4YNsXjx4mhoaIhzzz03vvCFL8TPfvazYZ+ns7MzWlpa+h9Tp07NiQnUgP5DuZkBUG5mAJSX/gMnUtWHt9TV1Q34OqU06Nhxu3btittvvz3uueee2LFjRzz//PPx+uuvR0dHx7Dff+XKlVGpVPofe/furSYmUAP6D+VmBkC5mQFQXvoPDKU+5+RJkybF+PHjB70qceDAgUGvXhzX2dkZV1xxRdx9990REfG1r30tzjrrrFiwYEHcf//9MXny5EHXNDY2RmNjY040oMb0H8rNDIByMwOgvPQfOJGsdyw2NDREW1tbdHV1DTje1dUV8+fPH/Ka999/P8aNG/g048ePj4iPX+EAikH/odzMACg3MwDKS/+BE8n+Uejly5fHI488EuvXr4/du3fHnXfeGT09Pf1vaV65cmUsWbKk//wbbrghnn766Vi7dm3s2bMnXn755bj99ttjzpw5cd55543cnwSoOf2HcjMDoNzMACgv/QeGk/Wj0BERixcvjoMHD8Z9990X+/bti0svvTQ2bdoU06ZNi4iIffv2RU9PT//5t9xySxw6dCh+/vOfx7/8y7/EF77whbjyyivjhz/84cj9KYBTQv+h3MwAKDczAMpL/4Hh1KUCvA+5r68vWlpaolKpRHNz82jHgdNCUXpVlJxQJEXqVZGyQlEUqVdFygpFUKROFSkrFEUtelXVp0IDAAAAAOVmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIJvFIgAAAACQzWIRAAAAAMhmsQgAAAAAZLNYBAAAAACyWSwCAAAAANksFgEAAACAbBaLAAAAAEA2i0UAAAAAIFtVi8U1a9bE9OnTo6mpKdra2mLr1q0nPP/w4cOxatWqmDZtWjQ2NsaXvvSlWL9+fVWBgdGl/1BuZgCUmxkA5aX/wFDqcy/YuHFjLFu2LNasWRNXXHFF/L//9//i2muvjV27dsUFF1ww5DU33nhjvPXWW7Fu3br4m7/5mzhw4EB89NFHnzs8cGrpP5SbGQDlZgZAeek/MJy6lFLKuWDu3Lkxa9asWLt2bf+xmTNnxqJFi6Kzs3PQ+c8//3x861vfij179sTZZ59dVci+vr5oaWmJSqUSzc3NVX0PYKBqeqX/cHqotldmAJwezAAoL/2HcqtFr7J+FPrIkSOxY8eOaG9vH3C8vb09tm3bNuQ1zz33XMyePTt+9KMfxfnnnx8XX3xx3HXXXfGXv/xl2Oc5fPhw9PX1DXgAo0v/odzMACg3MwDKS/+BE8n6Ueje3t44evRotLa2Djje2toa+/fvH/KaPXv2xEsvvRRNTU3xzDPPRG9vb3znO9+Jt99+e9jfr9DZ2RmrV6/OiQbUmP5DuZkBUG5mAJSX/gMnUtWHt9TV1Q34OqU06Nhxx44di7q6utiwYUPMmTMnrrvuunjggQfiV7/61bCvVqxcuTIqlUr/Y+/evdXEBGpA/6HczAAoNzMAykv/gaFkvWNx0qRJMX78+EGvShw4cGDQqxfHTZ48Oc4///xoaWnpPzZz5sxIKcWbb74ZF1100aBrGhsbo7GxMScaUGP6D+VmBkC5mQFQXvoPnEjWOxYbGhqira0turq6Bhzv6uqK+fPnD3nNFVdcEX/+85/j3Xff7T/26quvxrhx42LKlClVRAZGg/5DuZkBUG5mAJSX/gMnkv2j0MuXL49HHnkk1q9fH7t3744777wzenp6oqOjIyI+
fvvykiVL+s+/6aabYuLEiXHrrbfGrl274sUXX4y77747/vEf/zHOOOOMkfuTADWn/1BuZgCUmxkA5aX/wHCyfhQ6ImLx4sVx8ODBuO+++2Lfvn1x6aWXxqZNm2LatGkREbFv377o6enpP/+v/uqvoqurK/75n/85Zs+eHRMnTowbb7wx7r///pH7UwCnhP5DuZkBUG5mAJSX/gPDqUsppdEO8Vn6+vqipaUlKpVKNDc3j3YcOC0UpVdFyQlFUqReFSkrFEWRelWkrFAERepUkbJCUdSiV1V9KjQAAAAAUG4WiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslW1WFyzZk1Mnz49mpqaoq2tLbZu3XpS17388stRX18fX//616t5WmAM0H8oNzMAys0MgPLSf2Ao2YvFjRs3xrJly2LVqlXR3d0dCxYsiGuvvTZ6enpOeF2lUoklS5bE3/3d31UdFhhd+g/lZgZAuZkBUF76DwynLqWUci6YO3duzJo1K9auXdt/bObMmbFo0aLo7Owc9rpvfetbcdFFF8X48ePj2WefjZ07d570c/b19UVLS0tUKpVobm7OiQsMo5pe6T+cHqrtlRkApwczAMpL/6HcatGrrHcsHjlyJHbs2BHt7e0Djre3t8e2bduGve7RRx+N1157Le69996Tep7Dhw9HX1/fgAcwuvQfys0MgHIzA6C89B84kazFYm9vbxw9ejRaW1sHHG9tbY39+/cPec0f//jHWLFiRWzYsCHq6+tP6nk6OzujpaWl/zF16tScmEAN6D+UmxkA5WYGQHnpP3AiVX14S11d3YCvU0qDjkVEHD16NG666aZYvXp1XHzxxSf9/VeuXBmVSqX/sXfv3mpiAjWg/1BuZgCUmxkA5aX/wFBO7qWD/zNp0qQYP378oFclDhw4MOjVi4iIQ4cOxSuvvBLd3d3x3e9+NyIijh07FimlqK+vj82bN8eVV1456LrGxsZobGzMiQbUmP5DuZkBUG5mAJSX/gMnkvWOxYaGhmhra4uurq4Bx7u6umL+/PmDzm9ubo7f//73sXPnzv5HR0dHfPnLX46dO3fG3LlzP1964JTRfyg3MwDKzQyA8tJ/4ESy3rEYEbF8+fK4+eabY/bs2TFv3rz4xS9+ET09PdHR0RERH799+U9/+lP8+te/jnHjxsWll1464PpzzjknmpqaBh0Hxj79h3IzA6DczAAoL/0HhpO9WFy8eHEcPHgw7rvvvti3b19ceumlsWnTppg2bVpEROzbty96enpGPCgw+vQfys0MgHIzA6C89B8YTl1KKY12iM/S19cXLS0tUalUorm5ebTjwGmhKL0qSk4okiL1qkhZoSiK1KsiZYUiKFKnipQViqIWvarqU6EBAAAAgHKzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkK2qxeKaNWti+vTp
0dTUFG1tbbF169Zhz3366afj6quvji9+8YvR3Nwc8+bNi9/85jdVBwZGl/5DuZkBUG5mAJSX/gNDyV4sbty4MZYtWxarVq2K7u7uWLBgQVx77bXR09Mz5PkvvvhiXH311bFp06bYsWNH/O3f/m3ccMMN0d3d/bnDA6eW/kO5mQFQbmYAlJf+A8OpSymlnAvmzp0bs2bNirVr1/YfmzlzZixatCg6OztP6nt89atfjcWLF8c999xzUuf39fVFS0tLVCqVaG5uzokLDKOaXuk/nB6q7ZUZAKcHMwDKS/+h3GrRq6x3LB45ciR27NgR7e3tA463t7fHtm3bTup7HDt2LA4dOhRnn332sOccPnw4+vr6BjyA0aX/UG5mAJSbGQDlpf/AiWQtFnt7e+Po0aPR2to64Hhra2vs37//pL7HT37yk3jvvffixhtvHPaczs7OaGlp6X9MnTo1JyZQA/oP5WYGQLmZAVBe+g+cSFUf3lJXVzfg65TSoGNDefzxx+MHP/hBbNy4Mc4555xhz1u5cmVUKpX+x969e6uJCdSA/kO5mQFQbmYAlJf+A0Opzzl50qRJMX78+EGvShw4cGDQqxeftnHjxrjtttviiSeeiKuuuuqE5zY2NkZjY2NONKDG9B/KzQyAcjMDoLz0HziRrHcsNjQ0RFtbW3R1dQ043tXVFfPnzx/2uscffzxuueWWeOyxx+L666+vLikwqvQfys0MgHIzA6C89B84kax3LEZELF++PG6++eaYPXt2zJs3L37xi19ET09PdHR0RMTHb1/+05/+FL/+9a8j4uNhsmTJkvjpT38a3/jGN/pf5TjjjDOipaVlBP8oQK3pP5SbGQDlZgZAeek/MKxUhYceeihNmzYtNTQ0pFmzZqUtW7b0/7elS5emhQsX9n+9cOHCFBGDHkuXLj3p56tUKikiUqVSqSYuMIRqe6X/UHyfp1dmABSfGQDlpf9QbrXoVV1KKdV4d/m59fX1RUtLS1QqlWhubh7tOHBaKEqvipITiqRIvSpSViiKIvWqSFmhCIrUqSJlhaKoRa+q+lRoAAAAAKDcLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSzWAQAAAAAslksAgAAAADZLBYBAAAAgGwWiwAAAABANotFAAAAACCbxSIAAAAAkM1iEQAAAADIZrEIAAAAAGSrarG4Zs2amD59ejQ1NUVbW1ts3br1hOdv2bIl2traoqmpKWbMmBEPP/xwVWGB0af/UG5mAJSbGQDlpf/AULIXixs3boxly5bFqlWroru7OxYsWBDXXntt9PT0DHn+66+/Htddd10sWLAguru74/vf/37cfvvt8dRTT33u8MCppf9QbmYAlJsZAOWl/8Bw6lJKKeeCuXPnxqxZs2Lt2rX9x2bOnBmLFi2Kzs7OQed/73vfi+eeey52797df6yjoyN+97vfxfbt20/qOfv6+qKlpSUqlUo0NzfnxAWGUU2v9B9OD9X2ygyA04MZAOWl/1ButehVfc7JR44ciR07dsSKFSsGHG9vb49t27YNec327dujvb19wLFrrrkm1q1bFx9++GFMmDBh0DWHDx+Ow4cP939dqVQi4uP/AcDION6nk31tQf/h9JHb/wgzAE4nZgCUl/5DuVUzAz5L1mKxt7c3jh49Gq2trQOOt7a2xv79+4e8Zv/+/UOe/9FHH0Vvb29Mnjx50DWdnZ2xevXqQcenTp2aExc4CQcPHoyWlpbPPE//4fRzsv2PMAPgdGQGQHnpP5Rbzgz4LFmLxePq6uoGfJ1SGnTss84f6vhxK1eu
jOXLl/d//c4778S0adOip6dnxP7gtdLX1xdTp06NvXv3jum3axclZ4SstVKpVOKCCy6Is88+O+s6/R9ekf7+ZR15RckZUX3/I8yAEynKv4Gi5IyQtVbMgJFXpL9/WWujKFn1vzaK8vcfUZysRckZUaysn2cGDCdrsThp0qQYP378oFclDhw4MOjViOPOPffcIc+vr6+PiRMnDnlNY2NjNDY2Djre0tIy5v+Sjmtubi5E1qLkjJC1VsaNO7nPcNL/k1ekv39ZR15RckacfP8jzIAcRfk3UJScEbLWihkw8or09y9rbRQlq/7XRlH+/iOKk7UoOSOKlTVnBnzm98o5uaGhIdra2qKrq2vA8a6urpg/f/6Q18ybN2/Q+Zs3b47Zs2cP+XsVgLFJ/6HczAAoNzMAykv/gRPJXlEuX748HnnkkVi/fn3s3r077rzzzujp6YmOjo6I+Pjty0uWLOk/v6OjI954441Yvnx57N69O9avXx/r1q2Lu+66a+T+FMApof9QbmYAlJsZAOWl/8CwUhUeeuihNG3atNTQ0JBmzZqVtmzZ0v/fli5dmhYuXDjg/BdeeCFdfvnlqaGhIV144YVp7dq1Wc/3wQcfpHvvvTd98MEH1cQ9pYqStSg5U5K1VqrNqv/Dk7U2ipK1KDlT+nxZzYDhFSVrUXKmJGutmAEjryg5U5K1VoqSVf9rQ9aRV5ScKclal9IIfsY0AAAAAFAKI/fbGgEAAACA0rBYBAAAAACyWSwCAAAAANksFgEAAACAbGNmsbhmzZqYPn16NDU1RVtbW2zduvWE52/ZsiXa2tqiqakpZsyYEQ8//PCYy/n000/H1VdfHV/84hejubk55s2bF7/5zW9OSc7crJ/08ssvR319fXz961+vbcBPyM16+PDhWLVqVUybNi0aGxvjS1/6Uqxfv35MZt2wYUNcdtllceaZZ8bkyZPj1ltvjYMHD9Y044svvhg33HBDnHfeeVFXVxfPPvvsZ14zWp2KKE7/I8yAWtD/kWcG1E5RZkBR+h9hBow0/a+dovQ/N+snmQEjl9M9wMkxA0Y35yfp/8hmLdU9wIh9vvTn8O///u9pwoQJ6Ze//GXatWtXuuOOO9JZZ52V3njjjSHP37NnTzrzzDPTHXfckXbt2pV++ctfpgkTJqQnn3xyTOW844470g9/+MP0X//1X+nVV19NK1euTBMmTEj/8z//U9Oc1WQ97p133kkzZsxI7e3t6bLLLqt5zmqzfvOb30xz585NXV1d6fXXX0//+Z//mV5++eUxl3Xr1q1p3Lhx6ac//Wnas2dP2rp1a/rqV7+aFi1aVNOcmzZtSqtWrUpPPfVUioj0zDPPnPD80epUSsXpfzVZzYDa5NT/z2YGjI2sozUDitL/arOaASem/2Mjq3uA2mUdjRlQlP6nZAaMlazuAWqT1T3AiY1W/8fEYnHOnDmpo6NjwLFLLrkkrVixYsjz//Vf/zVdcsklA4790z/9U/rGN75Rs4wp5eccyle+8pW0evXqkY42SLVZFy9enP7t3/4t3XvvvadsoORm/Y//+I/U0tKSDh48eCriDZCb9cc//nGaMWPGgGMPPvhgmjJlSs0yftrJDJTR6lRKxel/SmZALeh/7ZkBI6coM6Ao/U/JDKg1/R85Rel/SmZALRSx/ymZASOpKDNA/2ujiDPgVPZ/1H8U+siRI7Fjx45ob28fcLy9vT22bds25DXbt28fdP4111wTr7zySnz44YdjJuenHTt2LA4dOhRnn312LSL2qzbro48+Gq+99lrce++9Nc33SdVkfe6552L27Nnxox/9KM4///y4+OKL46677oq//OUvYy7r/Pnz480334xNmzZFSineeuutePLJJ+P666+vadZco9GpiOL0v9qsn2YGDKT/Y0eRelWkrJ92KmZAUfofYQaMFUXqVJGyfpp7gMGKMgNO5/5HFKtXRcr6ae4BBipK/6vNWpQZMFKdqh/pYLl6e3vj6NGj0dra
OuB4a2tr7N+/f8hr9u/fP+T5H330UfT29sbkyZPHRM5P+8lPfhLvvfde3HjjjSOe75OqyfrHP/4xVqxYEVu3bo36+lP3z6KarHv27ImXXnopmpqa4plnnone3t74zne+E2+//XZNf79CNVnnz58fGzZsiMWLF8cHH3wQH330UXzzm9+Mn/3sZzXLWY3R6FREcfpfbdZPMwMG0v+xo0i9KlLWTzsVM6Ao/Y8wA8aKInWqSFk/zT3AYEWZAadz/yOK1asiZf009wADFaX/1WYtygwYqU6N+jsWj6urqxvwdUpp0LHPOn+o4yMtN+dxjz/+ePzgBz+IjRs3xjnnnFOreAOcbNajR4/GTTfdFKtXr46LL774lGT7tJz/r8eOHYu6urrYsGFDzJkzJ6677rp44IEH4le/+lXNX63Izbpr1664/fbb45577okdO3bE888/H6+//np0dHTUPGeu0erUcM89Fvs/3HObAZ+P/o8NRepVkbIed6pnQFH6H2EGjAVF6lSRsh7nHuDEijIDTtf+RxSrV0XKepx7gOEVpf+5WYs0A0aiU6P+jsVJkybF+PHjB216Dxw4MGhzety555475Pn19fUxceLEMZPzuI0bN8Ztt90WTzzxRFx11VU1yfdJuVkPHToUr7zySnR3d8d3v/vdiPi4tCmlqK+vj82bN8eVV145JrJGREyePDnOP//8aGlp6T82c+bMSCnFm2++GRdddNGYydrZ2RlXXHFF3H333RER8bWvfS3OOuusWLBgQdx///01e1Ut12h0KqI4/a8263FmwMjkjND/WilSr4qU9bhTOQOK0v9qskaYAbVQpE4VKetx7gFGLmvE6MyA07n/EcXqVZGyHuceYGSyRrgHqIWR6tSov2OxoaEh2traoqura8Dxrq6umD9//pDXzJs3b9D5mzdvjtmzZ8eECRPGTM6Ij1+duOWWW+Kxxx47ZT9Pn5u1ubk5fv/738fOnTv7Hx0dHfHlL385du7cGXPnzh0zWSMirrjiivjzn/8c7777bv+xV199NcaNGxdTpkwZU1nff//9GDduYM3Gjx8fEf//KwFjwWh0KqI4/a82a4QZMJI5I/S/VorUqyJljTj1M6Ao/a8ma4QZUAtF6lSRska4BxjprBGjMwNO5/5HFKtXRcoa4R5gJLNGuAeohRHrVNZHvdTI8Y/uXrduXdq1a1datmxZOuuss9L//u//ppRSWrFiRbr55pv7zz/+kdh33nln2rVrV1q3bt0p+Zj53JyPPfZYqq+vTw899FDat29f/+Odd96pac5qsn7aqfw0qNyshw4dSlOmTEl///d/n/7whz+kLVu2pIsuuih9+9vfHnNZH3300VRfX5/WrFmTXnvttfTSSy+l2bNnpzlz5tQ056FDh1J3d3fq7u5OEZEeeOCB1N3dnd54440hc45Wp1IqTv+ryWoGjHxO/T85ZsDYyDpaM6Ao/a8mqxnw2fR/bGR1D1CbrKM1A4rS/5TMgLGS1T3AyGd1D/DZRqv/Y2KxmFJKDz30UJo2bVpqaGhIs2bNSlu2bOn/b0uXLk0LFy4ccP4LL7yQLr/88tTQ0JAuvPDCtHbt2jGXc+HChSkiBj2WLl065rJ+2qkcKCnlZ929e3e66qqr0hlnnJGmTJmSli9fnt5///0xmfXBBx9MX/nKV9IZZ5yRJk+enP7hH/4hvfnmmzXN+Nvf/vaE//bGUqdSKk7/c7OaASdH/0eeGTA2so7mDChK/1MyA0aa/o+NrO4BTl5RZkAR+p+SGTBWsroHODlF6X81Wct0D1CX0hh6HyYAAAAAUAij/jsWAQAAAIDisVgEAAAAALJZLAIAAAAA2SwWAQAAAIBsFosAAAAAQDaLRQAAAAAgm8UiAAAAAJDNYhEAAAAAyGaxCAAAAABks1gEAAAAALJZLAIAAAAA2SwWAQAAAIBs/x+Yd4CmouZKAAAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 1600x600 with 10 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "images_and_masks = list(dataset.take(5))\n",
    "\n",
    "fig, ax = plt.subplots(nrows = 2, ncols = 5, figsize=(16, 6))\n",
    "\n",
    "for i, (image, masks) in enumerate(images_and_masks):\n",
    "    ax[0, i].set_title('Image')\n",
    "    ax[0, i].set_axis_off()\n",
    "    ax[0, i].imshow(image)\n",
    "        \n",
    "    ax[1, i].set_title('Mask')\n",
    "    ax[1, i].set_axis_off()    \n",
    "    ax[1, i].imshow(image/1.5)\n",
    "   \n",
    "    for channel in range(CLASSES):\n",
    "        contours = measure.find_contours(np.array(masks[:,:,channel]))\n",
    "        for contour in contours:\n",
    "            ax[1, i].plot(contour[:, 1], contour[:, 0], linewidth=1, color=COLORS[channel])\n",
    "\n",
    "plt.show()\n",
    "plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6bcc77cd",
   "metadata": {},
   "outputs": [],
   "source": [
    "train_dataset = dataset.take(2000).cache()\n",
    "test_dataset = dataset.skip(2000).take(100).cache()\n",
    " \n",
    "train_dataset = train_dataset.batch(8)\n",
    "test_dataset = test_dataset.batch(8)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b0d6f651",
   "metadata": {},
   "outputs": [],
   "source": [
    "def input_layer():\n",
    "    return tf.keras.layers.Input(shape=SAMPLE_SIZE + (3,))\n",
    "\n",
    "def downsample_block(filters, size, batch_norm=True):\n",
    "    initializer = tf.keras.initializers.GlorotNormal()\n",
    "\n",
    "    result = tf.keras.Sequential()\n",
    "    \n",
    "    result.add(\n",
    "      tf.keras.layers.Conv2D(filters, size, strides=2, padding='same',\n",
    "                             kernel_initializer=initializer, use_bias=False))\n",
    "\n",
    "    if batch_norm:\n",
    "        result.add(tf.keras.layers.BatchNormalization())\n",
    "    \n",
    "    result.add(tf.keras.layers.LeakyReLU())\n",
    "    return result\n",
    "\n",
    "def upsample_block(filters, size, dropout=False):\n",
    "    initializer = tf.keras.initializers.GlorotNormal()\n",
    "\n",
    "    result = tf.keras.Sequential()\n",
    "    \n",
    "    result.add(\n",
    "        tf.keras.layers.Conv2DTranspose(filters, size, strides=2, padding='same',\n",
    "                                        kernel_initializer=initializer, use_bias=False))\n",
    "\n",
    "    result.add(tf.keras.layers.BatchNormalization())\n",
    "    \n",
    "    if dropout:\n",
    "        result.add(tf.keras.layers.Dropout(0.25))\n",
    "    \n",
    "    result.add(tf.keras.layers.ReLU())\n",
    "    return result\n",
    "\n",
    "def output_layer(size):\n",
    "    initializer = tf.keras.initializers.GlorotNormal()\n",
    "    return tf.keras.layers.Conv2DTranspose(CLASSES, size, strides=2, padding='same',\n",
    "                                           kernel_initializer=initializer, activation='sigmoid')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "be559e13",
   "metadata": {},
   "outputs": [],
   "source": [
    "inp_layer = input_layer()\n",
    "\n",
    "downsample_stack = [\n",
    "    downsample_block(64, 4, batch_norm=False),\n",
    "    downsample_block(128, 4),\n",
    "    downsample_block(256, 4),\n",
    "    downsample_block(512, 4),\n",
    "    downsample_block(512, 4),\n",
    "    downsample_block(512, 4),\n",
    "    downsample_block(512, 4),\n",
    "]\n",
    "\n",
    "upsample_stack = [\n",
    "    upsample_block(512, 4, dropout=True),\n",
    "    upsample_block(512, 4, dropout=True),\n",
    "    upsample_block(512, 4, dropout=True),\n",
    "    upsample_block(256, 4),\n",
    "    upsample_block(128, 4),\n",
    "    upsample_block(64, 4)\n",
    "]\n",
    "\n",
    "out_layer = output_layer(4)\n",
    "\n",
    "# Реализуем skip connections\n",
    "x = inp_layer\n",
    "\n",
    "downsample_skips = []\n",
    "\n",
    "for block in downsample_stack:\n",
    "    x = block(x)\n",
    "    downsample_skips.append(x)\n",
    "    \n",
    "downsample_skips = reversed(downsample_skips[:-1])\n",
    "\n",
    "for up_block, down_block in zip(upsample_stack, downsample_skips):\n",
    "    x = up_block(x)\n",
    "    x = tf.keras.layers.Concatenate()([x, down_block])\n",
    "\n",
    "out_layer = out_layer(x)\n",
    "\n",
    "unet_like = tf.keras.Model(inputs=inp_layer, outputs=out_layer)\n",
    "\n",
    "tf.keras.utils.plot_model(unet_like, show_shapes=True, dpi=72)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3f2699d0",
   "metadata": {},
   "outputs": [],
   "source": [
    "def dice_mc_metric(a, b):\n",
    "    \"\"\"Soft Dice coefficient averaged over the CLASSES channels (axis 3).\"\"\"\n",
    "    a = tf.unstack(a, axis=3)\n",
    "    b = tf.unstack(b, axis=3)\n",
    "\n",
    "    dice_summ = 0.0\n",
    "\n",
    "    for aa, bb in zip(a, b):\n",
    "        # +1 in numerator and denominator smooths the ratio and avoids\n",
    "        # division by zero on channels that are empty in both tensors.\n",
    "        numerator = 2 * tf.math.reduce_sum(aa * bb) + 1\n",
    "        denominator = tf.math.reduce_sum(aa + bb) + 1\n",
    "        dice_summ += numerator / denominator\n",
    "\n",
    "    return dice_summ / CLASSES\n",
    "\n",
    "def dice_mc_loss(a, b):\n",
    "    \"\"\"Multiclass Dice loss: 1 - mean Dice.\"\"\"\n",
    "    return 1 - dice_mc_metric(a, b)\n",
    "\n",
    "def dice_bce_mc_loss(a, b):\n",
    "    \"\"\"Combined loss: 0.3 * Dice loss + per-pixel binary cross-entropy.\"\"\"\n",
    "    return 0.3 * dice_mc_loss(a, b) + tf.keras.losses.binary_crossentropy(a, b)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6ae35832",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Train with the combined Dice+BCE loss; report mean multiclass Dice as the metric.\n",
    "unet_like.compile(optimizer='adam', loss=[dice_bce_mc_loss], metrics=[dice_mc_metric])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "43a8bd6e",
   "metadata": {},
   "outputs": [],
   "source": [
    "history_dice = unet_like.fit(train_dataset, validation_data=test_dataset, epochs=25, initial_epoch=0)\n",
    "\n",
    "# Keras 3 (bundled with TF >= 2.16, see the version printed above) requires\n",
    "# weights filepaths to end in '.weights.h5'; a bare path raises ValueError.\n",
    "unet_like.save_weights('UNet/networks/unet.weights.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3796e369",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Restore the weights saved by the training cell above.\n",
    "unet_like.load_weights('UNet/networks/unet.weights.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "dd150ef5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# RGB palette indexed by class channel (mirrors the COLORS list above).\n",
    "rgb_colors = [\n",
    "    (0,   0,   0),\n",
    "    (255, 0,   0),\n",
    "    (0,   255, 0),\n",
    "    (0,   0,   255),\n",
    "    (255, 165, 0),\n",
    "    (255, 192, 203),\n",
    "    (0,   255, 255),\n",
    "    (255, 0,   255)\n",
    "]\n",
    "\n",
    "frames = sorted(glob.glob('UNet/Data_test/*.jpg'))\n",
    "\n",
    "for filename in frames:\n",
    "    frame = imread(filename)\n",
    "    # skimage.resize returns floats in [0, 1], matching the training normalization.\n",
    "    sample = resize(frame, SAMPLE_SIZE)\n",
    "\n",
    "    # verbose=0 keeps per-frame predict() calls from flooding the cell output.\n",
    "    predict = unet_like.predict(sample.reshape((1,) + SAMPLE_SIZE + (3,)), verbose=0)\n",
    "    predict = predict.reshape(SAMPLE_SIZE + (CLASSES,))\n",
    "\n",
    "    # Scale factors mapping contour coordinates back onto the full-size frame.\n",
    "    scale = frame.shape[0] / SAMPLE_SIZE[0], frame.shape[1] / SAMPLE_SIZE[1]\n",
    "\n",
    "    # Darken the frame so the colored contours stand out.\n",
    "    frame = (frame / 1.5).astype(np.uint8)\n",
    "\n",
    "    # Channel 0 is background: skip it.\n",
    "    for channel in range(1, CLASSES):\n",
    "        contour_overlay = np.zeros((frame.shape[0], frame.shape[1]))\n",
    "        contours = measure.find_contours(np.array(predict[:, :, channel]))\n",
    "\n",
    "        # Best effort: degenerate contours can make polygon_perimeter fail;\n",
    "        # skip the channel rather than abort the whole frame. Catch Exception,\n",
    "        # not a bare except, so KeyboardInterrupt still stops the loop.\n",
    "        try:\n",
    "            for contour in contours:\n",
    "                rr, cc = polygon_perimeter(contour[:, 0] * scale[0],\n",
    "                                           contour[:, 1] * scale[1],\n",
    "                                           shape=contour_overlay.shape)\n",
    "                contour_overlay[rr, cc] = 1\n",
    "\n",
    "            # Thicken the 1-px contour and paint it in the class color.\n",
    "            contour_overlay = dilation(contour_overlay, disk(1))\n",
    "            frame[contour_overlay == 1] = rgb_colors[channel]\n",
    "        except Exception:\n",
    "            pass\n",
    "\n",
    "    imsave(f'UNet/Processed/{os.path.basename(filename)}', frame)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
