{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "c8a4fbd7",
   "metadata": {},
   "source": [
    "# Fine-tuning on Pre-trained Model for Cell-type Annotation\n",
    "In this tutorial, we demonstrate how to fine-tune a pre-trained model on a new dataset for the cell type annotation task. We use the Multiple Sclerosis dataset as an example and fine-tune on the pre-trained whole-body model. Please download the dataset folder from https://drive.google.com/drive/folders/1Qd42YNabzyr2pWt9xoY4cVMTAxsNBt4v?usp=sharing\n",
    "\n",
    "We summarize the fine-tuning pipeline in the following steps, which can be used as a general recipe for finetuning on cell-type annotation tasks and beyond: \n",
    "\n",
     "     1. Specify hyper-parameter setup for the cell-type annotation task\n",
    "     \n",
    "     2. Load and pre-process data\n",
    "     \n",
    "     3. Load the pre-trained scGPT model\n",
    "     \n",
    "     4. Finetune scGPT with task-specific objectives\n",
    "     \n",
    "     5. Evaluate fine-tuned scGPT"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "9406b4da",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/root/anaconda3/envs/scgpt_new/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n",
      "/root/anaconda3/envs/scgpt_new/lib/python3.10/site-packages/torch_npu/dynamo/__init__.py:18: UserWarning: Register eager implementation for the 'npu' backend of dynamo, as torch_npu was not compiled with torchair.\n",
      "  warnings.warn(\n",
      "/root/anaconda3/envs/scgpt_new/lib/python3.10/site-packages/torch_npu/contrib/transfer_to_npu.py:164: ImportWarning: \n",
      "    *************************************************************************************************************\n",
      "    The torch.Tensor.cuda and torch.nn.Module.cuda are replaced with torch.Tensor.npu and torch.nn.Module.npu now..\n",
      "    The torch.cuda.DoubleTensor is replaced with torch.npu.FloatTensor cause the double type is not supported now..\n",
      "    The backend in torch.distributed.init_process_group set to hccl now..\n",
      "    The torch.cuda.* and torch.cuda.amp.* are replaced with torch.npu.* and torch.npu.amp.* now..\n",
      "    The device parameters have been replaced with npu in the function below:\n",
      "    torch.logspace, torch.randint, torch.hann_window, torch.rand, torch.full_like, torch.ones_like, torch.rand_like, torch.randperm, torch.arange, torch.frombuffer, torch.normal, torch._empty_per_channel_affine_quantized, torch.empty_strided, torch.empty_like, torch.scalar_tensor, torch.tril_indices, torch.bartlett_window, torch.ones, torch.sparse_coo_tensor, torch.randn, torch.kaiser_window, torch.tensor, torch.triu_indices, torch.as_tensor, torch.zeros, torch.randint_like, torch.full, torch.eye, torch._sparse_csr_tensor_unsafe, torch.empty, torch._sparse_coo_tensor_unsafe, torch.blackman_window, torch.zeros_like, torch.range, torch.sparse_csr_tensor, torch.randn_like, torch.from_file, torch._cudnn_init_dropout_state, torch._empty_affine_quantized, torch.linspace, torch.hamming_window, torch.empty_quantized, torch._pin_memory, torch.Tensor.new_empty, torch.Tensor.new_empty_strided, torch.Tensor.new_full, torch.Tensor.new_ones, torch.Tensor.new_tensor, torch.Tensor.new_zeros, torch.Tensor.to, torch.nn.Module.to, torch.nn.Module.to_empty\n",
      "    *************************************************************************************************************\n",
      "    \n",
      "  warnings.warn(msg, ImportWarning)\n",
      "/root/anaconda3/envs/scgpt_new/lib/python3.10/site-packages/scanpy/_settings.py:488: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`\n",
      "  IPython.display.set_matplotlib_formats(*ipython_format)\n"
     ]
    }
   ],
   "source": [
    "# %%\n",
    "import copy\n",
    "import gc\n",
    "import json\n",
    "import os\n",
    "from pathlib import Path\n",
    "import shutil\n",
    "import sys\n",
    "import time\n",
    "import traceback\n",
    "from typing import List, Tuple, Dict, Union, Optional\n",
    "import warnings\n",
    "import pandas as pd\n",
    "# from . import asyn\n",
    "import pickle\n",
    "import torch\n",
    "from anndata import AnnData\n",
    "import scanpy as sc\n",
    "# import scvi\n",
    "import seaborn as sns\n",
    "import numpy as np\n",
    "import wandb\n",
    "from scipy.sparse import issparse\n",
    "import matplotlib.pyplot as plt\n",
    "from torch import nn\n",
    "\n",
    "from torch_npu.contrib import transfer_to_npu\n",
    "import torch_npu\n",
    "\n",
    "from torch.nn import functional as F\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.metrics import adjusted_rand_score, normalized_mutual_info_score\n",
    "from torchtext.vocab import Vocab\n",
    "from torchtext._torchtext import (\n",
    "    Vocab as VocabPybind,\n",
    ")\n",
    "from sklearn.metrics import confusion_matrix\n",
    "\n",
    "sys.path.insert(0, \"../\")\n",
    "import ricefm as rfm\n",
    "from ricefm.model.model_main import TransformerModel, AdversarialDiscriminator\n",
    "from ricefm.tokenizer import tokenize_and_pad_batch, random_mask_value\n",
    "from ricefm.loss import (\n",
    "    masked_mse_loss,\n",
    "    masked_relative_error,\n",
    "    criterion_neg_log_bernoulli,\n",
    ")\n",
    "from ricefm.tokenizer.gene_tokenizer import GeneVocab\n",
    "from ricefm.preprocess import Preprocessor\n",
    "from ricefm import SubsetsBatchSampler\n",
    "from ricefm.utils import set_seed, category_str2int, eval_scib_metrics\n",
    "\n",
    "sc.set_figure_params(figsize=(6, 6))\n",
    "os.environ[\"KMP_WARNINGS\"] = \"off\"\n",
    "warnings.filterwarnings('ignore')"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "5f3b5d67",
   "metadata": {},
   "source": [
    "## Step1: Specify hyper-parameter setup for cell-type annotation task\n",
    "Listed below are some hyper-parameter recommendations for the cell-type task. Note that the CLS objective is on to facilitate cell-type classification."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "d07b5257",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Hyper-parameter defaults for the cell-type annotation fine-tuning run.\n",
     "hyperparameter_defaults = dict(\n",
     "    seed=0,  # random seed, applied via set_seed below\n",
     "    dataset_name=\"SRP250946\",  # dataset identifier, also used in the save-dir name\n",
     "    do_train=True,  # whether to run the fine-tuning loop\n",
     "    load_model=\"/root/riceFM_save/eval-Nov05-18-46-2025\",  # dir containing best_model.pt, args.json, vocab.json\n",
     "    mask_ratio=0.0,  # value-masking ratio; 0.0 disables masking for annotation\n",
     "    epochs=10,  # number of fine-tuning epochs\n",
     "    n_bins=51,  # number of expression-value bins used by the preprocessor\n",
     "    MVC=False, # Masked value prediction for cell embedding\n",
     "    ecs_thres=0.0, # Elastic cell similarity objective, 0.0 to 1.0, 0.0 to disable\n",
     "    dab_weight=0.0,  # weight of the domain-adaptation (DAB) loss term\n",
     "    lr=1e-4,  # learning rate\n",
     "    batch_size=32,  # train and eval batch size\n",
     "    layer_size=256,  # embedding and feed-forward dimension (embsize / d_hid)\n",
     "    nlayers=6,  # number of nn.TransformerEncoderLayer in nn.TransformerEncoder\n",
     "    nhead=4,  # number of heads in nn.MultiheadAttention\n",
     "    dropout=0.2,  # dropout probability\n",
     "    schedule_ratio=0.9,  # ratio of epochs for learning rate schedule\n",
     "    save_eval_interval=5,  # evaluate/save every N epochs\n",
     "    pre_norm=False,  # pre-norm vs post-norm transformer layers -- TODO confirm against model args\n",
     "    amp=True,  # Automatic Mixed Precision\n",
     "    include_zero_gene = False,  # include zero-expressed genes among model inputs\n",
     "    freeze = False, #freeze pre-trained weights during fine-tuning -- TODO confirm which params are affected\n",
     "    DSBN = False,  # Domain-spec batchnorm\n",
     ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "94c08ee0",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\u001b[34m\u001b[1mwandb\u001b[0m: \u001b[33mWARNING\u001b[0m `start_method` is deprecated and will be removed in a future version of wandb. This setting is currently non-functional and safely ignored.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'seed': 0, 'dataset_name': 'SRP250946', 'do_train': True, 'load_model': '/root/riceFM_save/eval-Nov05-18-46-2025', 'mask_ratio': 0.0, 'epochs': 10, 'n_bins': 51, 'MVC': False, 'ecs_thres': 0.0, 'dab_weight': 0.0, 'lr': 0.0001, 'batch_size': 32, 'layer_size': 256, 'nlayers': 6, 'nhead': 4, 'dropout': 0.2, 'schedule_ratio': 0.9, 'save_eval_interval': 5, 'pre_norm': False, 'amp': True, 'include_zero_gene': False, 'freeze': False, 'DSBN': False}\n"
     ]
    }
   ],
   "source": [
     "# Initialize a wandb run in disabled mode: nothing is uploaded, but\n",
     "# `wandb.config` still gives attribute-style access to the hyper-parameters.\n",
     "run = wandb.init(\n",
     "    config=hyperparameter_defaults,\n",
     "    project=\"ricefm\",\n",
     "    reinit=True,\n",
     "    settings=wandb.Settings(start_method=\"fork\"),  # NOTE: start_method is deprecated and ignored by recent wandb\n",
     "    mode=\"disabled\"  # local-only run; no metrics are logged remotely\n",
     ")\n",
     "config = wandb.config\n",
     "print(config)\n",
     "\n",
     "set_seed(config.seed)  # seed RNGs via ricefm.utils.set_seed -- TODO confirm which libraries it covers"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "7d7890b7",
   "metadata": {},
   "outputs": [],
   "source": [
     "# settings for input and preprocessing\n",
     "pad_token = \"<pad>\"\n",
     "special_tokens = [pad_token, \"<cls>\", \"<eoc>\"]\n",
     "mask_ratio = config.mask_ratio\n",
     "mask_value = \"auto\"  # for masked values, now it should always be auto\n",
     "\n",
     "include_zero_gene = config.include_zero_gene  # if True, include zero genes among hvgs in the training\n",
     "max_seq_len = 3001  # maximum tokenized length per cell (genes + <cls>)\n",
     "n_bins = config.n_bins\n",
     "\n",
     "# input/output representation\n",
     "input_style = \"binned\"  # \"normed_raw\", \"log1p\", or \"binned\"\n",
     "output_style = \"binned\"  # \"normed_raw\", \"log1p\", or \"binned\"\n",
     "\n",
     "# settings for training\n",
     "MLM = False  # masked language modeling objective; disabled for this annotation fine-tuning run\n",
     "CLS = True  # celltype classification objective\n",
     "ADV = False  # Adversarial training for batch correction\n",
     "CCE = False  # Contrastive cell embedding objective\n",
     "MVC = config.MVC  # Masked value prediction for cell embedding\n",
     "ECS = config.ecs_thres > 0  # Elastic cell similarity objective\n",
     "DAB = False  # Domain adaptation by reverse backpropagation, set to 2 for separate optimizer\n",
     "INPUT_BATCH_LABELS = False  # TODO: have these help MLM and MVC, while not to classifier\n",
     "input_emb_style = \"continuous\"  # \"category\" or \"continuous\" or \"scaling\"\n",
     "cell_emb_style = \"cls\"  # \"avg-pool\" or \"w-pool\" or \"cls\"\n",
     "adv_E_delay_epochs = 0  # delay adversarial training on encoder for a few epochs\n",
     "adv_D_delay_epochs = 0  # delay adversarial training on discriminator\n",
     "mvc_decoder_style = \"inner product\"\n",
     "ecs_threshold = config.ecs_thres\n",
     "dab_weight = config.dab_weight\n",
     "\n",
     "explicit_zero_prob = MLM and include_zero_gene  # whether explicit bernoulli for zeros\n",
     "do_sample_in_train = False and explicit_zero_prob  # sample the bernoulli in training\n",
     "\n",
     "per_seq_batch_sample = False  # if True, each mini-batch is drawn from one sequencing batch\n",
     "\n",
     "# settings for optimizer\n",
     "lr = config.lr  # TODO: test learning rate ratio between two tasks\n",
     "lr_ADV = 1e-3  # learning rate for discriminator, used when ADV is True\n",
     "batch_size = config.batch_size\n",
     "eval_batch_size = config.batch_size\n",
     "epochs = config.epochs\n",
     "schedule_interval = 1  # epochs between LR-scheduler steps -- TODO confirm usage in training loop\n",
     "\n",
     "# settings for the model\n",
     "embsize = config.layer_size  # embedding dimension\n",
     "d_hid = config.layer_size  # dimension of the feedforward network in TransformerEncoder\n",
     "nlayers = config.nlayers  # number of TransformerEncoderLayer in TransformerEncoder\n",
     "nhead = config.nhead  # number of heads in nn.MultiheadAttention\n",
     "dropout = config.dropout  # dropout probability\n",
     "\n",
     "# logging\n",
     "log_interval = 100  # iterations\n",
     "save_eval_interval = config.save_eval_interval  # epochs\n",
     "do_eval_scib_metrics = True\n",
     "\n",
     "# modality code stored in obs['mod_type']: sc ---> 0, st ---> 1\n",
     "mod_type = 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "17ff2309",
   "metadata": {},
   "outputs": [],
   "source": [
     "# %% validate settings\n",
     "# Sanity-check that the style/embedding choices are mutually compatible and\n",
     "# derive the mask/pad sentinel values and input bin count from them.\n",
     "assert input_style in [\"normed_raw\", \"log1p\", \"binned\"]\n",
     "assert output_style in [\"normed_raw\", \"log1p\", \"binned\"]\n",
     "assert input_emb_style in [\"category\", \"continuous\", \"scaling\"]\n",
     "if input_style == \"binned\":\n",
     "    if input_emb_style == \"scaling\":\n",
     "        raise ValueError(\"input_emb_style `scaling` is not supported for binned input.\")\n",
     "elif input_style == \"log1p\" or input_style == \"normed_raw\":\n",
     "    if input_emb_style == \"category\":\n",
     "        raise ValueError(\n",
     "            \"input_emb_style `category` is not supported for log1p or normed_raw input.\"\n",
     "        )\n",
     "\n",
     "if input_emb_style == \"category\":\n",
     "    # categorical embedding: mask and pad get their own bins above n_bins\n",
     "    mask_value = n_bins + 1\n",
     "    pad_value = n_bins  # for padding gene expr values\n",
     "    n_input_bins = n_bins + 2\n",
     "else:\n",
     "    # continuous/scaling embedding: use sentinels outside the bin range\n",
     "    mask_value = -1\n",
     "    pad_value = -2\n",
     "    n_input_bins = n_bins\n",
     "\n",
     "if ADV and DAB:\n",
     "    raise ValueError(\"ADV and DAB cannot be both True.\")\n",
     "DAB_separate_optim = True if DAB > 1 else False  # DAB == 2 requests a separate optimizer for the DAB head"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "cf7112d8",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "save to save/dev_SRP250946-Dec03-14-24\n"
     ]
    }
   ],
   "source": [
     "# Create a timestamped output directory and mirror log output to a file there.\n",
     "dataset_name = config.dataset_name\n",
     "save_dir = Path(f\"./save/dev_{dataset_name}-{time.strftime('%b%d-%H-%M')}/\")\n",
     "save_dir.mkdir(parents=True, exist_ok=True)\n",
     "print(f\"save to {save_dir}\")\n",
     "logger = rfm.logger\n",
     "rfm.utils.add_file_handler(logger, save_dir / \"run.log\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "37fc7002",
   "metadata": {},
   "source": [
    "## Step 2: Load and pre-process data\n",
     "We follow the standard scGPT data pre-processing pipelines for the cell-type annotation task. Note that since we now have two datasets at hand (i.e., reference and query data), the same pre-processing steps need to be applied to both of them."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "95b50200",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Load reference (train) and query (test) AnnData objects.\n",
     "# NOTE(review): hardcoded absolute paths -- adjust for your environment.\n",
     "adata = sc.read(\"/root/celltype_annotation/SRP250946/SRX7814225.h5ad\")\n",
     "adata_test = sc.read(\"/root/celltype_annotation/SRP250946/SRX7814224.h5ad\")\n",
     "adata.obs[\"celltype\"] = adata.obs[\"Celltype\"].astype(\"category\")\n",
     "adata_test.obs[\"celltype\"] = adata_test.obs[\"Celltype\"].astype(\"category\")\n",
     "# reference cells are labeled batch \"0\", query cells batch \"1\"\n",
     "adata.obs[\"batch_id\"]  = adata.obs[\"str_batch\"] = \"0\"\n",
     "adata_test.obs[\"batch_id\"]  = adata_test.obs[\"str_batch\"] = \"1\"   \n",
     "adata.obs['mod_type'] = mod_type\n",
     "adata_test.obs['mod_type'] = mod_type\n",
     "\n",
     "\n",
     "# adata.var.set_index(adata.var[\"gene_name\"], inplace=True)\n",
     "# adata_test.var.set_index(adata.var[\"gene_name\"], inplace=True)  # NOTE(review): if re-enabled, this would index adata_test by adata's genes -- likely a copy-paste bug\n",
     "data_is_raw = True\n",
     "filter_gene_by_counts = False\n",
     "adata_test_raw = adata_test.copy()  # keep an unprocessed copy of the query data for later evaluation\n",
     "adata = adata.concatenate(adata_test, batch_key=\"str_batch\")\n",
     "                \n",
     "# make the batch category column\n",
     "batch_id_labels = adata.obs[\"str_batch\"].astype(\"category\").cat.codes.values\n",
     "adata.obs[\"batch_id\"] = batch_id_labels\n",
     "celltype_id_labels = adata.obs[\"celltype\"].astype(\"category\").cat.codes.values\n",
     "celltypes = adata.obs[\"celltype\"].unique()\n",
     "num_types = len(np.unique(celltype_id_labels))\n",
     "id2type = dict(enumerate(adata.obs[\"celltype\"].astype(\"category\").cat.categories))\n",
     "adata.obs[\"celltype_id\"] = celltype_id_labels\n",
     "adata.var[\"gene_name\"] = adata.var.index.tolist()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0dc5a6ed",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "riceFM - INFO - match 25872/25872 genes in vocabulary of size 40158.\n",
      "riceFM - INFO - Resume model from /root/riceFM_save/eval-Nov05-18-46-2025/best_model.pt, the model args will override the config /root/riceFM_save/eval-Nov05-18-46-2025/args.json.\n"
     ]
    }
   ],
   "source": [
     "# Load the pre-trained checkpoint's vocabulary and architecture config.\n",
     "# The checkpoint's args override the notebook's model hyper-parameters.\n",
     "if config.load_model is not None:\n",
     "    model_dir = Path(config.load_model)\n",
     "    model_config_file = model_dir / \"args.json\"\n",
     "    model_file = model_dir / \"best_model.pt\"\n",
     "    vocab_file = model_dir / \"vocab.json\"\n",
     "\n",
     "    vocab = GeneVocab.from_file(vocab_file)\n",
     "    # snapshot vocab/args next to this run's outputs for reproducibility\n",
     "    shutil.copy(vocab_file, save_dir / \"vocab.json\")\n",
     "    shutil.copy(model_config_file, save_dir / \"args.json\")\n",
     "    for s in special_tokens:\n",
     "        if s not in vocab:\n",
     "            vocab.append_token(s)\n",
     "\n",
     "    # mark genes present in the pre-trained vocabulary (1) vs absent (-1)\n",
     "    adata.var[\"id_in_vocab\"] = [\n",
     "        1 if gene in vocab else -1 for gene in adata.var[\"gene_name\"]\n",
     "    ]\n",
     "    gene_ids_in_vocab = np.array(adata.var[\"id_in_vocab\"])\n",
     "    logger.info(\n",
     "        f\"match {np.sum(gene_ids_in_vocab >= 0)}/{len(gene_ids_in_vocab)} genes \"\n",
     "        f\"in vocabulary of size {len(vocab)}.\"\n",
     "    )\n",
     "    # drop genes the pre-trained model has never seen\n",
     "    adata = adata[:, adata.var[\"id_in_vocab\"] >= 0]\n",
     "\n",
     "    # model\n",
     "    with open(model_config_file, \"r\") as f:\n",
     "        model_configs = json.load(f)\n",
     "    logger.info(\n",
     "        f\"Resume model from {model_file}, the model args will override the \"\n",
     "        f\"config {model_config_file}.\"\n",
     "    )\n",
     "    embsize = model_configs[\"embsize\"]\n",
     "    nhead = model_configs[\"nheads\"]\n",
     "    d_hid = model_configs[\"d_hid\"]\n",
     "    nlayers = model_configs[\"nlayers\"]\n",
     "    n_layers_cls = model_configs[\"n_layers_cls\"]\n",
     "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "d08757ed",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "riceFM - INFO - Filtering cells by counts ...\n",
      "riceFM - INFO - Normalizing total counts ...\n",
      "riceFM - INFO - Log1p transforming ...\n",
      "riceFM - INFO - Binning data ...\n",
      "riceFM - INFO - Filtering cells by counts ...\n",
      "riceFM - INFO - Normalizing total counts ...\n",
      "riceFM - INFO - Log1p transforming ...\n",
      "riceFM - INFO - Binning data ...\n"
     ]
    }
   ],
   "source": [
     "# set up the preprocessor, use the args to config the workflow\n",
     "preprocessor = Preprocessor(\n",
     "    use_key=\"X\",  # the key in adata.layers to use as raw data\n",
     "    filter_gene_by_counts=filter_gene_by_counts,  # step 1\n",
     "    filter_cell_by_counts=False,  # step 2\n",
     "    normalize_total=1e4,  # 3. whether to normalize the raw data and to what sum\n",
     "    result_normed_key=\"X_normed\",  # the key in adata.layers to store the normalized data\n",
     "    log1p=data_is_raw,  # 4. whether to log1p the normalized data\n",
     "    result_log1p_key=\"X_log1p\",\n",
     "    subset_hvg=False,  # 5. whether to subset the raw data to highly variable genes\n",
     "    hvg_flavor=\"seurat_v3\" if data_is_raw else \"cell_ranger\",\n",
     "    binning=n_bins,  # 6. whether to bin the raw data and to what number of bins\n",
     "    result_binned_key=\"X_binned\",  # the key in adata.layers to store the binned data\n",
     ")\n",
     "\n",
     "\n",
     "# split the concatenated object back into reference (\"0\") and query (\"1\").\n",
     "# NOTE(review): boolean subsetting yields AnnData views; the preprocessor then\n",
     "# writes layers back, which may trigger implicit copies -- consider .copy() here.\n",
     "adata_test = adata[adata.obs[\"str_batch\"] == \"1\"]\n",
     "adata = adata[adata.obs[\"str_batch\"] == \"0\"]\n",
     "\n",
     "# preprocess reference and query identically (no per-batch HVG selection)\n",
     "preprocessor(adata, batch_key=None)\n",
     "preprocessor(adata_test, batch_key=None)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "cbc1b20f",
   "metadata": {},
   "outputs": [],
   "source": [
     "input_layer_key = {  # the values of this map correspond to the keys in preprocessing\n",
     "    \"normed_raw\": \"X_normed\",\n",
     "    \"log1p\": \"X_normed\",\n",
     "    \"binned\": \"X_binned\",\n",
     "}[input_style]\n",
     "# densify the chosen layer (`.A` converts a scipy sparse matrix to ndarray)\n",
     "all_counts = (\n",
     "    adata.layers[input_layer_key].A\n",
     "    if issparse(adata.layers[input_layer_key])\n",
     "    else adata.layers[input_layer_key]\n",
     ")\n",
     "genes = adata.var[\"gene_name\"].tolist()\n",
     "\n",
     "celltypes_labels = adata.obs[\"celltype_id\"].tolist()  # make sure count from 0\n",
     "celltypes_labels = np.array(celltypes_labels)\n",
     "\n",
     "batch_ids = adata.obs[\"batch_id\"].tolist()\n",
     "num_batch_types = len(set(batch_ids))\n",
     "batch_ids = np.array(batch_ids)\n",
     "\n",
     "mod_types = adata.obs[\"mod_type\"].tolist()\n",
     "mod_types = np.array(mod_types)\n",
     "\n",
     "# 90/10 train/validation split. Shuffling uses numpy's global RNG, seeded by\n",
     "# set_seed(config.seed) above. NOTE(review): no stratify= is passed, so rare\n",
     "# cell types may end up absent from the validation split.\n",
     "(\n",
     "    train_data,\n",
     "    valid_data,\n",
     "    train_celltype_labels,\n",
     "    valid_celltype_labels,\n",
     "    train_batch_labels,\n",
     "    valid_batch_labels,\n",
     "    train_mod_labels,\n",
     "    valid_mod_labels,\n",
     ") = train_test_split(\n",
     "    all_counts, celltypes_labels, batch_ids, mod_types, test_size=0.1, shuffle=True\n",
     ")\n",
     "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "4cd701b1",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Build a fresh vocabulary from this dataset only when no pre-trained model was loaded.\n",
     "if config.load_model is None:\n",
     "    vocab = Vocab(\n",
     "        VocabPybind(genes + special_tokens, None)\n",
     "    )  # bidirectional lookup [gene <-> int]\n",
     "vocab.set_default_index(vocab[\"<pad>\"])  # unknown genes map to the <pad> index\n",
     "gene_ids = np.array(vocab(genes), dtype=int)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "818bfcc0",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "riceFM - INFO - train set number of samples: 13742, \n",
      "\t feature length: 3001\n",
      "riceFM - INFO - valid set number of samples: 1527, \n",
      "\t feature length: 3001\n"
     ]
    }
   ],
   "source": [
     "# Tokenize genes and pad/truncate each cell to max_seq_len, prepending <cls>.\n",
     "tokenized_train = tokenize_and_pad_batch(\n",
     "    train_data,\n",
     "    gene_ids,\n",
     "    max_len=max_seq_len,\n",
     "    vocab=vocab,\n",
     "    pad_token=pad_token,\n",
     "    pad_value=pad_value,\n",
     "    append_cls=True,  # append <cls> token at the beginning\n",
     "    include_zero_gene=include_zero_gene,\n",
     ")\n",
     "tokenized_valid = tokenize_and_pad_batch(\n",
     "    valid_data,\n",
     "    gene_ids,\n",
     "    max_len=max_seq_len,\n",
     "    vocab=vocab,\n",
     "    pad_token=pad_token,\n",
     "    pad_value=pad_value,\n",
     "    append_cls=True,\n",
     "    include_zero_gene=include_zero_gene,\n",
     ")\n",
     "logger.info(\n",
     "    f\"train set number of samples: {tokenized_train['genes'].shape[0]}, \"\n",
     "    f\"\\n\\t feature length: {tokenized_train['genes'].shape[1]}\"\n",
     ")\n",
     "logger.info(\n",
     "    f\"valid set number of samples: {tokenized_valid['genes'].shape[0]}, \"\n",
     "    f\"\\n\\t feature length: {tokenized_valid['genes'].shape[1]}\"\n",
     ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "37a80818",
   "metadata": {},
   "outputs": [],
   "source": [
    "def prepare_data(sort_seq_batch=False) -> Tuple[Dict[str, torch.Tensor]]:\n",
    "    masked_values_train = random_mask_value(\n",
    "        tokenized_train[\"values\"],\n",
    "        mask_ratio=mask_ratio,\n",
    "        mask_value=mask_value,\n",
    "        pad_value=pad_value,\n",
    "    )\n",
    "    masked_values_valid = random_mask_value(\n",
    "        tokenized_valid[\"values\"],\n",
    "        mask_ratio=mask_ratio,\n",
    "        mask_value=mask_value,\n",
    "        pad_value=pad_value,\n",
    "    )\n",
    "    print(\n",
    "        f\"random masking at epoch {epoch:3d}, ratio of masked values in train: \",\n",
    "        f\"{(masked_values_train == mask_value).sum() / (masked_values_train - pad_value).count_nonzero():.4f}\",\n",
    "    )\n",
    "\n",
    "    input_gene_ids_train, input_gene_ids_valid = (\n",
    "        tokenized_train[\"genes\"],\n",
    "        tokenized_valid[\"genes\"],\n",
    "    )\n",
    "    input_values_train, input_values_valid = masked_values_train, masked_values_valid\n",
    "    target_values_train, target_values_valid = (\n",
    "        tokenized_train[\"values\"],\n",
    "        tokenized_valid[\"values\"],\n",
    "    )\n",
    "\n",
    "    tensor_batch_labels_train = torch.from_numpy(train_batch_labels).long()\n",
    "    tensor_batch_labels_valid = torch.from_numpy(valid_batch_labels).long()\n",
    "\n",
    "    tensor_mod_labels_train = torch.from_numpy(train_mod_labels).long()\n",
    "    tensor_mod_labels_valid = torch.from_numpy(valid_mod_labels).long()\n",
    "\n",
    "    tensor_celltype_labels_train = torch.from_numpy(train_celltype_labels).long()\n",
    "    tensor_celltype_labels_valid = torch.from_numpy(valid_celltype_labels).long()\n",
    "\n",
    "    if sort_seq_batch:  # TODO: update to random pick seq source in each traning batch\n",
    "        train_sort_ids = np.argsort(train_batch_labels)\n",
    "        input_gene_ids_train = input_gene_ids_train[train_sort_ids]\n",
    "        input_values_train = input_values_train[train_sort_ids]\n",
    "        target_values_train = target_values_train[train_sort_ids]\n",
    "        tensor_batch_labels_train = tensor_batch_labels_train[train_sort_ids]\n",
    "        tensor_mod_labels_train = tensor_mod_labels_train[train_sort_ids]\n",
    "        tensor_celltype_labels_train = tensor_celltype_labels_train[train_sort_ids]\n",
    "\n",
    "        valid_sort_ids = np.argsort(valid_batch_labels)\n",
    "        input_gene_ids_valid = input_gene_ids_valid[valid_sort_ids]\n",
    "        input_values_valid = input_values_valid[valid_sort_ids]\n",
    "        target_values_valid = target_values_valid[valid_sort_ids]\n",
    "        tensor_batch_labels_valid = tensor_batch_labels_valid[valid_sort_ids]\n",
    "        tensor_mod_labels_valid = tensor_mod_labels_valid[valid_sort_ids]\n",
    "        tensor_celltype_labels_valid = tensor_celltype_labels_valid[valid_sort_ids]\n",
    "\n",
    "    train_data_pt = {\n",
    "        \"gene_ids\": input_gene_ids_train,\n",
    "        \"values\": input_values_train,\n",
    "        \"target_values\": target_values_train,\n",
    "        \"batch_labels\": tensor_batch_labels_train,\n",
    "        \"mod_labels\": tensor_mod_labels_train,\n",
    "        \"celltype_labels\": tensor_celltype_labels_train,\n",
    "    }\n",
    "    valid_data_pt = {\n",
    "        \"gene_ids\": input_gene_ids_valid,\n",
    "        \"values\": input_values_valid,\n",
    "        \"target_values\": target_values_valid,\n",
    "        \"batch_labels\": tensor_batch_labels_valid,\n",
    "        \"mod_labels\": tensor_mod_labels_valid,\n",
    "        \"celltype_labels\": tensor_celltype_labels_valid,\n",
    "    }\n",
    "\n",
    "    return train_data_pt, valid_data_pt\n",
    "\n",
    "\n",
     "# dataset\n",
     "class SeqDataset(Dataset):\n",
     "    \"\"\"Thin map-style Dataset over a dict of equally-sized tensors.\"\"\"\n",
     "\n",
     "    def __init__(self, data: Dict[str, torch.Tensor]):\n",
     "        self.data = data\n",
     "\n",
     "    def __len__(self):\n",
     "        # all tensors share the same first dimension; gene_ids is the reference\n",
     "        return self.data[\"gene_ids\"].shape[0]\n",
     "\n",
     "    def __getitem__(self, idx):\n",
     "        return {k: v[idx] for k, v in self.data.items()}\n",
    "\n",
    "\n",
     "# data_loader\n",
     "def prepare_dataloader(\n",
     "    data_pt: Dict[str, torch.Tensor],\n",
     "    batch_size: int,\n",
     "    shuffle: bool = False,\n",
     "    intra_domain_shuffle: bool = False,\n",
     "    drop_last: bool = False,\n",
     "    num_workers: int = 0,\n",
     ") -> DataLoader:\n",
     "    \"\"\"Build a DataLoader over a tensor dict.\n",
     "\n",
     "    If the global `per_seq_batch_sample` is True, samples are grouped by\n",
     "    batch label and drawn via SubsetsBatchSampler so each mini-batch comes\n",
     "    from one sequencing batch; otherwise a plain DataLoader is used.\n",
     "    A num_workers of 0 is replaced by a CPU-affinity-based heuristic.\n",
     "    \"\"\"\n",
     "    if num_workers == 0:\n",
     "        num_workers = min(len(os.sched_getaffinity(0)), batch_size // 2)\n",
     "\n",
     "    dataset = SeqDataset(data_pt)\n",
     "\n",
     "    if per_seq_batch_sample:\n",
     "        # find the indices of samples in each seq batch\n",
     "        subsets = []\n",
     "        batch_labels_array = data_pt[\"batch_labels\"].numpy()\n",
     "        for batch_label in np.unique(batch_labels_array):\n",
     "            batch_indices = np.where(batch_labels_array == batch_label)[0].tolist()\n",
     "            subsets.append(batch_indices)\n",
     "        data_loader = DataLoader(\n",
     "            dataset=dataset,\n",
     "            batch_sampler=SubsetsBatchSampler(\n",
     "                subsets,\n",
     "                batch_size,\n",
     "                intra_subset_shuffle=intra_domain_shuffle,\n",
     "                inter_subset_shuffle=shuffle,\n",
     "                drop_last=drop_last,\n",
     "            ),\n",
     "            num_workers=num_workers,\n",
     "            pin_memory=True,\n",
     "        )\n",
     "        return data_loader\n",
     "\n",
     "    data_loader = DataLoader(\n",
     "        dataset=dataset,\n",
     "        batch_size=batch_size,\n",
     "        shuffle=shuffle,\n",
     "        drop_last=drop_last,\n",
     "        num_workers=num_workers,\n",
     "        pin_memory=True,\n",
     "    )\n",
     "    return data_loader\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "77105fda",
   "metadata": {},
   "source": [
    "## Step 3: Load the pre-trained scGPT model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "219bb9db",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "riceFM - INFO - Loading params encoder.embedding.weight with shape torch.Size([40158, 256])\n",
      "riceFM - INFO - Loading params encoder.enc_norm.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params encoder.enc_norm.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params value_encoder.linear1.weight with shape torch.Size([256, 1])\n",
      "riceFM - INFO - Loading params value_encoder.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params value_encoder.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params value_encoder.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params value_encoder.norm.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params value_encoder.norm.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params mod_encoder.embedding.weight with shape torch.Size([2, 256])\n",
      "riceFM - INFO - Loading params mod_encoder.enc_norm.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params mod_encoder.enc_norm.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.self_attn.out_proj.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.self_attn.out_proj.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.linear1.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.norm1.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.norm1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.norm2.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.0.norm2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.self_attn.out_proj.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.self_attn.out_proj.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.linear1.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.norm1.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.norm1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.norm2.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.1.norm2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.self_attn.out_proj.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.self_attn.out_proj.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.linear1.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.norm1.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.norm1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.norm2.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.2.norm2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.self_attn.out_proj.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.self_attn.out_proj.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.linear1.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.norm1.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.norm1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.norm2.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.3.norm2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.self_attn.out_proj.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.self_attn.out_proj.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.linear1.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.norm1.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.norm1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.norm2.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.4.norm2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.self_attn.out_proj.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.self_attn.out_proj.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.linear1.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.linear1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.linear2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.linear2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.norm1.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.norm1.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.norm2.weight with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params transformer_encoder.layers.5.norm2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params decoder.fc.0.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params decoder.fc.0.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params decoder.fc.2.weight with shape torch.Size([256, 256])\n",
      "riceFM - INFO - Loading params decoder.fc.2.bias with shape torch.Size([256])\n",
      "riceFM - INFO - Loading params decoder.fc.4.weight with shape torch.Size([1, 256])\n",
      "riceFM - INFO - Loading params decoder.fc.4.bias with shape torch.Size([1])\n",
      "--------------------\n",
      "name: encoder.embedding.weight\n",
      "--------------------\n",
      "name: encoder.enc_norm.weight\n",
      "--------------------\n",
      "name: encoder.enc_norm.bias\n",
      "--------------------\n",
      "name: value_encoder.linear1.weight\n",
      "--------------------\n",
      "name: value_encoder.linear1.bias\n",
      "--------------------\n",
      "name: value_encoder.linear2.weight\n",
      "--------------------\n",
      "name: value_encoder.linear2.bias\n",
      "--------------------\n",
      "name: value_encoder.norm.weight\n",
      "--------------------\n",
      "name: value_encoder.norm.bias\n",
      "--------------------\n",
      "name: mod_encoder.embedding.weight\n",
      "--------------------\n",
      "name: mod_encoder.enc_norm.weight\n",
      "--------------------\n",
      "name: mod_encoder.enc_norm.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.self_attn.in_proj_weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.self_attn.in_proj_bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.self_attn.out_proj.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.self_attn.out_proj.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.linear1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.linear1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.linear2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.linear2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.norm1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.norm1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.norm2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.0.norm2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.self_attn.in_proj_weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.self_attn.in_proj_bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.self_attn.out_proj.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.self_attn.out_proj.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.linear1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.linear1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.linear2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.linear2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.norm1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.norm1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.norm2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.1.norm2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.self_attn.in_proj_weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.self_attn.in_proj_bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.self_attn.out_proj.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.self_attn.out_proj.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.linear1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.linear1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.linear2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.linear2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.norm1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.norm1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.norm2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.2.norm2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.self_attn.in_proj_weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.self_attn.in_proj_bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.self_attn.out_proj.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.self_attn.out_proj.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.linear1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.linear1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.linear2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.linear2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.norm1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.norm1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.norm2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.3.norm2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.self_attn.in_proj_weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.self_attn.in_proj_bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.self_attn.out_proj.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.self_attn.out_proj.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.linear1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.linear1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.linear2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.linear2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.norm1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.norm1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.norm2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.4.norm2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.self_attn.in_proj_weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.self_attn.in_proj_bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.self_attn.out_proj.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.self_attn.out_proj.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.linear1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.linear1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.linear2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.linear2.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.norm1.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.norm1.bias\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.norm2.weight\n",
      "--------------------\n",
      "name: transformer_encoder.layers.5.norm2.bias\n",
      "--------------------\n",
      "name: decoder.fc.0.weight\n",
      "--------------------\n",
      "name: decoder.fc.0.bias\n",
      "--------------------\n",
      "name: decoder.fc.2.weight\n",
      "--------------------\n",
      "name: decoder.fc.2.bias\n",
      "--------------------\n",
      "name: decoder.fc.4.weight\n",
      "--------------------\n",
      "name: decoder.fc.4.bias\n",
      "--------------------\n",
      "name: cls_decoder._decoder.0.weight\n",
      "--------------------\n",
      "name: cls_decoder._decoder.0.bias\n",
      "--------------------\n",
      "name: cls_decoder._decoder.2.weight\n",
      "--------------------\n",
      "name: cls_decoder._decoder.2.bias\n",
      "--------------------\n",
      "name: cls_decoder._decoder.3.weight\n",
      "--------------------\n",
      "name: cls_decoder._decoder.3.bias\n",
      "--------------------\n",
      "name: cls_decoder._decoder.5.weight\n",
      "--------------------\n",
      "name: cls_decoder._decoder.5.bias\n",
      "--------------------\n",
      "name: cls_decoder.out_layer.weight\n",
      "--------------------\n",
      "name: cls_decoder.out_layer.bias\n",
      "riceFM - INFO - Total Pre freeze Params 12990218\n",
      "riceFM - INFO - Total Post freeze Params 12990218\n"
     ]
    }
   ],
   "source": [
    "# Device selection: prefer the Ascend NPU when available, fall back to CPU.\n",
    "# device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "device = torch.device(\"npu:0\" if torch.npu.is_available() else \"cpu\")\n",
    "torch.npu.set_device(device)\n",
    "\n",
    "ntokens = len(vocab)  # size of vocabulary\n",
    "model = TransformerModel(\n",
    "    ntokens,\n",
    "    embsize,\n",
    "    nhead,\n",
    "    d_hid,\n",
    "    nlayers,\n",
    "    nlayers_cls=3,\n",
    "    n_cls=num_types if CLS else 1,\n",
    "    vocab=vocab,\n",
    "    dropout=dropout,\n",
    "    pad_token=pad_token,\n",
    "    pad_value=pad_value,\n",
    "    do_mvc=MVC,\n",
    "    do_dab=DAB,\n",
    "    use_batch_labels=INPUT_BATCH_LABELS,\n",
    "    num_batch_labels=num_batch_types,\n",
    "    domain_spec_batchnorm=config.DSBN,\n",
    "    input_emb_style=input_emb_style,\n",
    "    n_input_bins=n_input_bins,\n",
    "    cell_emb_style=cell_emb_style,\n",
    "    mvc_decoder_style=mvc_decoder_style,\n",
    "    ecs_threshold=ecs_threshold,\n",
    "    explicit_zero_prob=explicit_zero_prob,\n",
    "    pre_norm=config.pre_norm,\n",
    ")\n",
    "if config.load_model is not None:\n",
    "    try:\n",
    "        # First try a strict load of the full checkpoint.\n",
    "        model.load_state_dict(torch.load(model_file))\n",
    "        logger.info(f\"Loading all model params from {model_file}\")\n",
    "    except Exception:\n",
    "        # FIX: was a bare `except:`, which would also swallow\n",
    "        # KeyboardInterrupt/SystemExit; `Exception` keeps the intended\n",
    "        # fallback behavior for any load failure (e.g. shape mismatch).\n",
    "        # only load params that are in the model and match the size\n",
    "        model_dict = model.state_dict()\n",
    "        pretrained_dict = torch.load(model_file)\n",
    "        pretrained_dict = {\n",
    "            k: v\n",
    "            for k, v in pretrained_dict.items()\n",
    "            if k in model_dict and v.shape == model_dict[k].shape\n",
    "        }\n",
    "        for k, v in pretrained_dict.items():\n",
    "            logger.info(f\"Loading params {k} with shape {v.shape}\")\n",
    "        model_dict.update(pretrained_dict)\n",
    "        model.load_state_dict(model_dict)\n",
    "\n",
    "# Count trainable params, de-duplicating shared tensors via data_ptr().\n",
    "pre_freeze_param_count = sum(dict((p.data_ptr(), p.numel()) for p in model.parameters() if p.requires_grad).values())\n",
    "\n",
    "# Freeze all pre-decoder weights\n",
    "for name, para in model.named_parameters():\n",
    "    print(\"-\"*20)\n",
    "    print(f\"name: {name}\")\n",
    "    # Freeze only the input-side encoders; transformer_encoder layers\n",
    "    # stay trainable for fine-tuning.\n",
    "    if config.freeze and \"encoder\" in name and \"transformer_encoder\" not in name:\n",
    "    # if config.freeze and \"encoder\" in name:\n",
    "        print(f\"freezing weights for: {name}\")\n",
    "        para.requires_grad = False\n",
    "\n",
    "post_freeze_param_count = sum(dict((p.data_ptr(), p.numel()) for p in model.parameters() if p.requires_grad).values())\n",
    "\n",
    "logger.info(f\"Total Pre freeze Params {(pre_freeze_param_count )}\")\n",
    "logger.info(f\"Total Post freeze Params {(post_freeze_param_count )}\")\n",
    "wandb.log(\n",
    "        {\n",
    "            \"info/pre_freeze_param_count\": pre_freeze_param_count,\n",
    "            \"info/post_freeze_param_count\": post_freeze_param_count,\n",
    "        },\n",
    ")\n",
    "\n",
    "model.to(device)\n",
    "wandb.watch(model)\n",
    "\n",
    "# Optional adversarial discriminator for batch-effect removal (used when ADV).\n",
    "if ADV:\n",
    "    discriminator = AdversarialDiscriminator(\n",
    "        d_model=embsize,\n",
    "        n_cls=num_batch_types,\n",
    "    ).to(device)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "1e4ea79e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Loss functions: masked MSE for expression reconstruction; cross-entropy\n",
    "# for the cell-type (CLS) and domain-adversarial-batch (DAB) heads.\n",
    "criterion = masked_mse_loss\n",
    "criterion_cls = nn.CrossEntropyLoss()\n",
    "criterion_dab = nn.CrossEntropyLoss()\n",
    "# Adam with a larger eps under AMP to reduce fp16 numerical issues.\n",
    "optimizer = torch.optim.Adam(\n",
    "    model.parameters(), lr=lr, eps=1e-4 if config.amp else 1e-8\n",
    ")\n",
    "scheduler = torch.optim.lr_scheduler.StepLR(\n",
    "    optimizer, schedule_interval, gamma=config.schedule_ratio\n",
    ")\n",
    "# Optional separate optimizer/scheduler for the DAB objective.\n",
    "if DAB_separate_optim:\n",
    "    optimizer_dab = torch.optim.Adam(model.parameters(), lr=lr)\n",
    "    scheduler_dab = torch.optim.lr_scheduler.StepLR(\n",
    "        optimizer_dab, schedule_interval, gamma=config.schedule_ratio\n",
    "    )\n",
    "# Adversarial setup: separate optimizers for encoder (E) and discriminator (D).\n",
    "if ADV:\n",
    "    criterion_adv = nn.CrossEntropyLoss()  # consider using label smoothing\n",
    "    optimizer_E = torch.optim.Adam(model.parameters(), lr=lr_ADV)\n",
    "    scheduler_E = torch.optim.lr_scheduler.StepLR(\n",
    "        optimizer_E, schedule_interval, gamma=config.schedule_ratio\n",
    "    )\n",
    "    optimizer_D = torch.optim.Adam(discriminator.parameters(), lr=lr_ADV)\n",
    "    scheduler_D = torch.optim.lr_scheduler.StepLR(\n",
    "        optimizer_D, schedule_interval, gamma=config.schedule_ratio\n",
    "    )\n",
    "\n",
    "# Gradient scaler for mixed precision; a no-op when config.amp is False.\n",
    "scaler = torch.cuda.amp.GradScaler(enabled=config.amp)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "b734269a",
   "metadata": {},
   "outputs": [],
   "source": [
    "def train(model: nn.Module, loader: DataLoader) -> None:\n",
    "    \"\"\"\n",
    "    Train the model for one epoch.\n",
    "\n",
    "    Accumulates per-objective losses (MSE/MLM, CLS, CCE, MVC, ECS, DAB,\n",
    "    adversarial, zero-log-prob) according to the globally configured task\n",
    "    flags, logs running averages every `log_interval` batches via the\n",
    "    module logger and wandb, and runs a second adversarial pass when ADV\n",
    "    is enabled. Relies on module-level globals (device, vocab, config,\n",
    "    criterion*, optimizer*, scaler, epoch, ...).\n",
    "    \"\"\"\n",
    "    model.train()\n",
    "    (\n",
    "        total_loss,\n",
    "        total_mse,\n",
    "        total_cls,\n",
    "        total_cce,\n",
    "        total_mvc,\n",
    "        total_ecs,\n",
    "        total_dab,\n",
    "        total_adv_E,\n",
    "        total_adv_D,\n",
    "        total_zero_log_prob,\n",
    "        total_mvc_zero_log_prob,\n",
    "    ) = (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)\n",
    "    total_error = 0.0\n",
    "    start_time = time.time()\n",
    "\n",
    "    num_batches = len(loader)\n",
    "    for batch, batch_data in enumerate(loader):\n",
    "        input_gene_ids = batch_data[\"gene_ids\"].to(device)\n",
    "        input_values = batch_data[\"values\"].to(device)\n",
    "        target_values = batch_data[\"target_values\"].to(device)\n",
    "        batch_labels = batch_data[\"batch_labels\"].to(device)\n",
    "        mod_labels = batch_data[\"mod_labels\"].to(device)\n",
    "        celltype_labels = batch_data[\"celltype_labels\"].to(device)\n",
    "\n",
    "        # batch_labels=batch_labels if INPUT_BATCH_LABELS or config.DSBN else None\n",
    "        # print(batch_labels)  #\"None\"\n",
    "        # print(config.DSBN)   #False\n",
    "\n",
    "        # Padding positions are excluded from attention.\n",
    "        src_key_padding_mask = input_gene_ids.eq(vocab[pad_token])\n",
    "        with torch.cuda.amp.autocast(enabled=config.amp):\n",
    "            output_dict = model(\n",
    "                input_gene_ids,\n",
    "                input_values,\n",
    "                src_key_padding_mask=src_key_padding_mask,\n",
    "                mod_types=mod_labels,\n",
    "                batch_labels=batch_labels if INPUT_BATCH_LABELS or config.DSBN else None,\n",
    "                CLS=CLS,\n",
    "                CCE=CCE,\n",
    "                MVC=MVC,\n",
    "                ECS=ECS,\n",
    "                do_sample=do_sample_in_train,\n",
    "                #generative_training=False\n",
    "            )\n",
    "\n",
    "            masked_positions = input_values.eq(mask_value)  # the positions to predict\n",
    "            # Build the total loss as a weighted sum of the enabled objectives.\n",
    "            loss = 0.0\n",
    "            metrics_to_log = {}\n",
    "            if MLM:\n",
    "                loss_mse = criterion(\n",
    "                    output_dict[\"mlm_output\"], target_values, masked_positions\n",
    "                )\n",
    "                loss = loss + loss_mse\n",
    "                metrics_to_log = {\"train/mse\": loss_mse.item()}\n",
    "            if explicit_zero_prob:\n",
    "                loss_zero_log_prob = criterion_neg_log_bernoulli(\n",
    "                    output_dict[\"mlm_zero_probs\"], target_values, masked_positions\n",
    "                )\n",
    "                loss = loss + loss_zero_log_prob\n",
    "                metrics_to_log.update({\"train/nzlp\": loss_zero_log_prob.item()})\n",
    "            if CLS:\n",
    "                loss_cls = criterion_cls(output_dict[\"cls_output\"], celltype_labels)\n",
    "                loss = loss + loss_cls\n",
    "                metrics_to_log.update({\"train/cls\": loss_cls.item()})\n",
    "\n",
    "                # Classification error rate on this batch (1 - accuracy).\n",
    "                error_rate = 1 - (\n",
    "                    (output_dict[\"cls_output\"].argmax(1) == celltype_labels)\n",
    "                    .sum()\n",
    "                    .item()\n",
    "                ) / celltype_labels.size(0)\n",
    "            if CCE:\n",
    "                loss_cce = 10 * output_dict[\"loss_cce\"]\n",
    "                loss = loss + loss_cce\n",
    "                metrics_to_log.update({\"train/cce\": loss_cce.item()})\n",
    "            if MVC:\n",
    "                loss_mvc = criterion(\n",
    "                    output_dict[\"mvc_output\"], target_values, masked_positions\n",
    "                )\n",
    "                loss = loss + loss_mvc\n",
    "                metrics_to_log.update({\"train/mvc\": loss_mvc.item()})\n",
    "            if MVC and explicit_zero_prob:\n",
    "                loss_mvc_zero_log_prob = criterion_neg_log_bernoulli(\n",
    "                    output_dict[\"mvc_zero_probs\"], target_values, masked_positions\n",
    "                )\n",
    "                loss = loss + loss_mvc_zero_log_prob\n",
    "                metrics_to_log.update({\"train/mvc_nzlp\": loss_mvc_zero_log_prob.item()})\n",
    "            if ECS:\n",
    "                loss_ecs = 10 * output_dict[\"loss_ecs\"]\n",
    "                loss = loss + loss_ecs\n",
    "                metrics_to_log.update({\"train/ecs\": loss_ecs.item()})\n",
    "            if DAB:\n",
    "                # try weighting and separate optimizer\n",
    "                loss_dab = criterion_dab(output_dict[\"dab_output\"], batch_labels)\n",
    "                loss = loss + dab_weight * loss_dab\n",
    "                metrics_to_log.update({\"train/dab\": loss_dab.item()})\n",
    "\n",
    "        model.zero_grad()\n",
    "        scaler.scale(loss).backward()\n",
    "        scaler.unscale_(optimizer)\n",
    "        with warnings.catch_warnings(record=True) as w:\n",
    "            warnings.filterwarnings(\"always\")\n",
    "            torch.nn.utils.clip_grad_norm_(\n",
    "                model.parameters(),\n",
    "                1.0,\n",
    "                error_if_nonfinite=False if scaler.is_enabled() else True,\n",
    "            )\n",
    "            if len(w) > 0:\n",
    "                logger.warning(\n",
    "                    f\"Found infinite gradient. This may be caused by the gradient \"\n",
    "                    f\"scaler. The current scale is {scaler.get_scale()}. This warning \"\n",
    "                    \"can be ignored if no longer occurs after autoscaling of the scaler.\"\n",
    "                )\n",
    "        scaler.step(optimizer)\n",
    "        scaler.update()\n",
    "\n",
    "        if ADV:\n",
    "            # rerun the model for adversarial training\n",
    "            output_dict = model(\n",
    "                input_gene_ids,\n",
    "                input_values,\n",
    "                src_key_padding_mask=src_key_padding_mask,\n",
    "                batch_labels=batch_labels if INPUT_BATCH_LABELS or config.DSBN else None,\n",
    "                CLS=CLS,\n",
    "                CCE=CCE,\n",
    "                MVC=MVC,\n",
    "                ECS=ECS,\n",
    "                do_sample=do_sample_in_train,\n",
    "                #generative_training=False\n",
    "            )\n",
    "\n",
    "            # TRAINING DISCRIMINATOR\n",
    "            loss_adv_D = criterion_adv(\n",
    "                discriminator(output_dict[\"cell_emb\"].detach()), batch_labels\n",
    "            )\n",
    "            if epoch > adv_D_delay_epochs:\n",
    "                discriminator.zero_grad()\n",
    "                loss_adv_D.backward()\n",
    "                optimizer_D.step()\n",
    "\n",
    "            # TRAINING ENCODER\n",
    "            loss_adv_E = -criterion_adv(\n",
    "                discriminator(output_dict[\"cell_emb\"]), batch_labels\n",
    "            )\n",
    "            # NOTE: the loss is negative here because we want to maximize\n",
    "            # the cross_entropy_loss, in other words, disguise against the discriminator\n",
    "            if epoch > adv_E_delay_epochs:\n",
    "                model.zero_grad()\n",
    "                discriminator.zero_grad()\n",
    "                loss_adv_E.backward()\n",
    "                optimizer_E.step()\n",
    "\n",
    "        wandb.log(metrics_to_log)\n",
    "\n",
    "        total_loss += loss.item()\n",
    "        total_mse += loss_mse.item() if MLM else 0.0\n",
    "        total_cls += loss_cls.item() if CLS else 0.0\n",
    "        total_cce += loss_cce.item() if CCE else 0.0\n",
    "        total_mvc += loss_mvc.item() if MVC else 0.0\n",
    "        total_ecs += loss_ecs.item() if ECS else 0.0\n",
    "        total_dab += loss_dab.item() if DAB else 0.0\n",
    "        total_adv_E += loss_adv_E.item() if ADV else 0.0\n",
    "        total_adv_D += loss_adv_D.item() if ADV else 0.0\n",
    "        total_zero_log_prob += loss_zero_log_prob.item() if explicit_zero_prob else 0.0\n",
    "        total_mvc_zero_log_prob += (\n",
    "            loss_mvc_zero_log_prob.item() if MVC and explicit_zero_prob else 0.0\n",
    "        )\n",
    "        # FIX: error_rate is only assigned inside the CLS branch above; the\n",
    "        # previous unguarded `total_error += error_rate` raised NameError on\n",
    "        # the first batch whenever CLS is False.\n",
    "        total_error += error_rate if CLS else 0.0\n",
    "        if batch % log_interval == 0 and batch > 0:\n",
    "            lr = scheduler.get_last_lr()[0]\n",
    "            ms_per_batch = (time.time() - start_time) * 1000 / log_interval\n",
    "            cur_loss = total_loss / log_interval\n",
    "            cur_mse = total_mse / log_interval\n",
    "            cur_cls = total_cls / log_interval if CLS else 0.0\n",
    "            cur_cce = total_cce / log_interval if CCE else 0.0\n",
    "            cur_mvc = total_mvc / log_interval if MVC else 0.0\n",
    "            cur_ecs = total_ecs / log_interval if ECS else 0.0\n",
    "            cur_dab = total_dab / log_interval if DAB else 0.0\n",
    "            cur_adv_E = total_adv_E / log_interval if ADV else 0.0\n",
    "            cur_adv_D = total_adv_D / log_interval if ADV else 0.0\n",
    "            cur_zero_log_prob = (\n",
    "                total_zero_log_prob / log_interval if explicit_zero_prob else 0.0\n",
    "            )\n",
    "            cur_mvc_zero_log_prob = (\n",
    "                total_mvc_zero_log_prob / log_interval\n",
    "                if MVC and explicit_zero_prob\n",
    "                else 0.0\n",
    "            )\n",
    "            cur_error = total_error / log_interval\n",
    "            # ppl = math.exp(cur_loss)\n",
    "            logger.info(\n",
    "                f\"| epoch {epoch:3d} | {batch:3d}/{num_batches:3d} batches | \"\n",
    "                f\"lr {lr:05.5f} | ms/batch {ms_per_batch:5.2f} | \"\n",
    "                f\"loss {cur_loss:5.2f} | \"\n",
    "                + (f\"mse {cur_mse:5.2f} | mre {cur_error:5.2f} |\" if MLM else \"\")\n",
    "                + (f\"cls {cur_cls:5.2f} | \" if CLS else \"\")\n",
    "                + (f\"err {cur_error:5.2f} | \" if CLS else \"\")\n",
    "                + (f\"cce {cur_cce:5.2f} |\" if CCE else \"\")\n",
    "                + (f\"mvc {cur_mvc:5.2f} |\" if MVC else \"\")\n",
    "                + (f\"ecs {cur_ecs:5.2f} |\" if ECS else \"\")\n",
    "                + (f\"dab {cur_dab:5.2f} |\" if DAB else \"\")\n",
    "                + (f\"adv_E {cur_adv_E:5.2f} |\" if ADV else \"\")\n",
    "                + (f\"adv_D {cur_adv_D:5.2f} |\" if ADV else \"\")\n",
    "                + (f\"nzlp {cur_zero_log_prob:5.2f} |\" if explicit_zero_prob else \"\")\n",
    "                + (\n",
    "                    f\"mvc_nzlp {cur_mvc_zero_log_prob:5.2f} |\"\n",
    "                    if MVC and explicit_zero_prob\n",
    "                    else \"\"\n",
    "                )\n",
    "            )\n",
    "            # Reset running accumulators for the next logging window.\n",
    "            total_loss = 0\n",
    "            total_mse = 0\n",
    "            total_cls = 0\n",
    "            total_cce = 0\n",
    "            total_mvc = 0\n",
    "            total_ecs = 0\n",
    "            total_dab = 0\n",
    "            total_adv_E = 0\n",
    "            total_adv_D = 0\n",
    "            total_zero_log_prob = 0\n",
    "            total_mvc_zero_log_prob = 0\n",
    "            total_error = 0\n",
    "            start_time = time.time()\n",
    "\n",
    "\n",
    "def define_wandb_metrcis():\n",
    "    \"\"\"Register wandb summary rules: track the minimum of each validation\n",
    "    metric per epoch, and the maximum of the test avg_bio score.\"\"\"\n",
    "    for metric_name in (\"valid/mse\", \"valid/mre\", \"valid/dab\", \"valid/sum_mse_dab\"):\n",
    "        wandb.define_metric(metric_name, summary=\"min\", step_metric=\"epoch\")\n",
    "    wandb.define_metric(\"test/avg_bio\", summary=\"max\")\n",
    "\n",
    "\n",
    "def evaluate(model: nn.Module, loader: DataLoader, return_raw: bool = False):\n",
    "    \"\"\"\n",
    "    Evaluate the model on the evaluation data.\n",
    "\n",
    "    Args:\n",
    "        model: model to evaluate; switched to eval mode here.\n",
    "        loader: DataLoader yielding tokenized batches with celltype labels.\n",
    "        return_raw: if True, return the per-cell predicted class ids\n",
    "            (np.ndarray) instead of the aggregated metrics.\n",
    "\n",
    "    Returns:\n",
    "        np.ndarray of predictions when ``return_raw`` is True, otherwise a\n",
    "        ``(mean_cls_loss, mean_error_rate)`` tuple averaged over all cells.\n",
    "    \"\"\"\n",
    "    model.eval()\n",
    "    total_loss = 0.0\n",
    "    total_error = 0.0\n",
    "    total_dab = 0.0\n",
    "    total_num = 0\n",
    "    predictions = []\n",
    "    # inference only: disable autograd bookkeeping to save memory and time\n",
    "    with torch.no_grad():\n",
    "        for batch_data in loader:\n",
    "            input_gene_ids = batch_data[\"gene_ids\"].to(device)\n",
    "            input_values = batch_data[\"values\"].to(device)\n",
    "            target_values = batch_data[\"target_values\"].to(device)\n",
    "            batch_labels = batch_data[\"batch_labels\"].to(device)\n",
    "            mod_labels = batch_data[\"mod_labels\"].to(device)\n",
    "            celltype_labels = batch_data[\"celltype_labels\"].to(device)\n",
    "\n",
    "            src_key_padding_mask = input_gene_ids.eq(vocab[pad_token])\n",
    "            with torch.cuda.amp.autocast(enabled=config.amp):\n",
    "                output_dict = model(\n",
    "                    input_gene_ids,\n",
    "                    input_values,\n",
    "                    src_key_padding_mask=src_key_padding_mask,\n",
    "                    mod_types=mod_labels,\n",
    "                    batch_labels=batch_labels if INPUT_BATCH_LABELS or config.DSBN else None,\n",
    "                    CLS=CLS,  # evaluation does not need CLS or CCE\n",
    "                    CCE=False,\n",
    "                    MVC=False,\n",
    "                    ECS=False,\n",
    "                    do_sample=do_sample_in_train,\n",
    "                    # generative_training=False,\n",
    "                )\n",
    "                output_values = output_dict[\"cls_output\"]\n",
    "                loss = criterion_cls(output_values, celltype_labels)\n",
    "\n",
    "                if DAB:\n",
    "                    loss_dab = criterion_dab(output_dict[\"dab_output\"], batch_labels)\n",
    "\n",
    "            n_cells = len(input_gene_ids)\n",
    "            total_loss += loss.item() * n_cells\n",
    "            # number of correctly classified cells in this batch\n",
    "            n_correct = (output_values.argmax(1) == celltype_labels).sum().item()\n",
    "            total_error += (1 - n_correct / n_cells) * n_cells\n",
    "            total_dab += loss_dab.item() * n_cells if DAB else 0.0\n",
    "            total_num += n_cells\n",
    "            predictions.append(output_values.argmax(1).cpu().numpy())\n",
    "\n",
    "    if return_raw:\n",
    "        return np.concatenate(predictions, axis=0)\n",
    "\n",
    "    return total_loss / total_num, total_error / total_num\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "734a503c",
   "metadata": {},
   "source": [
    "## Step 4: Finetune scGPT with task-specific objectives"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "8e48b893",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "random masking at epoch   1, ratio of masked values in train:  0.0000\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[W AmpForeachNonFiniteCheckAndUnscaleKernelNpuOpApi.cpp:104] Warning: Non finite check and unscale on NPU device! (function operator())\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Gradient overflow. Skipping step\n",
      "Loss scaler reducing loss scale to 32768.0\n",
      "Gradient overflow. Skipping step\n",
      "Loss scaler reducing loss scale to 16384.0\n",
      "riceFM - INFO - | epoch   1 | 100/430 batches | lr 0.00010 | ms/batch 544.10 | loss  1.09 | cls  1.09 | err  0.37 | \n",
      "riceFM - INFO - | epoch   1 | 200/430 batches | lr 0.00010 | ms/batch 500.35 | loss  0.56 | cls  0.56 | err  0.18 | \n",
      "riceFM - INFO - | epoch   1 | 300/430 batches | lr 0.00010 | ms/batch 499.84 | loss  0.39 | cls  0.39 | err  0.14 | \n",
      "riceFM - INFO - | epoch   1 | 400/430 batches | lr 0.00010 | ms/batch 499.57 | loss  0.31 | cls  0.31 | err  0.09 | \n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[W VariableFallbackKernel.cpp:51] Warning: CAUTION: The operator 'aten::_nested_tensor_from_mask_left_aligned' is not currently supported on the NPU backend and will fall back to run on the CPU. This may have performance implications. (function npu_cpu_fallback)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   1 | time: 234.32s | valid loss/mse 0.2917 | err 0.0950\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.2917\n",
      "random masking at epoch   2, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   2 | 100/430 batches | lr 0.00009 | ms/batch 525.57 | loss  0.26 | cls  0.26 | err  0.09 | \n",
      "riceFM - INFO - | epoch   2 | 200/430 batches | lr 0.00009 | ms/batch 499.06 | loss  0.24 | cls  0.24 | err  0.08 | \n",
      "riceFM - INFO - | epoch   2 | 300/430 batches | lr 0.00009 | ms/batch 498.97 | loss  0.24 | cls  0.24 | err  0.08 | \n",
      "riceFM - INFO - | epoch   2 | 400/430 batches | lr 0.00009 | ms/batch 501.83 | loss  0.21 | cls  0.21 | err  0.07 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   2 | time: 231.58s | valid loss/mse 0.2136 | err 0.0694\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.2136\n",
      "random masking at epoch   3, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   3 | 100/430 batches | lr 0.00008 | ms/batch 531.59 | loss  0.19 | cls  0.19 | err  0.06 | \n",
      "riceFM - INFO - | epoch   3 | 200/430 batches | lr 0.00008 | ms/batch 506.97 | loss  0.19 | cls  0.19 | err  0.06 | \n",
      "riceFM - INFO - | epoch   3 | 300/430 batches | lr 0.00008 | ms/batch 506.11 | loss  0.18 | cls  0.18 | err  0.06 | \n",
      "riceFM - INFO - | epoch   3 | 400/430 batches | lr 0.00008 | ms/batch 505.94 | loss  0.17 | cls  0.17 | err  0.06 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   3 | time: 234.75s | valid loss/mse 0.2325 | err 0.0753\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "random masking at epoch   4, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   4 | 100/430 batches | lr 0.00007 | ms/batch 535.88 | loss  0.15 | cls  0.15 | err  0.06 | \n",
      "riceFM - INFO - | epoch   4 | 200/430 batches | lr 0.00007 | ms/batch 506.78 | loss  0.16 | cls  0.16 | err  0.05 | \n",
      "riceFM - INFO - | epoch   4 | 300/430 batches | lr 0.00007 | ms/batch 506.70 | loss  0.15 | cls  0.15 | err  0.06 | \n",
      "riceFM - INFO - | epoch   4 | 400/430 batches | lr 0.00007 | ms/batch 506.59 | loss  0.15 | cls  0.15 | err  0.05 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   4 | time: 235.12s | valid loss/mse 0.2355 | err 0.0766\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "random masking at epoch   5, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   5 | 100/430 batches | lr 0.00007 | ms/batch 548.20 | loss  0.14 | cls  0.14 | err  0.05 | \n",
      "riceFM - INFO - | epoch   5 | 200/430 batches | lr 0.00007 | ms/batch 507.28 | loss  0.15 | cls  0.15 | err  0.05 | \n",
      "riceFM - INFO - | epoch   5 | 300/430 batches | lr 0.00007 | ms/batch 508.39 | loss  0.14 | cls  0.14 | err  0.05 | \n",
      "Loss scaler increasing loss scale to 32768.0\n",
      "riceFM - INFO - | epoch   5 | 400/430 batches | lr 0.00007 | ms/batch 509.14 | loss  0.14 | cls  0.14 | err  0.05 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   5 | time: 239.71s | valid loss/mse 0.1942 | err 0.0576\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.1942\n",
      "random masking at epoch   6, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   6 | 100/430 batches | lr 0.00006 | ms/batch 542.38 | loss  0.12 | cls  0.12 | err  0.05 | \n",
      "riceFM - INFO - | epoch   6 | 200/430 batches | lr 0.00006 | ms/batch 505.88 | loss  0.14 | cls  0.14 | err  0.05 | \n",
      "riceFM - INFO - | epoch   6 | 300/430 batches | lr 0.00006 | ms/batch 505.69 | loss  0.14 | cls  0.14 | err  0.05 | \n",
      "riceFM - INFO - | epoch   6 | 400/430 batches | lr 0.00006 | ms/batch 506.10 | loss  0.13 | cls  0.13 | err  0.05 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   6 | time: 237.34s | valid loss/mse 0.2006 | err 0.0576\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "random masking at epoch   7, ratio of masked values in train:  0.0000\n",
      "Gradient overflow. Skipping step\n",
      "Loss scaler reducing loss scale to 16384.0\n",
      "riceFM - INFO - | epoch   7 | 100/430 batches | lr 0.00005 | ms/batch 537.73 | loss  0.11 | cls  0.11 | err  0.04 | \n",
      "riceFM - INFO - | epoch   7 | 200/430 batches | lr 0.00005 | ms/batch 506.26 | loss  0.14 | cls  0.14 | err  0.05 | \n",
      "riceFM - INFO - | epoch   7 | 300/430 batches | lr 0.00005 | ms/batch 505.52 | loss  0.12 | cls  0.12 | err  0.04 | \n",
      "riceFM - INFO - | epoch   7 | 400/430 batches | lr 0.00005 | ms/batch 506.00 | loss  0.12 | cls  0.12 | err  0.04 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   7 | time: 235.83s | valid loss/mse 0.1708 | err 0.0524\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.1708\n",
      "random masking at epoch   8, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   8 | 100/430 batches | lr 0.00005 | ms/batch 551.73 | loss  0.10 | cls  0.10 | err  0.04 | \n",
      "riceFM - INFO - | epoch   8 | 200/430 batches | lr 0.00005 | ms/batch 506.02 | loss  0.12 | cls  0.12 | err  0.04 | \n",
      "riceFM - INFO - | epoch   8 | 300/430 batches | lr 0.00005 | ms/batch 505.23 | loss  0.11 | cls  0.11 | err  0.04 | \n",
      "riceFM - INFO - | epoch   8 | 400/430 batches | lr 0.00005 | ms/batch 506.44 | loss  0.12 | cls  0.12 | err  0.04 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   8 | time: 236.60s | valid loss/mse 0.1677 | err 0.0517\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.1677\n",
      "random masking at epoch   9, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch   9 | 100/430 batches | lr 0.00004 | ms/batch 531.57 | loss  0.10 | cls  0.10 | err  0.04 | \n",
      "riceFM - INFO - | epoch   9 | 200/430 batches | lr 0.00004 | ms/batch 505.51 | loss  0.12 | cls  0.12 | err  0.04 | \n",
      "riceFM - INFO - | epoch   9 | 300/430 batches | lr 0.00004 | ms/batch 506.47 | loss  0.11 | cls  0.11 | err  0.04 | \n",
      "riceFM - INFO - | epoch   9 | 400/430 batches | lr 0.00004 | ms/batch 505.67 | loss  0.11 | cls  0.11 | err  0.04 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch   9 | time: 234.18s | valid loss/mse 0.1573 | err 0.0504\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.1573\n",
      "random masking at epoch  10, ratio of masked values in train:  0.0000\n",
      "riceFM - INFO - | epoch  10 | 100/430 batches | lr 0.00004 | ms/batch 531.48 | loss  0.09 | cls  0.09 | err  0.03 | \n",
      "riceFM - INFO - | epoch  10 | 200/430 batches | lr 0.00004 | ms/batch 506.39 | loss  0.12 | cls  0.12 | err  0.04 | \n",
      "riceFM - INFO - | epoch  10 | 300/430 batches | lr 0.00004 | ms/batch 505.59 | loss  0.10 | cls  0.10 | err  0.04 | \n",
      "riceFM - INFO - | epoch  10 | 400/430 batches | lr 0.00004 | ms/batch 505.15 | loss  0.11 | cls  0.11 | err  0.04 | \n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - | end of epoch  10 | time: 234.02s | valid loss/mse 0.1533 | err 0.0498\n",
      "riceFM - INFO - -----------------------------------------------------------------------------------------\n",
      "riceFM - INFO - Best model with score 0.1533\n"
     ]
    }
   ],
   "source": [
    "import copy\n",
    "\n",
    "best_val_loss = float(\"inf\")\n",
    "best_avg_bio = 0.0\n",
    "best_model = None\n",
    "define_wandb_metrcis()\n",
    "\n",
    "for epoch in range(1, epochs + 1):\n",
    "    epoch_start_time = time.time()\n",
    "    train_data_pt, valid_data_pt = prepare_data(sort_seq_batch=per_seq_batch_sample)\n",
    "    train_loader = prepare_dataloader(\n",
    "        train_data_pt,\n",
    "        batch_size=batch_size,\n",
    "        shuffle=False,\n",
    "        intra_domain_shuffle=True,\n",
    "        drop_last=False,\n",
    "    )\n",
    "    valid_loader = prepare_dataloader(\n",
    "        valid_data_pt,\n",
    "        batch_size=eval_batch_size,\n",
    "        shuffle=False,\n",
    "        intra_domain_shuffle=False,\n",
    "        drop_last=False,\n",
    "    )\n",
    "\n",
    "    if config.do_train:\n",
    "        train(\n",
    "            model,\n",
    "            loader=train_loader,\n",
    "        )\n",
    "    val_loss, val_err = evaluate(\n",
    "        model,\n",
    "        loader=valid_loader,\n",
    "    )\n",
    "    elapsed = time.time() - epoch_start_time\n",
    "    logger.info(\"-\" * 89)\n",
    "    logger.info(\n",
    "        f\"| end of epoch {epoch:3d} | time: {elapsed:5.2f}s | \"\n",
    "        f\"valid loss/mse {val_loss:5.4f} | err {val_err:5.4f}\"\n",
    "    )\n",
    "    logger.info(\"-\" * 89)\n",
    "\n",
    "    if val_loss < best_val_loss:\n",
    "        best_val_loss = val_loss\n",
    "        # snapshot the weights: a plain `best_model = model` only aliases the\n",
    "        # live model, so later (possibly worse) epochs would silently overwrite\n",
    "        # the \"best\" checkpoint. NOTE(review): deepcopy doubles model memory;\n",
    "        # if that was why it was disabled, save state_dict to disk instead.\n",
    "        best_model = copy.deepcopy(model)\n",
    "        best_model_epoch = epoch\n",
    "        logger.info(f\"Best model with score {best_val_loss:5.4f}\")\n",
    "\n",
    "    scheduler.step()\n",
    "    if DAB_separate_optim:\n",
    "        scheduler_dab.step()\n",
    "    if ADV:\n",
    "        scheduler_D.step()\n",
    "        scheduler_E.step()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "4a6ce176",
   "metadata": {},
   "outputs": [],
   "source": [
    "# %% inference\n",
    "def test(model: nn.Module, adata):\n",
    "    \"\"\"\n",
    "    Run inference on a query AnnData object and report classification metrics.\n",
    "\n",
    "    Args:\n",
    "        model: fine-tuned model used for prediction.\n",
    "        adata: query AnnData with gene names in ``var[\"gene_name\"]`` and\n",
    "            ``celltype_id`` / ``batch_id`` / ``mod_type`` columns in ``obs``.\n",
    "\n",
    "    Returns:\n",
    "        ``(predictions, celltypes_labels, results)`` where ``results`` maps\n",
    "        metric names (accuracy/precision/recall/macro_f1) to their values.\n",
    "    \"\"\"\n",
    "    genes = adata.var[\"gene_name\"].tolist()\n",
    "    # genes missing from the vocab map to <pad> and are masked out downstream\n",
    "    vocab.set_default_index(vocab[\"<pad>\"])\n",
    "    gene_ids = np.array(vocab(genes), dtype=int)\n",
    "\n",
    "    all_counts = (\n",
    "        adata.layers[input_layer_key].A\n",
    "        if issparse(adata.layers[input_layer_key])\n",
    "        else adata.layers[input_layer_key]\n",
    "    )\n",
    "\n",
    "    celltypes_labels = np.array(adata.obs[\"celltype_id\"].tolist())  # make sure count from 0\n",
    "    batch_ids = np.array(adata.obs[\"batch_id\"].tolist())\n",
    "    mod_types = np.array(adata.obs[\"mod_type\"].tolist())\n",
    "\n",
    "    tokenized_test = tokenize_and_pad_batch(\n",
    "        all_counts,\n",
    "        gene_ids,\n",
    "        max_len=max_seq_len,\n",
    "        vocab=vocab,\n",
    "        pad_token=pad_token,\n",
    "        pad_value=pad_value,\n",
    "        append_cls=True,  # append <cls> token at the beginning\n",
    "        include_zero_gene=include_zero_gene,\n",
    "    )\n",
    "\n",
    "    # same masking scheme as training (observed ratio was 0.0 in this run)\n",
    "    input_values_test = random_mask_value(\n",
    "        tokenized_test[\"values\"],\n",
    "        mask_ratio=mask_ratio,\n",
    "        mask_value=mask_value,\n",
    "        pad_value=pad_value,\n",
    "    )\n",
    "    test_data_pt = {\n",
    "        \"gene_ids\": tokenized_test[\"genes\"],\n",
    "        \"values\": input_values_test,\n",
    "        \"target_values\": tokenized_test[\"values\"],\n",
    "        \"batch_labels\": torch.from_numpy(batch_ids).long(),\n",
    "        \"mod_labels\": torch.from_numpy(mod_types).long(),\n",
    "        \"celltype_labels\": torch.from_numpy(celltypes_labels).long(),\n",
    "    }\n",
    "\n",
    "    test_loader = DataLoader(\n",
    "        dataset=SeqDataset(test_data_pt),\n",
    "        batch_size=eval_batch_size,\n",
    "        shuffle=False,\n",
    "        drop_last=False,\n",
    "        num_workers=min(len(os.sched_getaffinity(0)), eval_batch_size // 2),\n",
    "        pin_memory=True,\n",
    "    )\n",
    "\n",
    "    model.eval()\n",
    "    predictions = evaluate(\n",
    "        model,\n",
    "        loader=test_loader,\n",
    "        return_raw=True,\n",
    "    )\n",
    "\n",
    "    # compute accuracy, precision, recall, f1\n",
    "    from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score\n",
    "\n",
    "    accuracy = accuracy_score(celltypes_labels, predictions)\n",
    "    precision = precision_score(celltypes_labels, predictions, average=\"macro\")\n",
    "    recall = recall_score(celltypes_labels, predictions, average=\"macro\")\n",
    "    macro_f1 = f1_score(celltypes_labels, predictions, average=\"macro\")\n",
    "\n",
    "    logger.info(\n",
    "        f\"Accuracy: {accuracy:.3f}, Precision: {precision:.3f}, Recall: {recall:.3f}, \"\n",
    "        f\"Macro F1: {macro_f1:.3f}\"\n",
    "    )\n",
    "\n",
    "    results = {\n",
    "        \"test/accuracy\": accuracy,\n",
    "        \"test/precision\": precision,\n",
    "        \"test/recall\": recall,\n",
    "        \"test/macro_f1\": macro_f1,\n",
    "    }\n",
    "\n",
    "    return predictions, celltypes_labels, results"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "16236bf2",
   "metadata": {},
   "source": [
    "## Step 5: Inference with fine-tuned scGPT model\n",
    "In the cell-type annotation task, the fine-tuned scGPT predicts cell-type labels for the query set as inference. The model performance is evaluated on standard classification metrics. Here we visualize the predicted labels over the scGPT cell embeddings, and present the confusion matrix for detailed classification performance on the cell-group level."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "79730e19",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "riceFM - INFO - Accuracy: 0.911, Precision: 0.904, Recall: 0.899, Macro F1: 0.897\n"
     ]
    }
   ],
   "source": [
    "# 2m8.5s\n",
    "predictions, labels, results = test(best_model, adata_test)\n",
    "adata_test_raw.obs[\"predictions\"] = [id2type[p] for p in predictions]\n",
    "\n",
    "# persist predictions, ground-truth labels, metrics and the label map\n",
    "with open(save_dir / \"results.pkl\", \"wb\") as f:\n",
    "    pickle.dump(\n",
    "        {\n",
    "            \"predictions\": predictions,\n",
    "            \"labels\": labels,\n",
    "            \"results\": results,\n",
    "            \"id_maps\": id2type,\n",
    "        },\n",
    "        f,\n",
    "    )\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "2e419279",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'test/accuracy': 0.9106490233144298,\n",
       " 'test/precision': 0.9043977875526951,\n",
       " 'test/recall': 0.8994145177462004,\n",
       " 'test/macro_f1': 0.89748913473754}"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "results"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "8403db3d",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAACdsAAAnbCAYAAAAKVp8WAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjUsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvWftoOwAAAAlwSFlzAAAYmwAAGJsBSXWDlAABAABJREFUeJzs3Xu01XWdP/7nPtwOIhcLmDICkYuYZqXDsUadyDQdsr5pmpkOIeHoOJGm0mUmDa1cFkPl5JQ/7UJJkAED1RoSKwklHWkmi9UFDplijSxBEc8R4SSwf3807u9xPPjRc2F/vp3Ho7XX+mzOa+/368xaZ/nPc57vSrVarQYAAAAAAAAAAADYp4Z6LwAAAAAAAAAAAABlJ2wHAAAAAAAAAAAABYTtAAAAAAAAAAAAoICwHQAAAAAAAAAAABQQtgMAAAAAAAAAAIACwnYAAAAAAAAAAABQQNgOAAAAAAAAAAAACgjbAQAAAAAAAAAAQAFhOwAAAAAAAAAAACggbAcAAAAAAAAAAAAFhO0AAAAAAAAAAACggLAdAAAAAAAAAAAAFBC2AwAAAAAAAAAAgALCdgAAAAAAAAAAAFBA2A4AAAAAAAAAAAAKCNsBAAAAAAAAAABAAWE7AAAAAAAAAAAAKCBsBwAAAAAAAAAAAAWE7QAAAAAAAAAAAKCAsB0AAAAAAAAAAAAUELYDAAAAAAAAAACAAsJ2AAAAAAAAAAAAUEDYjl6vWq1m48aNufXWW/PhD384J510Ul760pemUqmkUqnkkEMO6dHzly9fnrPOOitjx47NwIEDM3z48BxzzDGZM2dOHn744R49GwAAAAAAAAAAeGEq1Wq1Wu8loJ4uv/zyfPazn93nz8eMGZMHH3yw2899/PHHc84552TlypX7nBk6dGhuuummvOtd7+r28wEAAAAAAAAAgBeub70XgHrbs2fPs94fcMABmTBhQn7xi1/02Jm7du3KaaedlrvvvjtJMmLEiMycOTNHHnlkWlpasmzZstx+++154okn8p73vCcDBw7M2972th7bBwAAAAAAAAAAeH6a7ej1brrppvzmN7/J0UcfnaOPPjqTJk3K73//+4wdOzZJzzTbfeITn8hVV12VJJk4cWJWrVqVgw8++Fkz8+bNyxVXXJEkGTlyZDZu3JghQ4Z06x4AAAAAAAAAAMALI2wHHXjwwQd7LGzX0tKSgw8+ODt27EiS3HvvvWlqaupwdurUqfn+97+fJJkzZ04+/vGPd9seAAAAAAAAAADAC9dQ7wWgt/nOd75TC9odf/zx+wzaJcnll19ee164cGGP7wYAAAAAAAAAAHRM2A72sxUrVtSe3/rWtz7v7Bvf+MYMGjQoSdLc3JyNGzf26G4AAAAAAAAAAEDH+tZ7AUiSrVu3duv3jRgxolu/rzutW7eu9vx8rXZJ0rdv37zuda/LmjVrap+dMGFCj+4HAAAAAAAAAAA8l7AdpTBy5Mhu/b5qtdqt39ddqtXqs9rpxo4dW/iZsWPH1sJ269ev77HdAAAAAAAAAACAfXONLOxHTz75ZJ5++una++HDhxd+pv3M9u3be2ItAAAAAAAAAACgQGma7f7rv/6r3itAj2ttbX3W+4EDBxZ+pv1MS0tLt+8EAAAAAAAAAAAU02wHAAAAAAAAAAAABUrTbDf37FX1XoE62rJlS71X2C8GDx78rPc7d+58zr/9bzt37qw9DxkypEf2AgAAAAAAAAAAnl9pwnaPPNBaPMSfrREjRtR7hf3iwAMPTN++fbN79+4kyaOPPloYtnv00Udrz8OGDevJ9QAAAAAAAAAAgH1wjSzsR5VKJRMnTqy9f+CBBwo/035m0qRJPbIXAAAAAAAAAADw/ErTbFdpqNR7BdgvjjrqqPz6179OkqxduzYnnnjiPmd3796d++6771mfBQAAAAAAAAAA9j/NdrCfTZ06tfa8YsWK551dvXp1duzYkSSZMGFCJkyY0KO7
AQAAAAAAAAAAHStPs51iO3qJt7/97Rk0aFB27NiRu+66K2vXrk1TU1OHs/Pmzas9v+c979lfKwIAAAAAAAAAAP+LZjvoRlOmTEmlUkmlUsmcOXM6nBk6dGhmz55dez9t2rQ8/PDDz5mbN29evv/97ydJhg8fng9+8IM9sjMAAAAAAAAAAFCsNM12qu2ol+3bt+ef//mfn/VvTzzxxLN+/rGPfew5n/vkJz/Z6TM/9KEP5bbbbst//Md/ZMOGDXnta1+bCy64IEceeWRaWlqybNmyrFy5MknSp0+f3HzzzRk6dGinzwMAAAAAAAAAALqmUq1Wq/VeIknePODqeq9AHf2o7eN1O/vBBx/M2LFjX/TnOvrTmTJlSlavXp0k+fjHP77Pdrsk2bZtW9797nfnBz/4wT5nhgwZkhtvvDHnnHPOi94PAAAAAAAAAADoPqVptlNsR2/zkpe8JLfffnuWLVuWb37zm/npT3+aRx55JIMGDcqYMWNy2mmn5cILL8wrXvGKeq8KAAAAAAAAAAC9Xmma7U4+4Jp6r0Ad/eCpq+q9AgAAAAAAAAAAwD411HsBAAAAAAAAAAAAKLvSXCPrHlkAAAAAAAAAAADKSrMdAAAAAAAAAAAAFChNs51iOwAAAAAAAAAAAMpKsx0AAAAAAAAAAAAUKFGznWo7AAAAAAAAAAAAykmzHQAAAAAAAAAAABQoTbNdFNsBAAAAAAAAAABQUqUJ21UapO0AAAAAAAAAAAAoJ9fIAgAAAAAAAAAAQIHyNNsptgMAAAAAAAAAAKCkNNsBAAAAAAAAAABAgdI026m2AwAAAAAAAAAAoKw02wEAAAAAAAAAAECB0jTbKbYDAAAAAAAAAACgrDTbAQAAAAAAAAAAQIHyNNs1qLYDAAAAAAAAAACgnEoTtnOPLAAAAAAAAAAAAGXlGlkAAAAAAAAAAAAoUJpmO8V2AAAAAAAAAAAAlJVmOwAAAAAAAAAAAChQomY71XYAAAAAAAAAAACUk2Y7AAAAAAAAAAAAKFCaZrsotgMAAAAAAAAAAKCkNNsBAAAAAAAAAABAgdI021UaVNsBAAAAAAAAAABQTqUJ27lGFgAAAAAAAAAAgLJyjSwAAAAAAAAAAAAUKE2zXaWi2g4AAAAAAAAAAIBy0mwHAAAAAAAAAAAABTTbAQAAAAAAAAAAQAHNdgAAAAAAAAAAAFCgNM12Yn8AAAAAAAAAAACUlYgbAAAAAAAAAAAAFChNs12lUqn3CgAAAAAAAAAAANChEoXt6r0BAAAAAAAAAAAAdMw1sgAAAAAAAAAAAFCgNM12qu0AAAAAAAAAAAAoK812AAAAAAAAAAAAUKA0zXaK7QAAAAAAAAAAACgrzXYAAAAAAAAAAABQoDzNdg2q7QAAAAAAAAAAACgnzXYAAAAAAAAAAABQoDTNdqlotgMAAAAAAAAAAKCcShO2k7UDAAAAAAAAAACgrFwjCwAAAAAAAAAAAAVK1Gyn2g4AAAAAAAAAAIBy0mwHAAAAAAAAAAAABUrTbCf2BwAAAAAAAAAAQFmJuAEAAAAAAAAAAECB0jTbVSqVeq8AAAAAAAAAAAAAHdJsBwAAAAAAAAAAAAU02wEAAAAAAAAAAECB8oTtdOwBAAAAAAAAAABQUiJuAAAAAAAAAAAAUKA0zXZxjSwAAAAAAAAAAAAlpdkOAAAAAAAAAAAACpSm2U6xHQAAAAAAAAAAAGWl2Q4AAAAAAAAAAAAKlKfZrkG1HQAAAAAAAAAAAOWk2Q4AAAAAAAAAAAAKlKbZLhXNdgAAAAAAAAAAAJRTacJ2snYAAAAAAAAAAACUlWtkAQAAAAAAAAAAoEB5mu0aVNsBAAAAAAAAAABQTprtAAAAAAAAAAAAoEBpmu1S0WwHAAAAAAAAAABAOWm2AwAAAAAAAAAAgAKlabZTbAcAAAAAAAAAAEBZabaD/7Fq1apMnz4948ePz6BBg3LQQQfl1a9+dWbPnp2NGzf2yJnNzc35
yEc+kje84Q0ZPnx4+vXrl0GDBmXs2LH5P//n/+QrX/lKdu7c2SNnAwAAAAAAAAAAL1ylWq1W671Ekrz3uP+v3itQR1//yYV1O7utrS0zZ87MggUL9jnT2NiY6667Lpdcckm3nFmtVvOxj30sn/nMZ7J79+7nnR09enQWLlyY4447rlvOBgAAAAAAAAAAXrzSXCMb18hSB9VqNeeee26WLl2aJDnwwAMzY8aMTJ48OW1tbVm5cmWWLFmSXbt25dJLL02/fv1y8cUXd/ncj370o/n0pz9de3/iiSfmlFNOyStf+cq0tLTkN7/5Tb7+9a9n+/bteeihh3LyySfnpz/9aY444ogunw0AAAAAAAAAALx45Wm2O16zXW/29TX1aba75ZZbMm3atCTJiBEjsnr16hx++OHPmlm8eHHOPvvsVKvVDBgwIOvXr88hhxzS6TN///vfZ+zYsdmzZ0/69OmT5cuX57TTTnvOXEtLS972trflzjvvTJK8853vzJIlSzp9LgAAAAAAAAAA0HkN9V7gGZVKxasXv+qhWq3myiuvrL2/4YYbnhO0S5KzzjorF110UZI/XTl79dVXd+nc22+/PXv27EmSnH766R0G7ZJkyJAhueGGG2rvf/zjH3fpXAAAAAAAAAAAoPNKE7aD/W3NmjXZtGlTkmTMmDE588wz9zl7+eWX156XLl2atra2Tp/7yCOP1J4nTpz4vLPtf/7kk092+kwAAAAAAAAAAKBr+tZ7gWdUGurTbkbvtWLFitrzqaeemoaGfWdPx40bl4kTJ6a5uTmtra258847c/LJJ3fq3L/4i7+oPW/YsOF5Z9v//IgjjujUeQAAAAAAAAAAQNeVJmxH77Z169Zu/b4RI0YUzqxbt6723NTUVDjf1NSU5ubm2mc7G7abOnVq+vfvnz/+8Y9ZtmxZvve97+Vtb3vbc+ZaWloya9as2vsrrriiU+cBAAAAAAAAAABdV5qw3Utf1ljvFaijkSNHduv3VavVwpn2rXFjx44tnG8/s379+s4tluTlL3955s2blw984APZu3dv3v72t+fNb35zTjnllIwaNSqtra35zW9+k/nz52f79u3p169f/vmf/znnnHNOp88EAAAAAAAAAAC6pjRhu3M+9Kp6r0Adzfv2/j/z8ccfrz0PHz68cL79zPbt27t09vvf//6MGTMmV1xxRZqbm/OjH/0oP/rRj541U6lU8v73vz+zZs3KxIkTu3QeAAAAAAAAAADQNQ31XgDqpbW1tfY8cODAwvn2My0tLV0+/61vfWu+8IUv5Mgjj+zw59VqNV/96lfzqU99Ko888kiXzwMAAAAAAAAAADpP2A7q4A9/+EPe8IY35JRTTsnvf//7zJ07Nxs3bkxbW1taWlpy11135ZxzzslTTz2Vb3zjG2lqasovf/nLeq8NAAAAAAAAAAC9VmmukV00b329V6COtmzZst/PHDx4cLZt25Yk2blzZ+F8+5khQ4Z0+twtW7bk2GOPzcMPP5xhw4blnnvuyaRJk2o/79+/f44//vgcf/zxec1rXpOPfOQjeeihh3LGGWfkV7/6Vfr169fpswEAAAAAAAAAgM4pTdhu+5Zd9V6BOhoxYsR+P3PYsGG1sN2jjz5aON9+ZtiwYZ0+91Of+lQefvjhJMkVV1zxrKDd/zZ79ux89atfTXNzczZu3Jjvfve7eec739npswEAAAAAAAAAgM5xjSy9VvuQ2wMPPFA4337m+QJyRb7zne/Unt/ylrc872xDQ0NOOumk2vt77rmn0+cCAAAAAAAAAACdV56wXaXi1ZtfdXDUUUfVnteuXVs4336m/WdfrGda7ZIX1pA3dOjQ2nNra2unzwUAAAAAAAAAADqvPGE72M+mTp1ae77tttuyd+/efc7ef//9aW5uTpIMHjw4J5xwQqfPHTx4cO35oYceKpzftGlT7Xn48OGdPhcAAAAAAAAAAOi80oTtKpWKVy9+1cNxxx2X0aNHJ/lToG3JkiX7
nJ03b17t+YwzzkhjY2Onz23fivfNb37zeWe3bduWf//3f6+9P/bYYzt9LgAAAAAAAAAA0HmlCdvB/tbQ0JBrrrmm9n7WrFlZv379c+aWLFmSG2+8MUkyYMCAXHXVVfv8zilTptQChHPmzOlw5rzzzqs9z58/PzfddFOHcy0tLTn77LPzxBNPJElGjRqVk08+ufD3AgAAAAAAAAAAul/fei/wjIrYH3Uwbdq0LF++PMuXL8+WLVvS1NSUGTNmZPLkyWlra8vKlSuzePHiVKvVJMncuXNz6KGHdunM6dOn55Zbbsnq1atTrVZz4YUX5pZbbsk73vGOjB49Om1tbfnFL36RW265JY888kiSpE+fPvnSl76UgQMHdvl3BgAAAAAAAAAAXrzShO1Sp6tE6d0qlUoWLVqUGTNmZNGiRWltbc3111//nLkBAwbk2muvzaxZs7p8Zp8+ffK9730vF1xwQW699dYkyZo1a7JmzZoO54cPH54vf/nLOe2007p8NgAAAAAAAAAA0DnlCdtBnTQ2NmbhwoWZOXNm5s+fn5/85CfZvHlz+vfvn1GjRuWUU07JhRdemIkTJ3bbmYMHD863vvWtXH755VmwYEHuvvvu/O53v0tLS0v69euXl770pXnNa16Tv/mbv8l5552XoUOHdtvZAAAAAAAAAADAi1epPnM/Zp1dOHV+vVegjv6/FdPrvQIAAAAAAAAAAMA+NdR7AQAAAAAAAAAAACi70lwjW2mo1HsFAAAAAAAAAAAA6JBmOwAAAAAAAAAAAChQmma7VDTbAQAAAAAAAAAAUE6a7QAAAAAAAAAAAKBAaZrtFNsBAAAAAAAAAABQVuUJ2zVI2wEAAAAAAAAAAFBOrpEFAAAAAAAAAACAAqVptnOPLAAAAAAAAAAAAGWl2Q4AAAAAAAAAAAAKlKbZTrEdAAAAAAAAAAAAZaXZDgAAAAAAAAAAAAqUp9muQbUdAAAAAAAAAAAA5aTZDgAAAAAAAAAAAAqUp9muotkOAAAAAAAAAACAcipN2C6ydgAAAAAAAAAAAJSUa2QBAAAAAAAAAACgQGma7SoNqu0AAAAAAAAAAAAoJ812AAAAAAAAAAAAUKA8zXYVzXYAAAAAAAAAAACUk2Y7AAAAAAAAAAAAKFCaZrs0aLYDAAAAAAAAAACgnDTbAQAAAAAAAAAAQIHSNNtVFNsBAAAAAAAAAABQUiUK20nbAQAAAAAAAAAAUE6ukQUAAAAAAAAAAIACpWm2S4NmOwAAAAAAAAAAAMpJsx0AAAAAAAAAAAAUKE2zXUWxHQAAAAAAAAAAACWl2Q4AAAAAAAAAAAAKlKfZrkG1HQAAAAAAAAAAAOWk2Q4AAAAAAAAAAAAKlKbZLhXNdgAAAAAAAAAAAJRTacJ2FWE7AAAAAAAAAAAASso1sgAAAAAAAAAAAFCgPM12Yn8AAAAAAAAAAACUlIgbAAAAAAAAAAAAFChPs12lUu8VAAAAAAAAAAAAoEOa7QAAAAAAAAAAAKBAaZrtotkOAAAAAAAAAACAktJsBwAAAAAAAAAAAAVK02xXEfsDAAAAAAAAAACgpMoTtnONLAAAAAAAAAAAACWlTw4AAAAAAAAAAAAKlKbZLg2a7QAAAAAAAAAAACgnzXYAAAAAAAAAAABQoDTNdpWKZjsAAAAAAAAAAADKSbMdAAAAAAAAAAAAFChRs129NwAAAAAAAAAAAICOabYDAAAAAAAAAACAAqVptkuDajsAAAAAAAAAAADKqTRhu4p7ZAEAAAAAAAAAACgp18gCAAAAAAAAAABAgRI129V7AwAAAAAAAAAAAOiYZjsAAAAAAAAAAAAoUJpmuzSotgMAAAAAAAAAAKCcNNsBAAAAAAAAAABAgdI021Uqmu0AAAAAAAAAAAAoJ812AAAAAAAAAAAAUKA8zXYNmu0AAAAAAAAAAAAop9KE7SJrBwAAAAAAAAAAQEm5RhYAAAAAAAAAAAAKlKbZrlJRbQcAAAAAAAAA
AEA5abYDAAAAAAAAAACAAuVptmvQbAcAAAAAAAAAAEA5abYDAAAAAAAAAACAAuVptqtotgMAAAAAAAAAAKCcNNsBAAAAAAAAAABAgdI020WxHQAAAAAAAAAAACVVmrCda2Spt1WrVuXrX/961qxZk82bN6d///4ZNWpUTj311Pzd3/1dJkyY0GNn33vvvbn11ltzxx135L//+7/T0tKSl770pXnZy16WpqamTJkyJWeccUb69+/fYzsAAAAAAAAAAAD7VqlWq9V6L5Ek13z8h/VegTq66uqT6nZ2W1tbZs6cmQULFuxzprGxMdddd10uueSSbj1769atef/7359vf/vbhbMPPPBADjnkkG49HwAAAAAAAAAAeGFK1GxX7w3ojarVas4999wsXbo0SXLggQdmxowZmTx5ctra2rJy5cosWbIku3btyqWXXpp+/frl4osv7paz//CHP+Skk07Khg0bkiRjx47NO97xjhxxxBEZOnRoWltb89vf/jZ33HFH1q5d2y1nAgAAAAAAAAAAnVOaZrtPzNFs15tdOac+zXa33HJLpk2bliQZMWJEVq9encMPP/xZM4sXL87ZZ5+darWaAQMGZP369V1umHv66afz+te/Pj/72c9SqVTyqU99KrNnz07fvh3nX7ds2ZKhQ4dmwIABXToXAAAAAAAAAADonIZ6L/CMSsWrN7/qoVqt5sorr6y9v+GGG54TtEuSs846KxdddFGSP105e/XVV3f57E9/+tP52c9+liT55Cc/mY9+9KP7DNolyciRIwXtAAAAAAAAAACgjkrTbPfJqzXb9WYf+/j+b7a766678td//ddJkjFjxuR3v/tdGho6zp/ef//9GT9+fJJk8ODB2bp1a6fDb7t27cqoUaPy2GOPZcyYMbn//vvTp0+fzv0SAAAAAAAAAADAfrHvKq39rFKvejN6rRUrVtSeTz311H0G7ZJk3LhxmThxYpqbm9Pa2po777wzJ598cqfOXbZsWR577LEkybnnnitoBwAAAAAAAAAA/w8oTdiO3m3r1q3d+n0jRowonFm3bl3tuampqXC+qakpzc3Ntc92Nmy3evXq2vPrX//6VKvVLFiwIN/4xjeybt26bN++PS996Uvz2te+Nqeffnre+973pn///p06CwAAAAAAAAAA6B6lCdsdOHjfrWL8+Rs5cmS3ft8LuR15w4YNteexY8cWzrefWb9+fecWS7J27dra85AhQ3LiiSfmxz/+8bNmNm/enM2bN+f73/9+5s6dm+XLl+dVr3pVp88EAAAAAAAAAAC6pjRhu+P/eki9V6CXefzxx2vPw4cPL5xvP7N9+/ZOn7t58+ba84UXXpgNGzZk0KBBOf/889PU1JSGhobcd999+cpXvpLt27dn48aNmTJlSv7rv/4rr3zlKzt9LgAAAAAAAAAA0HmlCdvB/tba2lp7HjhwYOF8+5mWlpZOn9s+5Ldhw4aMHj06d9xxR8aNG1f793PPPTeXXXZZ3vzmN2f9+vXZunVr/uEf/iHf/e53O30uAAAAAAAAAADQee5uhf3sf19x+5WvfOVZQbtnHHzwwfnmN79Ze/+9730vv/3tb3t8PwAAAAAAAAAA4LlK02x395rW4iH+bG3ZsmW/nzl48OBs27YtSbJz587C+fYzQ4Z0/trjwYMH57HHHkuSjB8/PieddNI+Z48++ugce+yxuffee5MkP/zhDzN+/PhOnw0AAAAAAAAAAHROacJ2T+2oFg/xZ2vEiBH7/cxhw4bVwnaPPvpo4Xz7mWHDhnX63IMOOqgWtvvLv/zLwvm//Mu/rIXtNNsBAAAAAAAAAEB9uEaWXmvSpEm15wceeKBwvv1M+8925dyhQ4cWzrcP9rW0tHT6XAAAAAAAAAAAoPNKE7arVLx686sejjrqqNrz2rVrC+fbz7T/7Iv12te+tvb8xBNPFM5v37699vxCwnkAAAAAAAAAAED3K03YDva3qVOn1p5vu+227N27d5+z999/
f5qbm5MkgwcPzgknnNDpc0877bTa83/+538WzrefOeywwzp9LgAAAAAAAAAA0HmlCdtV/K9X/68ejjvuuIwePTpJsmnTpixZsmSfs/Pmzas9n3HGGWlsbOz0uccee2wmTJiQJPntb3+bH/7wh/uc/dnPfpZ77703SdKnT5+ccsopnT4XAAAAAAAAAADovPKE7UpwlalX/V710NDQkGuuuab2ftasWVm/fv1z5pYsWZIbb7wxSTJgwIBcddVV+/zOKVOmpFKppFKpZM6cOfucu+6662rP73vf+/K73/3uOTObN2/OueeeW3t/7rnn5pWvfOXz/k4AAAAAAAAAAEDP6FvvBaCepk2bluXLl2f58uXZsmVLmpqaMmPGjEyePDltbW1ZuXJlFi9enGq1miSZO3duDj300C6fe8YZZ2T69OmZP39+HnrooRx11FGZMWNGmpqa0tDQkPvuuy9f/vKXs3379iTJuHHj8vnPf77L5wIAAAAAAAAAAJ1TmrBdvdrN6N0qlUoWLVqUGTNmZNGiRWltbc3111//nLkBAwbk2muvzaxZs7rt7C9/+cs58MAD86//+q/ZsWNHvvCFL3Q491d/9VdZvHhxDjrooG47GwAAAAAAAAAAeHFKc40s1EtjY2MWLlyYH/3oR/nbv/3bHHrooRk4cGCGDh2aI444IpdddlnWrVuXyy67rFvP7dOnT77whS/k3nvvzd///d/nsMMOy+DBg9PY2JjRo0fnrLPOyr/9279lzZo1Ofjgg7v1bAAAAAAAAAAA4MWpVJ+5H7POPvuZ1fVegTq67ENvrPcKAAAAAAAAAAAA+6TZDgAAAAAAAAAAAAr0rfcCz6hU6r0BAAAAAAAAAAAAdEyzHQAAAAAAAAAAABQoTbOdajsAAAAAAAAAAADKqjRhO1k7AAAAAAAAAAAAyso1sgAAAAAAAAAAAFCgRM12qu0AAAAAAAAAAAAoJ812AAAAAAAAAAAAUKBEzXb13gAAAAAAAAAAAAA6ptkOAAAAAAAAAAAACpSo2U61HQAAAAAAAAAAAOWk2Q4AAAAAAAAAAAAKlKjZrt4bAAAAAAAAAAAAQMfKE7ar9wIAAAAAAAAAAACwD66RBQAAAAAAAAAAgALlabZzjywAAAAAAAAAAAAlpdkOAAAAAAAAAAAACpSo2a7eGwAAAAAAAAAAAEDHNNsBAAAAAAAAAABAgRI126m2AwAAAAAAAAAAoJw02wEAAAAAAAAAAECBEjXb1XsDAAAAAAAAAAAA6FiJwnbSdgAAAAAAAAAAAJSTa2QBAAAAAAAAAACgQIma7eq9AQAAAAAAAAAAAHRMsx0AAAAAAAAAAAAU0GwHAAAAAAAAAAAABTTbAQAAAAAAAAAAQIESNduptgMAAAAAAAAAAKCcNNsBAAAAAAAAAABAgRI129V7AwAAAAAAAAAAAOhYicJ20nYAAAAAAAAAAACUk2tkAQAAAAAAAAAAoEBpmu2i2A4AAAAAAAAAAICS0mwHAAAAAAAAAAAABUrTbFepqLYDAAAAAAAAAACgnDTbAQAAAAAAAAAAQAHNdgAAAAAAAAAAAFBAsx0AAAAAAAAAAAAUKFGzXb03AAAAAAAAAAAAgI6VKGwnbQcAAAAAAAAAAEA5uUYWAAAAAAAAAAAACpSo2a7eGwAAAAAAAAAAAEDHNNsBAAAAAAAAAABAgRI126m2AwAAAAAAAAAAoJw02wEAAAAAAAAAAEABzXYAAAAAAAAAAABQQLMdAAAAAAAAAAAAFChRs129NwAAAAAAAAAAAICOlShsJ20HAAAAAAAAAABAOblGFgAAAAAAAAAAAAqUp9muQbMdAAAAAAAAAAAA5aTZDgAAAAAAAAAAAAqUp9lOsR0AAAAAAAAAAAAlpdkOAAAAAAAAAAAACpSo2U61HQAAAAAAAAAAAOWk2Q4AAAAAAAAAAAAKlKjZrt4bAAAAAAAAAAAAQMdKFLaTtgMAAAAAAAAAAKCcXCMLAAAAAAAA
AAAABTTbAQAAAAAAAAAAQAHNdgAAAAAAAAAAAFCgRM129d4AAAAAAAAAAAAAOqbZDgAAAAAAAAAAAAqUptlOtR0AAAAAAAAAAABlpdkOAAAAAAAAAAAACpSm2a6i2Q4AAAAAAAAAAICSKlHYrt4bAAAAAAAAAAAAQMdcIwsAAAAAAAAAAAAFytNs16DaDgAAAAAAAAAAgHLSbAf/Y9WqVZk+fXrGjx+fQYMG5aCDDsqrX/3qzJ49Oxs3btxve/zjP/5jKpVK7TV9+vT9djYAAAAAAAAAANCx8jTbKbajTtra2jJz5swsWLDgWf/+1FNPZfv27fnlL3+ZG264Idddd10uueSSHt3lpz/9aT7zmc/06BkAAAAAAAAAAMCLV5qwHdRDtVrNueeem6VLlyZJDjzwwMyYMSOTJ09OW1tbVq5cmSVLlmTXrl259NJL069fv1x88cU9sktbW1vOP//87NmzJ4MGDcqOHTt65BwAAAAAAAAAAODFK03YrqLajjpYsGBBLWg3YsSIrF69Oocffnjt5+973/uyePHinH322alWq7nssssyderUHHLIId2+y5w5c/KrX/0qQ4YMyezZs3PllVd2+xkAAAAAAAAAAEDnNNR7AaiXarX6rEDbDTfc8Kyg3TPOOuusXHTRRUn+1D539dVXd/sua9euzdy5c5Mkc+fOzahRo7r9DAAAAAAAAAAAoPNKE7arVCpevfhVD2vWrMmmTZuSJGPGjMmZZ565z9nLL7+89rx06dK0tbV12x5tbW2ZPn169uzZkze96U254IILuu27AQAAAAAAAACA7lGia2TrvQH1tHXr1m79vhEjRhTOrFixovZ86qmnpqFh39nTcePGZeLEiWlubk5ra2vuvPPOnHzyyd2y61VXXZXf/OY3OeCAA3LzzTfXLXwIAAAAAAAAAADsW2nCdn367K33CtTRyJEju/X7qtVq4cy6detqz01NTYXzTU1NaW5urn22O8J29957b+bNm5ck+cQnPpFx48Z1+TsBAAAAAAAAAIDuV5qw3fDhO+u9Ar3Mhg0bas9jx44tnG8/s379+i6fv2vXrtr1sccee2wuvfTSLn8nAAAAAAAAAADQM/Z9byb8mXv88cdrz8OHDy+cbz+zffv2Lp9/5ZVXZv369enfv3++8pWvPO81tgAAAAAAAAAAQH1J99Brtba21p4HDhxYON9+pqWlpUtn33PPPfnsZz+bJPnYxz6WI444okvfBwAAAAAAAAAA9KzSXCO7bdsB9V6BOtqyZUu9V9hvdu3alfPPPz979+7NUUcdlY985CP1XgkAAAAAAAAAAChQmrDd3r196r0CdTRixIj9fubgwYOzbdu2JMnOnTsL59vPDBkypNPn/tM//VM2bNiQPn365Ktf/Wr69evX6e8CAAAAAAAAAAD2D9fI0msNGzas9vzoo48Wzrefaf/ZF+Puu+/O5z//+STJ5ZdfnmOOOaZT3wMAAAAAAAAAAOxfpWm2q1Qq9V6BXmbSpEn53e9+lyR54IEH8qY3vel55x944IFnfbYzbr755uzduzd9+vRJv3798slPfrLDufvuu6/2vG7dutpcY2Njrrjiik6dDQAAAAAAAAAAdJ6wHb3WUUcdlRUrViRJ1q5dmxkzZjzv/Nq1a5/12c6oVqtJkj179uRTn/rUC/rMfffdVwvfDR06VNgOAAAAAAAAAADqwDWy9FpTp06tPd92223Zu3fvPmfvv//+NDc3J0kGDx6cE044ocf3AwAAAAAAAAAAyqM0YbtKxas3v+rhuOOOy+jRo5MkmzZtypIlS/Y5O2/evNrzGWeckcbGxk6dOX/+/FSr1cLX1772tdpn3vve99b+ffv27Z06FwAAAAAAAAAA6JrShO1gf2toaMg111xTez9r1qysX7/+OXNLlizJjTfemCQZMGBArrrqqn1+55QpU1KpVFKpVDJnzpxu3xkAAAAAAAAAAKiPvvVe4BmVhjrVm9GrTZs2LcuXL8/y
5cuzZcuWNDU1ZcaMGZk8eXLa2tqycuXKLF68ONVqNUkyd+7cHHrooXXeGgAAAAAAAAAA2N9KE7aDeqhUKlm0aFFmzJiRRYsWpbW1Nddff/1z5gYMGJBrr702s2bNqsOWAAAAAAAAAABAvZUmbFdRbEedNDY2ZuHChZk5c2bmz5+fn/zkJ9m8eXP69++fUaNG5ZRTTsmFF16YiRMn1ntVAAAAAAAAAACgTirVZ+7HrLM7Vt1f7xWooxPfNK7eKwAAAAAAAAAAAOxTeZrtotoOAAAAAAAAAACAcipN2E7WDgAAAAAAAAAAgLJqqPcCAAAAAAAAAAAAUHalabarVFTbAQAAAAAAAAAAUE6a7QAAAAAAAAAAAKBAiZrt6r0BAAAAAAAAAAAAdEyzHQAAAAAAAAAAABQoUbOdajsAAAAAAAAAAADKSbMdAAAAAAAAAAAAFChRs129NwAAAAAAAAAAAICOlShsJ20HAAAAAAAAAABAOblGFgAAAAAAAAAAAAqUqNmu3hsAAAAAAAAAAABAxzTbAQAAAAAAAAAAQIESNduptgMAAAAAAAAAAKCcNNsBAAAAAAAAAABAgRI129V7AwAAAAAAAAAAAOiYZjsAAAAAAAAAAAAooNkOAAAAAAAAAAAACpQnbBdpOwAAAAAAAAAAAMrJNbIAAAAAAAAAAABQoDzNdortAAAAAAAAAAAAKCnNdgAAAAAAAAAAAFCgRM12qu0AAAAAAAAAAAAoJ812AAAAAAAAAAAAUKBEzXb13gAAAAAAAAAAAAA6ptkOAAAAAAAAAAAACpSo2U61HQAAAAAAAAAAAOVUorBdvTcAAAAAAAAAAACAjrlGFgAAAAAAAAAAAAqUqNlOtR0AAAAAAAAAAADlpNkOAAAAAAAAAAAACpSm2S6K7QAAAAAAAAAAACgpzXYAAAAAAAAAAABQoDTNdpWKajsAAAAAAAAAAADKSbMdAAAAAAAAAAAAFChRs129NwAAAAAAAAAAAICOlShsJ20HAAAAAAAAAABAOblGFgAAAAAAAAAAAAqUp9mu3gsAAAAAAAAAAADAPmi2AwAAAAAAAAAAgALlabar6LYDAAAAAAAAAACgnDTbAQAAAAAAAAAAQIESNdvVewMAAAAAAAAAAADomGY7AAAAAAAAAAAAKFCiZjvVdgAAAAAAAAAAAJRTicJ29d4AAAAAAAAAAAAAOuYaWQAAAAAAAAAAACig2Q4AAAAAAAAAAAAKaLYDAAAAAAAAAACAAiVqtlNtBwAAAAAAAAAAQDlptgMAAAAAAAAAAIACJWq2q/cGAAAAAAAAAAAA0DHNdgAAAAAAAAAAAFCgRM12qu0AAAAAAAAAAAAoJ812AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAq4RhYAAAAAAAAAAAAKaLYDAAAAAAAAAACAAiVqtqv3BgAAAAAAAAAAANAxzXYAAAAAAAAAAABQQNgOAAAAAAAAAAAACgjbAQAAAAAAAAAAQIG+9V7gGZVKvTcAAAAAAAAAAACAjpUnbBdpOwAAAAAAAAAAAMrJNbIAAAAAAAAAAABQoDTNdortAAAAAAAAAAAAKCvNdgAAAAAAAAAAAFCgNM12Fc12AAAAAAAAAAAAlJRmO/gfq1atyvTp0zN+/PgMGjQoBx10UF796ldn9uzZ2bhxY7eetW3btixatCgXXXRRXv/612f48OHp169fhgwZksMOOyznnntuvvvd72bPnj3dei4AAAAAAAAAANA5lWq1Wq33Ekmy6cHH670CdTTmkIPqdnZbW1tmzpyZBQsW7HOmsbEx1113XS655JIun3fZZZflC1/4Qnbv3l04e/TRR2fBggU5/PDDu3wuAAAAAAAAAADQeaW5RhbqoVqt5txzz83SpUuTJAceeGBmzJiRyZMnp62tLStXrsySJUuya9euXHrppenXr18uvvjiLp3561//uha0Gzt2bE488cQcffTRGT58eHbs2JF77rknCxcuzI4dO/Kz
n/0sb3zjG3P33Xdn/PjxXf59AQAAAAAAAACAzilPs90mzXa92Zgx9Wm2u+WWWzJt2rQkyYgRI7J69erntMgtXrw4Z599dqrVagYMGJD169fnkEMO6fSZU6dOzQEHHJBLL700xx9/fIczDzzwQE455ZTa9bUnnXRSfvCDH3T6TAAAAAAAAAAAoGtKE7Z7SNiuVxtdh7BdtVrN2LFjs2nTpiTJrbfemne9610dzl588cX50pe+lCSZPn16vva1r3X63G3btuUlL3lJ4dx9992Xo48+uvb+wQcfzJgxYzp9LgAAAAAAAAAA0HkN9V4A6mXNmjW1oN2YMWNy5pln7nP28ssvrz0vXbo0bW1tnT73hQTtkuR1r3tdDjvssNr7devWdfpMAAAAAAAAAACga/rWe4FnVCqVeq9AHW3durVbv2/EiBGFMytWrKg9n3rqqWlo2Hf2dNy4cZk4cWKam5vT2tqaO++8MyeffHK37Pp8hgwZUnt+6qmnevw8AAAAAAAAAACgY6UJ2+3e/cd6r0AdjRz5F936fS/kduT2TXFNTU2F801NTWlubq59tqfDdn/84x9r5yXJIYcc0qPnAQAAAAAAAAAA+1aasN22x39f7xXoZTZs2FB7Hjt2bOF8+5n169f3yE7tLVy4ME888USS5GUve1kmT57c42cCAAAAAAAAAAAd2/e9mfBn7vHHH689Dx8+vHC+/cz27dt7YqWaRx55JLNnz669/6d/+qfnveYWAAAAAAAAAADoWdI79Fqtra2154EDBxbOt59paWnpkZ2SpK2tLaeffnoeffTRJMnxxx+fiy66qMfOAwAAAAAAAAAAipXmGtmXvuSV9V6BOtqyZUu9VyiFPXv25Lzzzss999yTJHn5y1+eb33rW+nbtzR/qgAAAAAAAAAA0CuVJsHTr9+Aeq9AHY0YMXS/nzl48OBs27YtSbJz587C+fYzQ4YM6fZ99u7dm+nTp2fJkiVJkpe97GW544478opXvKLbzwIAAAAAAAAAAF4c18jSaw0bNqz2/MyVrc+n/Uz7z3aHvXv35vzzz8+CBQuS/N+g3aRJk7r1HAAAAAAAAAAAoHOE7ei12gfZHnjggcL59jPdGYLbs2dP3vve9+Yb3/hGkj9dHbtq1aocfvjh3XYGAAAAAAAAAADQNaUJ21UqXr35VQ9HHXVU7Xnt2rWF8+1n2n+2K/bs2ZO//du/rTXaHXzwwfnxj3+s0Q4AAAAAAAAAAEqmNGE72N+mTp1ae77tttuyd+/efc7ef//9aW5uTpIMHjw4J5xwQpfP3717d97znvdk0aJFSZJRo0Zl9erVmThxYpe/GwAAAAAAAAAA6F59673AMyr1qjej1zruuOMyevToPPTQQ9m0aVOWLFmSd73rXR3Ozps3r/Z8xhlnpLGxsUtn7969O+ecc06WLFmSJBk9enRWrVqVQw89tEvfCwAAAAAAAAAA9AzNdvRaDQ0Nueaaa2rvZ82alfXr1z9nbsmSJbnxxhuTJAMGDMhVV121z++cMmVKKpVKKpVK5syZ0+HM7t278+53v7sWtDvkkEOyevVqQTsAAAAAAAAAACix0jTbQT1MmzYty5cvz/Lly7Nly5Y0NTVlxowZmTx5ctra2rJy5cosXrw41Wo1STJ37twuh+LOP//8LF26NEnSr1+/fPCDH8zPf/7z/PznP3/ez02aNCmTJk3q0tkAAAAAAAAAAEDnCNvRq1UqlSxatCgzZszIokWL0tramuuvv/45cwMGDMi1116bWbNmdfnMu+66q/b89NNP55JLLnlBn/v4xz++z7Y8AAAAAAAAAACgZ5UmbFep1HsDeqvGxsYsXLgwM2fOzPz58/OTn/wkmzdvTv/+/TNq1KiccsopufDCCzNx4sR6rwoAAAAAAAAAANRJpfrM/Zh19sjm1nqvQB39xcsH13sFAAAAAAAAAACAfWqo9wIAAAAAAAAAAABQdqW5RjaukQUAAAAAAAAAAKCkNNsBAAAA
AAAAAABAgdI021U02wEAAAAAAAAAAFBSmu0AAAAAAAAAAACgQHma7eq9AAAAAAAAAAAAAOyDZjsAAAAAAAAAAAAoUJpmu1R02wEAAAAAAAAAAFBOpQnbidoBAAAAAAAAAABQVq6RBQAAAAAAAAAAgALlabZTbQcAAAAAAAAAAEBJabYDAAAAAAAAAACAAqVptlNtBwAAAAAAAAAAQFlptgMAAAAAAAAAAIACpWm202sHAAAAAAAAAABAWWm2AwAAAAAAAAAAgALlabZTbQcAAAAAAAAAAEBJlSZs5yJZAAAAAAAAAAAAyso1sgAAAAAAAAAAAFCgNM12rpEFAAAAAAAAAACgrDTbAQAAAAAAAAAAQAFhOwAAAAAAAAAAACggbAcAAAAAAAAAAAAF+tZ7gWdUKvXeAAAAAAAAAAAAADqm2Q4AAAAAAAAAAAAKlKbZLlFtBwAAAAAAAAAAQDmVJmznGlkAAAAAAAAAAADKyjWyAAAAAAAAAAAAUEDYDgAAAAAAAAAAAAoI2wEAAAAAAAAAAECBvvVeoKZS7wUAAAAAAAAAAACgY5rtAAAAAAAAAAAAoEBpmu0qqu0AAAAAAAAAAAAoKc12AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAqU5xpZt8gCAAAAAAAAAABQUprtAAAAAAAAAAAAoICwHQAAAAAAAAAAABQQtgMAAAAAAAAAAIACfeu9QE2lUu8NAAAAAAAAAAAAoEOa7QAAAAAAAAAAAKBAaZrt9NoBAAAAAAAAAABQVprtAAAAAAAAAAAAoEBpmu1U2wEAAAAAAAAAAFBWpQnbydoBAAAAAAAAAABQVq6RBQAAAAAAAAAAgAKlabZLRbcdAAAAAAAAAAAA5aTZDgAAAAAAAAAAAAoI2wEAAAAAAAAAAEABYTsAAAAAAAAAAAAo0LfeCzyjUu8FAAAAAAAAAAAAYB802wEAAAAAAAAAAECB0jTbqbYDAAAAAAAAAACgrEoTtqtI2wEAAAAAAAAAAFBSrpEFAAAAAAAAAACAAqVptlNsBwAAAAAAAAAAQFlptgMAAAAAAAAAAIACpWm2U2wHAAAAAAAAAABAWWm2AwAAAAAAAAAAgAKlabZTbQcAAAAAAAAAAEBZabYDAAAAAAAAAACAAuVptlNtBwAAAAAAAAAAQEmVJmwnagcAAAAAAAAAAEBZuUYWAAAAAAAAAAAACpSm2U61HQAAAAAAAAAAAGWl2Q4AAAAAAAAAAAAKlKbZTrEdAAAAAAAAAAAAZaXZDgAAAAAAAAAAAAqUptkuFd12AAAAAAAAAAAAlJNmO/gfq1atyvTp0zN+/PgMGjQoBx10UF796ldn9uzZ2bhxY4+du3z58px11lkZO3ZsBg4cmOHDh+eYY47JnDlz8vDDD/fYuQAAAAAAAAAAwAtXqVar1XovkSRP/3FPvVegjvr171O3s9va2jJz5swsWLBgnzONjY257rrrcskll3TbuY8//njOOeecrFy5cp8zQ4cOzU033ZR3vetd3XYuAAAAAAAAAADw4pUmbLf7aWG73qxvv/qE7arVas4666wsXbo0SXLggQdmxowZmTx5ctra2rJy5cosWbIkz/yZ/Ou//msuvvjiLp+7a9euvPnNb87dd9+dJBkxYkRmzpyZI488Mi0tLVm2bFluv/32JEmfPn2ybNmyvO1tb+vyuQAAAAAAAAAAQOcI21EK9Qrb3XLLLZk2bVqSPwXeVq9encMPP/xZM4sXL87ZZ5+darWaAQMGZP369TnkkEO6dO4nPvGJXHXVVUmSiRMnZtWqVTn44IOfNTNv3rxcccUVSZKRI0dm48aNGTJkSJfOBQAAAAAAAAAAOqeh3gtAvVSr1Vx55ZW19zfccMNzgnZJctZZZ+Wiiy5K8qcrZ6+++uoundvS0pJPf/rTtfe33HLLc4J2SXL55Zfnb/7mb5IkW7Zsyec+97kunQsAAAAAAAAAAHSesB291po1a7Jp06YkyZgxY3Lm
mWfuc/byyy+vPS9dujRtbW2dPvc73/lOduzYkSQ5/vjj09TU9ILOXbhwYafPBAAAAAAAAAAAuqY0YbtKpeLVi1/1sGLFitrzqaeemoaGff85jBs3LhMnTkyStLa25s477+yWc9/61rc+7+wb3/jGDBo0KEnS3NycjRs3dvpcAAAAAAAAAACg8/rWewFIkq1bt3br940YMaJwZt26dbXn52uXaz/T3Nxc++zJJ5/cqd1ezLl9+/bN6173uqxZs6b22QkTJnTqXAAAAAAAAAAAoPNKE7bbuWtnvVegjkaOHNmt31etVgtnNmzYUHseO3Zs4Xz7mfXr13d6r/btdC/03GfCdp09FwAAAAAAAAAA6JrShO02bBAiYv96/PHHa8/Dhw8vnG8/s3379k6d+eSTT+bpp5/e7+cCAAAAAAAAAABdU5qw3THHHFPvFehlWltba88DBw4snG8/09LS0uUz9+e5AAAAAAAAAABA1zTUewEAAAAAAAAAAAAou9I029G7bdmyZb+fOXjw4Gzbti1JsnPnzsL59jNDhgzp9Jn/+zv/97/1xLkAAAAAAAAAAEDXCNtRCiNGjNjvZw4bNqwWtnv00UcL59vPDBs2rFNnHnjggenbt292795d+86isF13nAsAAAAAAAAAAHSNa2TptSZNmlR7fuCBBwrn28+0/+yLUalUMnHixP1+LgAAAAAAAAAA0DXCdvRaRx11VO157dq1hfPtZ9p/tifP3b17d+67775uORcAAAAAAAAAAOg8YTt6ralTp9aeb7vttuzdu3efs/fff3+am5uTJIMHD84JJ5zQLeeuWLHieWdXr16dHTt2JEkmTJiQCRMmdPpcAAAAAAAAAACg84Tt6LWOO+64jB49OkmyadOmLFmyZJ+z8+bNqz2fccYZaWxs7PS5b3/72zNo0KAkyV133fW87Xbtz33Pe97T6TMBAAAAAAAAAICuEbaj12poaMg111xTez9r1qysX7/+OXNLlizJjTfemCQZMGBArrrqqn1+55QpU1KpVFKpVDJnzpwOZ4YOHZrZs2fX3k+bNi0PP/zwc+bmzZuX73//+0mS4cOH54Mf/OAL+r0AAAAAAAAAAIDu17feC0A9TZs2LcuXL8/y5cuzZcuWNDU1ZcaMGZk8eXLa2tqycuXKLF68ONVqNUkyd+7cHHrooV0+90Mf+lBuu+22/Md//Ec2bNiQ1772tbngggty5JFHpqWlJcuWLcvKlSuTJH369MnNN9+coUOHdvlcoPc4c+Ln6r0CL8DwUQfkgrl/WXt/8+z/zKN/eKqOG/FC3PrrS+q9Ai/Azl07s2HD//1/pDjssEkZ2DiwjhvxQjzZuqveK/ACtP2xLb//w+9q71856tAM6D+gjhvxQh04uPMt7ew//hsGPcffF/Qsf2PQc/x9Qc/x9wU9y9/Y/5v69NXd9nyE7ejVKpVKFi1alBkzZmTRokVpbW3N9ddf/5y5AQMG5Nprr82sWbO65dyBAwfm3//93/Pud787P/jBD7J169Zce+21z5kbMmRIbrzxxrzjHe/olnMBAAAAAAAAAIDOEbaj12tsbMzChQszc+bMzJ8/Pz/5yU+yefPm9O/fP6NGjcopp5ySCy+8MBMnTuzWc1/ykpfk9ttvz7Jly/LNb34zP/3pT/PII49k0KBBGTNmTE477bRceOGFecUrXtGt5wIAAAAAAAAAAC+esB38jxNPPDEnnnhil77jxz/+8Yv+zOmnn57TTz+9S+cCAAAAAAAAAAA9yyW7AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAoI2wEAAAAAAAAAAEABYTsAAAAAAAAAAAAoIGwHAAAAAAAAAAAABYTtAAAAAAAAAAAAoICwHQAAAAAAAAAAABQQtgMAAAAAAAAAAIACwnYAAAAAAAAAAABQQNgOAAAAAAAAAAAACgjbAQAAAAAAAAAAQAFhOwAAAAAAAAAAACggbAcAAAAAAAAAAAAFhO0AAAAAAAAAAACg
gLAdAAAAAAAAAAAAFBC2AwAAAAAAAAAAgALCdgAAAAAAAAAAAFBA2A4AAAAAAAAAAAAKCNsBAAAAAAAAAABAAWE7AAAAAAAAAAAAKCBsBwAAAAAAAAAAAAWE7QAAAAAAAAAAAKCAsB0AAAAAAAAAAAAUELYDAAAAAAAAAACAAsJ2AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAoI2wEAAAAAAAAAAEABYTsAAAAAAAAAAAAoIGwHAAAAAAAAAAAABYTtAAAAAAAAAAAAoICwHQAAAAAAAAAAABQQtgMAAAAAAAAAAIACwnYAAAAAAAAAAABQQNgOAAAAAAAAAAAACgjbAQAAAAAAAAAAQAFhOwAAAAAAAAAAACggbAcAAAAAAAAAAAAFhO0AAAAAAAAAAACggLAdAAAAAAAAAAAAFBC2AwAAAAAAAAAAgALCdgAAAAAAAAAAAFBA2A4AAAAAAAAAAAAKCNsBAAAAAAAAAABAAWE7AAAAAAAAAAAAKCBsBwAAAAAAAAAAAAWE7QAAAAAAAAAAAKCAsB0AAAAAAAAAAAAUELYDAAAAAAAAAACAAsJ2AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAoI2wEAAAAAAAAAAEABYTsAAAAAAAAAAAAoIGwHAAAAAAAAAAAABYTtAAAAAAAAAAAAoICwHQAAAAAAAAAAABQQtgMAAAAAAAAAAIACwnYAAAAAAAAAAABQQNgOAAAAAAAAAAAACgjbAQAAAAAAAAAAQAFhOwAAAAAAAAAAACggbAcAAAAAAAAAAAAFhO0AAAAAAAAAAACggLAdAAAAAAAAAAAAFBC2AwAAAAAAAAAAgALCdgAAAAAAAAAAAFBA2A4AAAAAAAAAAAAKCNsBAAAAAAAAAABAAWE7AAAAAAAAAAAAKCBsBwAAAAAAAAAAAAWE7QAAAAAAAAAAAKCAsB0AAAAAAAAAAAAUELYDAAAAAAAAAACAAsJ2AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAoI2wEAAAAAAAAAAEABYTsAAAAAAAAAAAAoIGwHAAAAAAAAAAAABYTtAAAAAAAAAAAAoICwHQAAAAAAAAAAABQQtgMAAAAAAAAAAIACwnYAAAAAAAAAAABQQNgOAAAAAAAAAAAACgjbAQAAAAAAAAAAQAFhOwAAAAAAAAAAACggbAcAAAAAAAAAAAAFhO0AAAAAAAAAAACggLAdAAAAAAAAAAAAFBC2AwAAAAAAAAAAgALCdgAAAAAAAAAAAFBA2A4AAAAAAAAAAAAKCNsBAAAAAAAAAABAAWE7AAAAAAAAAAAAKCBsBwAAAAAAAAAAAAWE7QAAAAAAAAAAAKCAsB0AAAAAAAAAAAAUELYDAAAAAAAAAACAAsJ2AAAAAAAAAAAAUEDYDgAAAAAAAAAAAAr0rfcCAAAAAAAAAAAA0N7TTz+dO++8M+vWrcv69euzffv2tLS0JEmGDBmSYcOGZdKkSTnqqKNywgknpH///j2+k7AdAEA3e9cH3pCTzjoyBwwekN/96pHcPOeO/H7jYx3OHnDAARk1alQGDhyYf172qvxq7X/na5/6cR59uDVJMmBg30z78F9n8pvHpXFQv2z5Q0sW3/Afuff23+7PXwlKoVqt5oZ/vSFLlizOk08+mVe96lW56sqrMmHCxA7nn3zyycyfPz/33Xdf+vTpkze+cUqu/NiVGTJkSG1m5cqV+ZcvXJ+HH344r3jFK3LJBy7JySe/ZX/9SlAa1Wo1N3/5xiz/zr/lySefzKRJh+fDs/8x48aN73D+29/+du6777784Q9/yKtedUS+cvPXn/Xzf1/xvSxbtjQPbnogSSXjx43P31/0/rzmNa/t+V8GSsh/w6Dn+PuCnuVvDHqOvy/oOf6+oGf5G2N/eOihh3L11Vdn6dKlaW1tfUGfGTx4cN75znfmyiuvzCGHHNJju7lGFgCgG739fcfkxHcekU++798y49gvZf3PHs7HvnpGGg/o95zZSiUZ
P358nnrqqaxbty7/9J5F2fP03nzws1NrM2df8lc58vWvzD+e/a2895gvZumX1uaDn5uaUeNesj9/LSiFr37tq1m27N9y801fzk/W3J3Xve7oXPB3F2THjh0dzn/xi1/ME088kc997nNZvvw7eeyxR/PRf/xo7ee/WPeLfPgjH8olH7g0a+/9aT4w65J86MMfyi9/+cv99StBaSz45jfy3e99J//y+S/m9ttW5TVHvTYfuPTiPPXUUx3Ojxw5MmeeeWZOPPHEDn/+1FNP5X3v+7t8Z9mK3LbiB3njG9+USz74D3lkyyM9+WtAaflvGPQcf1/Qs/yNQc/x9wU9x98X9Cx/Y/S0G2+8MZMmTcr8+fPT0tKSarX6gl4tLS2ZP39+XvWqV+WLX/xij+0nbAcA0I1Oec9r8t2v/Fcean4sf2zbk299/u707deQppOf2ww0cFD/9OvXL4899liq1Wradj6d1ct/nUNeNbI28/LRw3Lf6gfz6MOtqVaTe77fnKee/GNGHzZ8f/5aUArf+tainD/9/EycODGNjY35wKwP5Omnn86PfvTD58xu3vxwfv7zn+e8887LkCFDctBBB+VDsz+cVavuyMMPP5wkufXWW3PCCSfkLW95S/r165e3vOUtOf744/OtWxft718N6m7J0m/nvHOnZfz4CWlsbMyFf3dxnn766fx49R0dzk+ZMiXHHHNMBg8e3OHPzzrz7Lzh9X+VQYMGpW/ffjnn3eemoaEhv/71r3ry14DS8t8w6Dn+vqBn+RuDnuPvC3qOvy/oWf7G/vxt3bq1W18vxr/8y7/kH/7hH9LW1pZqtZokmThxYs4777xceeWVuf7663PTTTflpptuyvXXX58rr7wy5513XiZO/FOzYrVaza5duzJr1qxcf/313f5/m8Q1sgDwZ2n4qAPqvUKv1Pj/s3fvcVbV9f7H3wMzyoAgKJqA3BQBtbzhsYuKVEqWWllaeeuiR8vsaFlamngqf13OyTKvBaXh+WlhXrv8SjvaTVLTFEM00FIgQeMiKMyFy7B/f5CTI5y+eJrNBub5fDx4PNqbtdd8l6sPiz28Zu1eW+VVg7fNwmeXdjgH855cnD0OGJjHHpzTYfute3XLggUL0r9//zz99NMZMLx33nLCXvnD1KfaX3/3Tx/Luz/yuow+YKcsfnZZ9h+3a7p1S579y2LnuUZaWltqvYQuadmyZZk3b15GjhrZ4RyMHDkqj8x4JIeN73g7+RkzHk1DQ0OGDh2aJFmxYkWGDhuahoaGTH9kevpt1y9//ONjOfTQwzrsb/To0bnzzruc5xpZsXJlrZfQJS1fvjzPPDM/u+02MitWrmh/frcRI/PYHx/Nm998WIftV63qeJ4qlUqH163Po4/NSEtLS4YNG1bclurp3lqp9RK6JNcwqB7zBdVlxqB6zBdUj/mC6jJjXcOOO+5Y3ugVeDGaK/nTn/6Uz3zmM6lUKqmrq8vpp5+eT37yk9lll1026PVPPvlkLr744kycODGVSiXnnXdejjjiiIwYse5NUf4ZYjsA2AKd+tX9a72ELqmhYe1HxR55+m5pbR3c/vxOw3tlx2GN6bfLuudlyZIlGTJkSPbdd9/s+51909LSkieeeKL9HHbv3j3bbN8t/+e641KpVLJmzZrMnj077z1/z41zUKxj1qyZtV5Cl7R48eIkyaJFizqcg7q6ZN68eeuclzlzZqexsbH98ezZTyVJGhsb86c/PZGddnpVlixZkubmpg6vbW5uztKlS5xnupQX52t509L85ekn25/vXt8tCxY82+G59Vm5csU/3Gbx4sX5/Oc/n6OOOipta/7xtrAlcg2D6jFfUF1mDKrHfEH1mC+oLjNGNU2aNCmtra3p1q1bpkyZkmOOOeYVvX6XXXbJVVddlTe96U1573vfmxUrVmTSpEn5z//8z05dp4+RBQDoJG1tbUnWBnIvVV9f3/57L7X11ltnt912y8KFCzNt2rQ8/PDDWbp0aUaPHp1u
3db+NW3XXXdNfX19pk+fnoceeihPPPFEhgwZkj59+lT/gGAT8uKb8aampg7PNzU1dXij/tLtW1rW/Ym35ubm9u0bGxvT3Ny8QfuDLdkrna9X4tlnn80XvvCFvO51r8t73vOef2pfsLlyDYPqMV9QXWYMqsd8QfWYL6guM0Y13X777amrq8sxxxzzikO7lzrmmGNy7LHHplKp5Pbbb+/EFa4ltgMA6CRr1qzJihUr0rNnx493Xd+bhBefr1QqWbhwYftd6/76179m6623To8ePZIkPXv2zKJFi7Jq1aoka99cLF++PH379q368cCmpGfPntlhhx3y5JN/vyNWW1tb5syZk2HDhq2z/dChQ7Nq1arMnTu3/bm5c+dm9erV7berHzp0aIf9JclTTz213v3BluyVzteGmjt3br7whS/kkEMOyfHHH98JK4XNk2sYVI/5guoyY1A95guqx3xBdZkxqunpp59OkrzlLW/5p/f14j5e3Gdn8jGyALAF+vY5v6/1Erqsw967Km98556ZfOV9WTj/hbztxP3SNHRlvnHGL7KidXWHbYfvuX3OuXR4+vfvn0WLFuW2Sx/LAW/aPbuPWplvf/q+NC9fmY99eYesWVOfKV+dkWVLWzN89x1zxpf2zE+u/V1++1O3zq6Fi390Yq2X0GUdf/zxmTLlhhxxxJHZeeedc/XV38nWW2+d448/YZ3Iddiw4dlnn31y/fXX54wzzsiQIUNz6aWXZezYsTnwwIOSJCeffHJOO+20zJs3PwcffHDuvvvuTJ8+Pd/+9ncyatToWhxil9fctLLWS+iyjjnmvbn55hvz5je9JYMGDcq1//XdbLXV1nnnO45dZ75WrVqZp+fNzpo1a7JmzZo0NGyVHXcYlGTtXVuT5JFHpueLX/xSPviBk/Oe97xvox8P69ez11a1XkKX5RoG1WO+oLrMGFSP+YLqMV9QXWZsy7dgwYKafN2VK9f+G8GL32f+Z7y4jxf32ZnqKpVKpdP3CgDU1DEjL6n1Erq09575+hz63tek5zZb5c8z/prvfP4Xmfv44vQf0DuX/PT9+dKpt+WPv5+X/jv3zNnfemMGDhyYHj16ZEVLW+bMXJgp37gnjz0wL0my7fY98/5PH5y93jA0PXo2ZOmipvzylsdyy7fur/FRdl03PHZWrZfQZVUqlVxxxeX5wY03pqlpefbcc89MuODCjBw5MvPnz89Rbz8qEydOzP5j9k9La0sefPD3mTx5cqZNm5Zu3bpn3LhxmXDBhA4fw3z7Hbfn8ssvy7x58zJo0KCcdebHM378+BoeZde2fFlrrZfQZVUqlUz69jdz6223pKmpKbvvvnvO/dR5GTFitzz77DN573HvzjcuuSL77rNfVqxckc9ecG5+85vfrLOf+++bliQ5/aOn5qFpD7bfqfVFH/zAKfnQB0/ZKMfEurbp3aO8EVXhGgbVY76guswYVI/5guoxX1BdZmzL172+Nh+Uuttuu+XJJ5/MmWeemUsu+ef+vfsTn/hELr300uy666554oknOmmFa4ntAGALJLbbPPTfuWdO/er+7Y+/fc7vs+jpdT9ulk2L2G7z0NLaklmz/n73x1GjRqexR2MNV8SGENttHlasXJG/PP33j3UYvPMu2Xqrf/4nDak+sd3mwTUMqsd8QXWZMage8wXVY76guszY5qlWsd2HPvShXHvttendu3ceeOCBjBw58n+1n1mzZuWAAw7I8uXLc9JJJ2Xy5Mmdus7a/NcBAAAAAAAAAACAJB/4wAeSJMuXL8/YsWNz8803v+J93HTTTRk3blyWLVuWJPngBz/YmUtMktR3+h4BAAAAAAAAAABgA40bNy4nnHBCrr/++ixcuDDvec97MnTo0Bx++OE54IADMnz48PTv3z+NjWvvjtjS0pJFixblqaeeyv3335/bb789c+bMSaVSSV1dXY477riMGzeu09cptgMAAAAAAAAAAKCmrr766rS0tOSWW25JksyZMycTJ07M
xIkTN+j1lUolSfKOd7wj11xzTVXW6GNkAQAAAAAAAAAAqKmtttoqN910U6699toMHTo0lUrlFf0aMmRIJk+enFtuuSVbbbVVVdboznYAAAAAAAAAAABsEk466aScdNJJmTp1an76059m+vTpmTVrVpYsWZJly5YlSXr37p2+fftm9OjR2WuvvfK2t70tBx54YOrq6qq6NrEdAAAAAAAAAAAAm5SDDjooBx10UK2X0YGPkQUAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAICC+lovAADofDc8dlatl8AGaGltyaxZM9sfX/yjE9PYo7GGK2JDnPvhW2u9BDbAtv0acsRxg9off/1zd+X5JatquCI2xH9OPLrWS2ADdG+tdHjcs9dWaezRo0arAQAAgE3T7DlLar0ENkBb28oOj+fPfyHdu7fWaDW8EoMG
9qn1EtgAq1etWefxqu5tNVoNG6p7vXu3/SP+6wAAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAX1tV4AAMCWpFKp5Iorr8hNN92Y5cuXZ4899siFEy7MbruNXO/2y5cvz+TJkzNt2rR07949hxwyLhMumJA+ffq0b3PHHXfksssvzfz58zNo0KCcdeZZOeyw8RvrkGCT8pZ37J7Xjh2eHo0NeXrOktxy3cN5dt4L62zXp29j9tlnn/bHr3n13unWrS7dunfL5z7xkzQtX5kk2WvMoLz16D3Tb/ueeW5RU35266N55KH5G+twYJPh+gXVZcageswXVJcZg+oxX1A9
lUol119/dW6//UdpalqeESNG5YwzPpVhw3ZdZ9ulS5dk0qRvZubMmXnhhRey7bZ9c8gh43PiiaekoWGrdba/997f5KKLPpM3vnF8zjnncxvhaGDTU6lU8s1vXplbbr05y5atvYadf/4F2W3Ebuvd/uXXsLEHH5Lzzvts+zXsgQfuz7+eenIaGxvbX9O7d5/898/v2ijHw5ZrwoQJ+dKXvpS6urqsXr260/brznYAAJ3omu9ek1tvvSXfnvSd/HbqPdl33/1y6mmnpqmpab3bX3XVVXn++edzySWX5LbbfpjFixflvPPPa//9P0z/Qz79mXNz1pkfz/2/eyBn/ttZOffT52bGjBkb65BgkzHuLSNzwEHDMunrU3PhWT/O7CcW57RPHJSttu6+zrYvLG3Jww8/3P7rK5/9aR6b/mxmPvJse2g3ZHi/HH/qv+Snt87I+R/7YX5226M54dQDsvPQvhv5yKD2XL+guswYVI/5guoyY1A95guq5+abv5ef//wnueiiSzJlys+yxx57ZcKET6SlpXmdbVtbWzJgwICcf/75ufrqq3PRRV/LAw/ck2uuuWqdbZ9/fmkmTbo0e+yx18Y4DNhkXXvtd3PbD2/NN6+amF//6u7ss88+Of30D6e5ed0ZSzpew26+6bYsfm5xJlz42XW2m3r3vbnv3gdy370PCO1qbOHChZ36q5YqlUoqlUqn7tOd7QBgC9TS2lLrJXRZ3//+93LCCSdm8JDBqaSSU089NTfddGN+dvtPc8QRR3bYdvbs2Xn44Yfz5S9/OX369EnPnj1z1lkfz3HHvS9PPvVkBgwYkO997/q84Q0H5uCxB2d12+ocPPbgvP71b8j137suF1747zU6yq5t234NtV5Cl3XQobvkvt/8OS3NLem1Tffc8+sn8rpDhueAg4bkkYee7rDtNn06vtXZaVCv7LnPgNzw3fvbz+EhbxmRP89ckLlPLkzvPvWZ++TC/HnWgox7y4j8+MY/bLTj4u9cv2rH9Quqy4xB9ZgvqC4zBtVjvrZ8bW0ra72ELusnP7k5Rx/9ngwZMjhJcsIJH8gdd/woU6f+Im96U8e7Pe6www555zvf2f74Va/aIYceenjuvPP2dc7hZZd9JUcd9a48+eSfsmZNm3NcY62trbVeQpc15YYpOf74EzN48JAkySknn5pbbrk5t9/xs7ztrUd02Hbu3Dkvu4Y15mMfOzMnnXR8Zs+enZ122ikrV66dpdbW1tTXy5g2BTvuuGOn7q+zY7da8/9SANgCzZo1s9ZL6JKam5szf/789O69TYdzMHjw4Nx7770ZMWJEh+1///vfp6GhIUOHDk2SzJ79VJKkvr4+v/zlLzJmzJhMnz49r3vd6zrsb8cdd8j999/vPNfIEccNqvUSuqRu3bql33a9Mmx0Q3YY/Pdz0LZmRd7w5kEZMuofv1E78r27Z/XqVRm1b0NG7bv29SN3758lS5Z0OKe9t1uTIbvukDX1znMt+HOtNly/oLrMGFSP+YLqMmNQPeYLqqe5uTkLFjybQYP6Z9my+e3PDx06JDNnTsu//MurC69fmAcemJohQwZ1eP3UqVPz3HN/zbhxp+Xxxx9JW1tbh99n41u2rNYr6Jqam5vzzDPzs+22vfPnJx9vf37w4MG5//77MmpUx4+Sffk17C9Pz0n37muvYb+5+5cZM2ZM5j+z9ofpj3r727J69eoMHjw473rXu7L77rtvvAODV8DHyAIAdJKWlrV3ZOrVq1eH53v16tX+ey/fvrGxcZ3ne/bs2b59S0tLevbsuUH7gy1Z9+5rPyq2ra2tw/OrV69u/71/pH///uvcqrx79+5ZvXr1Ovvr1s3bJLoW1y+oLjMG1WO+oLrMGFSP+YLqeaXz9XK33HJL5syZk2OPPbb9ueeeey7f//73c9ppp/neIV1eNa5hAwcOzJe//OVceumlueSSS7L33nvnK1/5SmbPnt35BwCdwJ3tAAA6yYtvFpqamjo839TUlH79
+q13+/W98Whubm7fV2NjY5qbm9fZ3/remMCW7MXI7uVhXX19ffGbZP369Ut9fX0WLVq0zj5ffkv6+vr6rFmzphNWDJsP1y+oLjMG1WO+oLrMGFSP+YLqeaXz9VI/+MEP8pvf/CYXXHBBtt9++/bnJ02alLe97W0ZMGBA5y8YNjPVuIb17ds3ffv2bd/+yCOPzLRp03Lfffdl2LBhnXsAbPJ22WWXTtvX0qVLO21fLyW2A4At0KhRo2u9hC5r4MCBWb68qf0crF69Ok8//XSOPfbYdc7L1lv3yNe//vXMnTs3Q4YMybBhwzN37pysXr06b3zjmzJgwIDstddeWbBgQYfXLly4MHvvvbfzXCNf/9xdtV5ClzVs6MjMnrkq90+dlySp61aXsy98de6560955KF5Hbbdpk99DjniVUmSHXbYIX98ZH5+dN3cDtt0f+8O2bpHQ/7f9//+2mM/MDALnl6Y/3djx/2xcZz9uTfXegldlusXVJcZg+oxX1BdZgyqx3xt+ebPf6HWS+iSevdOdtxxp8ybtzhjxgxMkrS1rc7cuX/JoYceld69B3bYvq1tVZqaFmTy5MmZPn16vvzlSzNw4OAO20yfPj1PPTU7P/rRj5Mkra0tf3v+kVx77U1paNhqIxwZL7fjDtvUegld1oABA/PC88uz6y4jk/z9Gvaudx3T/tyLGuq37nANG7zz0MyZMzerV6/O2IPfmJ122mm9X6Nnz17p23e7dfbHxrFgwYKafe3Zs2enrq4ulUoldXV1NVvHPyK2A4AtUGMPP61YK8cdd3yuu+66HHzQQRk8eEiuvvrqNDQ05K2Hv22d8zJs2LDss88+uf7663PGGWekubk5l112Wd447o3ZZfjan9o4/rjj84EPfiC/nTo1hxwyLr/+9a9yzz335L+u/b/Oc408v2RVrZfQZU2988kc9OZd88hDz2bRguU57Mjds3rVmtw/dW5Wrmhb72t69OiR3r17575fT1vn3P3653/OR88dm8HDdshj05/JHnsNyIhRO+bK//i181wj/lyrHdcvqC4zBtVjvqC6zBhUj/na8nXv3lrrJXRZRx757tx22w+y774HZMCAnTNlyn+lvr4+Bx30pnTv3jGMa2tbnSuvvDJz5szJhRdemIEDB6+zzX/9120dHk+adFnWrGnLRz7yifToIfiqlR49etR6CV3W+977vnzv+9flDQcemME7D853v3t16usbcvhb3rrOeRkyZOjLrmEtueLKy3LIIePa71r323t+m6FDh2bggIFZsWJFbrnlpkyf/od88uxPOc810rexd62XkCSpVCq1XsJ61VU21ZUBAP9rbat9BGKtVCqVXHHF5fnBjTemqWl59txzz0y44MKMHDky8+fPz1FvPyoTJ07M/mP2T0trSx588PeZPHlypk2blm7dumfcuHGZcMGE9OnTp32ft99xey6//LLMmzcvgwYNyllnfjzjx4+v4VF2bed++NZaL6FLe8s79sjrDhmeHj3q85c5S3LLdQ/n2XkvpO92jTn3ovH59jem5qknFmfbfg054rhBGTx4cHr16pWv/ftd6w3o9tp/UN76zj3Tr3/PLFnUnJ/eOiOPPDi/BkdGkvznxKNrvYQuy/ULqsuMQfWYL6guMwbVY762fLPnLKn1ErqsSqWS6677Tm6//Ydpbm7KiBGj89GPfirDh++aBQuezUc+ckK+8IWv5dWv3icPP3x/zj//42loaEi3bt3+dheltXdSuuWW9X/Kyde//n/S1rY655zzuY13UKxj0MA+5Y2oikqlkqu+eWVuvvnGNDU1ZY899sz55302u+02Ms8880yOftfbc9WV38p++41Ja2tr/jD9oZdcw7rlkLHjct55n22/hk2c9K3ccstNWbr0+fTosXVGjNgtp532kbz2gNfW+Ei7rh6NDTX72v3798+SJUuy995759Zb/7l/k/va176WK664InV1dWlrW/9NG/43xHYAsAUS220eWlpbMmvW
zPbHo0aN9lOmmwGx3ebhxdjuRf/v+/PcrW4zILbbPLh+QXWZMage8wXVZcageszX5klst3loa1uZZcv+/sO3vXsPXOfOdmyaxHabh9bW1vz5ycfbH++6y0h3q9sM1DK2O+yww3LXXXelV69eeeGFF/6pj5KdMGFCvvjFL3Z6bNet0/YEAAAAAAAAAAAA/wtjxoxJkjQ3N2fmzJmFrWtDbAcAAAAAAAAAAEBN7bfffu3/+6GHHqrhSv5nYjsAAAAAAAAAAABqanOI7eprvQAAAAAAAAAAAAC6thEjRuTss89OW1tbRo8e/U/t61Of+lT+9V//tZNW9ndiOwAAAAAAAAAAAGru4osv7pT9bLvtttl22207ZV8v5WNkAQAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoKC+1gsAAIDNyX9OPLrWS2ADtLS2
ZNasme2Pz/7cm9PYo7GGK2JDjO91Ua2XwAZ41fBt8snrx7U/Pm3/b+WvTy2v3YLYYD9vmlDrJcAWq1Kp1HoJbIiXn6dKxbnbDNTV1dV6CQDwvzJsaL9aL4ENsPb7iPPbHw8c2Mf3EaETrW7reA+w+oZuaWjoXqPVQOdwZzsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcA0IkqlUouv+LyHDJubMbsv19Oev+JeeKJx//H7ZcvX54rrrgip5xySg45ZGzOOfecvPDCCx22ueOOO3LEkW/LvvvtkyOPOiL//d8/r/ZhwCbJfEF1fWDCuNzw1Nn5yXPn5ZI7
P5hhe+zwP27bo0ePjBgxInvvvXcm3ffRfGri29O4zVYdtnn3ma/LtY9+LD9e9JlcP+usnHje2GofAmyyOvsaNnPmzJz24dNy8NiDs8eeu+eee+/ZGIcBm6RKpZIrrrg84954SPb/lzF5/wdOyhNPPPE/bt9hvsYdknM/ve58ffgjp2XsIQdnz1fvkXvNF12c92FQPeYLqsd8QXWZMTaWu+++O1deeWW++tWv5sYbb8zzzz+/wa+dNm1aTj755Jxyyimdvi6xHQBAJ7rmu9fk1ltvybcnfSe/nXpP9t13v5x62qlpampa7/ZXXXVVnn/++VxyySW57bYfZvHiRTnv/PPaf/8P0/+QT3/m3Jx15sdz/+8eyJn/dlbO/fS5mTFjxsY6JNhkmC+onvec/YYc/oF98pkjr8u7Bn41M+79S/7jJyemR6+GdbZt7LVVdttttzQ3N2f69On51BGTM3CXfvn01e9s3+b1R4zMv1705nztIz/OUf2/ks8e/b2886MH5IhT9tuIRwWbjs6+hjU0NOSwQw/NN6/65sY6BNhkffe71+TWW2/NpInfztS7f5t999k3p3341DQ1b8h83ZbFixfn/M92nK9DDz0sV11pviDxPgyqyXxB9ZgvqC4zRrX97ne/yx577JFx48blzDPPzGc+85m8733vy0477ZRPf/rT/+P/115q7ty5mTx5ciZPntzp6xPbAQB0oilTvp8PffBDGTlyZHr06JEz/+3MrFq1Knfddec62z7zzPw8/PDDOfHEE9OnT5/069cv557z6fzyl7/I/PnzkyQ33HBDDj744IwfPz4NDQ0ZP358DjrooEy54fsb+9Cg5swXVM/bP7x/bvzGvXnq0QVZ2bo63/3cL1K/Vfcc9I7d19l25H4D071798yfPz+VSiUvLG7OdV/+TQ58++jssHOfJMnAXbfLnJkLM/3uOUmS2Y8tzCNT52TEPgM26nHBpqKzr2G77rprjj32PXn1q1+9sQ8FNjlTbpiSD75kvv7txfm68651tn3mmWc6zlfffjnnU+fml7/8ZeY/85L5OuZY8wV/430YVI/5guoxX1BdZmzLt3Dhwk799Urce++9OfTQQzNr1qxUKpUOv1asWJGLL744+++/fx599NEqHX1Zfc2+MgBQNS2tLbVeQpe0bNmyzJs3LyNHjexwDkaOHJVHZjySw8aP77D9jBmPpqGhIUOHDk2SrFixIkOHDU1DQ0OmPzI9/bbrlz/+8bEceuhhHfY3evTo3HnnXc4zXYr56hpeNXybWi+hS2rcZqsMGNYvC+Y91+EczH18YfY+ZHAeuffJDtv36d8jdXV17Y+3H9Qz2w3smW7d6rL/+GF56JdP5pF7n8xRp47Jm47bI4/9bm6GjNohex08NN8672fOcw35s602qnENe7mVK1c6v7VWqdR6BV3SsuVr52vUqJFp7TBfIzNjxiMZP/6wDtvPeHTGOvM17G/z9cj06dmu3/rnq9V81dZL/t7BxuV9GFSP+YLqMV9QXWasa9hxxx07dX+VDfy+ycqVK/P+97+//c51jY2NGTt2bPr06ZMZM2bkj3/8YyqVSmbNmpUDDzwwP/vZz/L617++U9e6IcR2ALAFmjVrZq2X0CUtXrw4SbJo0aIO56CuLpk3b94652XOnNlpbGxsfzx79lNJ1v7F8U9/eiI77fSqLFmyJM3NTR1e29zcnKVLlzjPdCnmq2v45PXjar2ELqmhYe1Hxb5nwt5pbR3V/vzOw/tk4Kht8qrXdPwH7u7du6etrS2DBg3K/Pnzc+olB2b48OFJknefu0/e+K9DkiTdeq3Iede8uz3Me/bZZ3PY6cNz2OnDN8ZhsR7+bKuNalzDXu4vf5mbvn23rcbyYZPWPl+LF2XW47Pan6+rq8u8+fM6PJckc+bM6Thfc2Yn+dt8/flP2WnATut8jb88/Zf07de38xcPmwHvw6B6zBdUj/mC6jJjVNNNN92UP//5z6mrq8trX/va3Hzz
zRkw4O+flvKrX/0qp59+embNmpUXXnghhx9+eH72s5/lDW94w0Zdp4+RBQDoJC++WXjxpy1e1NTU1OGNxEu3b2lZ9ydympub27dvbGxMc3PzBu0PtmTmC6qnra0tydqI7qXq6+vbf+/l2z/xxBPp2bNnXvOa12TkyJF57rnnkiSrV69OkgwYMCD9+/fPzJkz89BDD2XGjBnp06dPBg0aVOWjgU1PNa5hwFrmC6rL+zCoHvMF1WO+oLrMGNV06623Jkm22267/PjHP+4Q2iXJuHHj8tBDD+Xoo49OsvZOi29961tz3333bdR1iu0AADpJz549s8MOO+TJJ//+cXttbW2ZM2dOhg0bts72Q4cOzapVqzJ37tz25+bOnZvVq1e330576NChHfaXJE899dR69wdbMvMF1bNmzZqsWLEiPXv27PD8+r7J9aKWlpY88cQTmT59eh599NGsWrUqbW1tWb58eZK1M7t06dL2b6StXLkyixcvzrbbuvMWXU81rmHAWuYLqsv7MKge8wXVY76guswY1fTQQw+lrq4uJ510Urbffvv1btPY2Jibb745H//4x5OsDe4OP/zw/O53v9to6/QxsgCwBRo1anStl9BlHX/88Zky5YYcccSR2XnnnXP11d/J1ltvneOPP2GdiGHYsOHZZ599cv311+eMM87IkCFDc+mll2Xs2LE58MCDkiQnn3xyTjvttMybNz8HH3xw7r777kyfPj3f/vZ3nGe6HPO15Ttt/2/Vegld1pEnL8/4E/fNDZ/7Rf469/kcffprM3zn1nzh3T/OiuZVHbbdflDPfPSKcWltbc2aNWty9//9S47/1Ijc8LWp+ck1v1+7v1Oacuhxe2fKvz+SeX9anO0H9M5Z3zgqDz78VK4691c1OEKSZNLvP1LrJXRZnX0Nq1QqWblyZftrBgwYmGHDhqd79+6pr/ftvpqoVGq9gi7r+OOOz5QbpuSItx3xt/m6eu18HXf8uvM1dFjH+Ro8JJfeemnGHjw2B77hwCTrma+dBmTY0GHmq5bq6srbUDXeh0H1mC+oHvMF1WXGtnwLFiyo6dcdM2ZMcduvf/3r6dGjR77yla+0f6Tsf//3f2f//fev9jJTV6n4ThAAbGnaVq+p9RK6rEqlkiuuuDw/uPHGNDUtz5577pkJF1yYkSNHZv78+Tnq7Udl4sSJ2X/M/mlpbcmDD/4+kydPzrRp09KtW/eMGzcuEy6YkD59+rTv8/Y7bs/ll1+WefPmZdCgQTnrzI9n/PjxNTxKqA3zteUb3+uiWi+hS/vgheNyxClj0rPP1nn8wfm57Kyf5qlHF2THwX1yzcNn5Ly3X59Hfjs3rxq+Tb7xm5Oy3XbbpVu3bpn/5+dyw9fuyU+/O619X9261eX9E8blzce9Jv126JWmF1bkgTv+lG995udZvrS1hkfZtf28aUKtl9BldfY1bN68eTls/KHrfJ2PfvSMfOyMj23swyNrzzG1UalUcsWVV+TGG3+Qpqam7LnnnrngsxPWztcz8/P2tx+Vid+amDFj9k9ra0sefOjBl8xXt4wbNy4XfLbjfI1/y2HrfJ2Pnv7RnGG+aqJObFdT3odB9ZgvqB7zBdVlxrZ83etr80GpW2+9dVavXp0pU6bk2GOP3aDXXHDBBfnSl76UJOnbt2/uvPPO7LfffvnhD3+Yo48+OnV1dWlra+vUdYrtAGALJLbbPLS0tmTWrJntj0eNGp3GHo01XBFsOczX5klst3l41fBt8snrx7U//toJv8pfn1peuwWxwcR2mwfXsM2Tb7FuHlpbWzLr8Vntj0eNHJUe5muTJ7bbfLiGQfWYL6ge8wXVZcY2T7WK7XbaaacsXLgwl156aT72sQ3/gbfzzz8/X/nKV5Ik/fr1yy9+8YvMnj27arFdbf7rAAAAAAAAAAAAQJKRI0cmSR544IFX9LovfelLOeecc5IkS5YsyaGHHpo//OEPnb6+F4ntAAAAAAAAAAAAqJkxY8akUqnk
rrvuesWfIvAf//EfOfvss5Mkzz33XD7/+c9XY4lJxHYAAAAAAAAAAADU0Lhx45IkzzzzTO68885X/PqLL744Z5111isO9V4psR0AAAAAAAAAAAA1c+ihh6ZHjx6pVCq5+OKL/1f7uOSSS3LmmWdWNbirr9qeAQAAAAAAAAAAoKBXr1754he/mMcffzx1dXVZunRp+vbt+4r3841vfCO9e/fO1KlTO3+REdsBAAAAAAAAAABQY5/4xCc6ZT8XXXRRp+xnfXyMLAAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABTU13oBAAAAkCQ/b5pQ6yWwAVpaWzJr1sz2x5N+/5E09mis4YrYUB86eFKtl8AG2G6nHjnpgte0P77gpBvz3LOtNVwRG+Ka35xa6yUAALCJWb7M3+M3BytWruzwuLlpZdpW1dVoNbwS
PRobar0ENkDb6jXrPF69uq1Gq2FDda9377Z/xH8dAAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AID/z96dh8lZ1vke/naSNgsQCLJkIQtLOmxKIDgImAUMUdlUFpUIIiJhk2VmIMjmmdHjcs5xRiUhLLIosiMER0RAICoIDopACEgIShJIBwgR0HSns9b5A2loOvjEsSuVpO/7unJdVtWbt56X8pe3qvrTVQAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAXdar0AAAAAAAAAAAAA+Hs9/fTTeeWVV7L55ptnm222qfr9+WQ7AIAOVKlUMmnypIweMyojdt8tR33myMya9fQ7br9o0aJMnjw5xx57bEaPHpUzJ56ZP//5z222ufPOO3PAgftn192G58CDDsjP
fnZXtQ8D1krmC6qno+frqaeeyoTjJ2TkqJHZcacd8sCDD6yJw4C12sc/v3u+819H5bv3HptzphycAdv0ecdte/XqlYaGhuyyyy7539//VE79xofy7r4btt7+4U+9N/9+5aG5+GfHZPJPjs6/fPMjGbD1O+8P1meVSiWTJ0/KmH1GZ/f3jchnjj4qs2bNesft25zDxozOxLPan8OOP2FCRo0emZ123jEPOofRyXkdBtVjvqB6KpVKLv3uRdn/wP0yasyemXDC5/KHPzzzjtvfeOONOfvss3PUUUflpJMntLv9J7f/OJ8/7rMZO250xo4bkxNO/Hwee+zRKh4BrN0qlUounDI5Hxy7T/5pj/fls8ccvdqvw/b94Jh88eyz2p3D3nD1NT/Ie3fZOZMmX1Ct5bOOmDp1ao455ph85CMfyTHHHJPbb7+93TZXXHFFBg4cmB122CF77bVXhg4dmq222irf+MY3smLFiqqtTWwHANCBrrjyikydeku+e+ll+dX9D2TXXXfLcROOS1NT0yq3nzJlSl577bV861vfyq23/igLF76cs885u/X2x6Y/lrO+ODGnnXp6Hvrv3+TUU07LxLMmZsaMGWvqkGCtYb6gejp6vurr67Pf2LG5aMpFa+oQYK22/6d3yagDt8//++ef5KSPfC+zHn8hZ377wHTv2f5LJ+rqku222y7Nzc2ZPn16vnL8D7Ni+cqc9OWxrdvUd++aa779q5xy4FX550OuTuOcV3LWBQemvnvXNXlYsFa48sorMnXq1Fx6yXdz/32/yq7Dd82E449LU/PqnMNuzcKFC3POuW3PYWPH7pcpFzqHQeJ1GFST+YLqufqaq/JfP/5RLvj2lNx1x7Ts8t7hOfX0k9Lc3LzK7bfYYoscdthh2XfffVd5e3Nzc449dkJ+NPX23HH7zzJ69D457Z9PzosvvVjNw4C11ve+f2VuvXVqLrrokvzyF/dl1+HDc+KJx7/jjL31HHbLza+/Djvv/HPbbffs7GdzzTXXZOjQodU+BNZiLS0t2X///XPYYYflqquuyl133ZWrrroqBx10UE499dTW7SZNmpTjjjsujY2NqVQqrX8aGxtz7rnnZv/998+yZcuqskaxHQBAB7r++utyzGePSUNDQ3r06JFTTzk1y5Ytyz333N1u2/nzG/Poo4/myCOPTO/evdOnT59MPPOsTJt2bxobG5MkN9xwQ0aOHJlx48alvr4+48aNywc+8IFcf8N1a/rQoObMF1RPR8/Xtttum8MP/0R23nnnNX0osFb64CE75afXPpbn//CnLFuyIjdf+pt069YlI0Zv3W7bHr3elfr6+ixcuDCVSiVLWpbnVz+dmcENm7Vu8+PvP5KnH3shy5asyLIlK/JfV/4um2y2QfoP3mQNHhWsHa6/4fp89i3nsFPeOIfdfU+7befPn9/2HLZJn5x5xsRMmzYtjfPfcg477HDnMPgrr8OgeswXVM8Pb74xR376M9luu6Hp0aNHjp9wUpYtW5af/+LeVW4/ZsyYjBgxIhtttNEqbz/8sE9mz/fvlQ022CDdutXniE99Ol26dMmTTz5RzcOAtdYNN9yQo4/+bBqGvn4OO/nkU7Js+Tudw9q+Dttkk01yxr+ekZ//fFrmz5/fut2KFSty7rnn5MwzzszGG2+8Jg+HVViwYEGH/vl7nHDCCbnjjjvaBHRv/Lnwwgtz7bXX5sknn8wZZ5yRSqWSJNlxxx2zxx57ZLPNXn//rFKp5O677855553X4f9tkqT9r88CAOu8xS2La72ETukvf/lL5s2bl4ZhDW0eg4aGYXl8xuPZb9y4NtvPmPFE6uvrM3jw4CTJkiVLMnjI4NTX12f649PTZ9M++f3vn8zYsfu12d/222+fu+++x+NMp2K+oHqqMV9vt3TpUnO1Fti0b49aL6FT6tGrPpv3752XX3y1zWPQOOdP2X7Xvnnq0efabP+unnV56aWXstlmm+X555/PlgM3zL6H7Jjpv57z
jo/h8L2GZMniZVmyZInHuUZa/BtXE39Z9Po5bNiwhjaPQUNDQ2bMeDzjxu3XZvsZT8xodw4b8tdz2OPTp2fTPqs+h3l8a6yurtYr6LS8DoPqMV+dw5KlS2u9hE5p0aJFmT+/MUOHNmTJ0iWt1w/driFP/v6JfPCDbZ8jLlvW9nGqVCpt/t6qPPHkjCxevDhDhgwpbksV1VXvKyJ5Z4sWLUpj47w0NAxLS0tL6/UNQxsy44kZ2W+/tuewJ5582zls6ZIMHjzk9ddhj09Pn7++Drv8isvSv3//7LXX3rnqB9/P8uXL2+yfNWuLLbbo0P29EcWVPPbYY7nqqqtSV1eXPn365JxzzsnOO++cp59+Ol/72tfy4osv5pvf/GbGjRuXZcuWZfTo0bnyyiszZMiQ1vu58cYbc9xxx2XRokX5zne+k9NPPz39+vXr0OMR2wHAemjmzKdqvYROaeHChUmSl19+uc1jUFeXzJs3r93jMmfO7PTs2bP18uzZzyZJevbsmWeemZW+fbfMK6+8kubmpjZ/t7m5Oa+++orHmU7FfEH1VGO+3u655+Zmk038RmqtHXXee2q9hE6pvr4+SbLfUUPS0tK39fotBvXMZlt1z0YD2j8ur7zySgYNGpRdd901u+66axYvXpxZs2at8jHs2bNnGhoa8vzzz+eTZ+5QvQPhb5r59MxaL6FTaj2HLXy5zWNQV1eXeY3z2j0uc+bMaXsOmzM7yV/PYX94Jn379c3bPff8c9mkzyYdv3hYB3gdBtVjvqB63pivRU2v5rnn/9h6fdduXfLSSy+0uW5Vli5d8je3WbhwYf793/89Bx10UFas/NvbwvrojRl75ZWFeeYPT7deX9elLvPnN7a5Lkmee67t67DnnpuT5PVz2B+f/UP6D+iX2bNn54c/vClf+9rX8swfns7ixYvzyit/arcv1n9XXXVVkqRLly656667sttuuyVJxo0bl7Fjx2b48OF57LHHMnv27GyzzTa5/fbb2/z/q66uLp/85CfTvXv3HHLIIVm2bFmuv/76/PM//3OHrtPXyAIAdJA3nsw1NTW1ub6pqanNE723br94cfvfKm1ubm7dvmfPnmlubl6t/cH6zHxB9VRjvoA3rVjx+m/ad+3atc313bp1a73trbp3756hQ4dmwYIFeeSRR/Loo4/m1Vdfzfbbb58uXdq+lderV68MHTo0jY2NrW92Q2fiHAbV5XUYVI/5gur5e+fr7/HCCy/ky1/+ct7//vfnE5/4xD+0L1hXdfQ5bPny5bnoooty1FFHveNXOdN5PPDAA6mrq8uBBx7YGtq9Yfvtt8+hhx6aSqWS1157LWecccY7/rv+sY99LMOHD0+S/OIXv+jwdYrtAAA6SK9evbL55pvnj3988zfZVqxYkTlz5rR+fPFbDR48OMuWLcvcuXNbr5s7d26WL1/e+nHagwcPbrO/JHn22WdXuT9Yn5kvqJ5qzBfwppUrV2bJkiXp1atXm+tX9cPQN66vVCpZsGBBKpVKVq5cmRdffDHdu3dPjx5vfkXsRhttlKFDh2bevHlZsGBB1Y8D1kbOYVBdXodB9ZgvqJ6/d75W19y5c/PlL385o0ePzvjx4ztgpbBu6uhz2CuvvJLnnnsul112WSZMmJAJEybk6aefzu23354zzzxzTRwSa5FnnnkmSTJmzJhV3r733nu3/u+xY8f+zX195CMfSaVSyYwZMzpsfW/wNbIAsB4aNmz7Wi+h0xo/fnyuv/6GHHDAgdlqq61y+eWXpXv37hk//tPtfsA6ZMjWGT58eK655pqcfPLJGTRocL7znQsyatSo7L33B5Ikn/vc5zJhwoTMm9eYkSNH5r777sv06dPz3e9e5nGm0zFfUD0dPV+VSiVLly5t/Tv9+vXPkCFbp2vXrunWzVsRtXLeUTfVegmd1j4fW5lR+++Qa6/6XRa+8Jfsd/h70zRkaS4651dZ2rK8zbaDGvrk9K9vnc022ywvv/xybr/smew+
qiEt2y/LVV99JIubluY9ewzKp099T37wn/flsQfn1OioeKv/fdVhtV5CpzX+iPG5/obrc8D+B/z1HHb56+ewI8a3P4cNHtL2HDZwUL4z9TsZNXJU9t7r9Tfs253D+vbLkMFDnMNqqa6u1ivo1LwOg+oxX+u/5qal5Y2oisMO+2RuvvmmfHDfD2XAgAH5/lVX5l3v6p6PffTwdvO1bNnSPD9vdlauXJmVK1emvv5d2WLzAUle/+TxJHn88en56le/ls8e/bl84hOfWuPHw6p17+75ea186pNH5MabbshHPrx/BgzYKldc+frrsE9+4oh2MzZw4OA257ABAwZm6i1TM3LkyLx/j72yYsWK/Pi/ftLm75x9zhez04475eijP5vNNttsTR4af/XSSy/V5H7//Oc/J0n69++/ytu32GKL1v89cODAv7mvhoaGJMnLL7/cQat7U12lUql0+F4BgJpasXxlrZfQaVUqlUyePCk33nRTmpoWZaeddsr5530pDQ0NaWxszEEHH5RLLrkku4/YPYtbFufhh3+b733ve3nkkUfSpUvXjBkzJuefd3569+7dus877rwjkyZdkHnz5mXAgAE57dTTM27cuBoeJdSG+YLq6ej5mjdvXvYb1/43C0866eR84eQvrOnD46+OGXlprZfQqR1y3O4Z89Ed03OD+jz7+wW56j/uz/N/+FPeveWG+fq1n8w3/+UnefqxF7Jp3x456f/slf79+6dHjx5Z2rIic2ctzM3f/U1mPjI/SfIfN4/Pu7fcMEuXtP0a2iv/zy/z4F2zanF4nd4Vvzyu1kvotCqVSiZfODk33XRjmpqastNOO+W8c89//Rw2vzEHH3xQLrn4kowYsXtaWhbn4d89/JZzWJeMGTMm553b9hw27kP7tbufk048KSc7h9VEndiuprwOg+oxX+u/RX9pqfUSOq1KpZJLv3tRpt56S5qamrLDDjtk4hlnZ7vthuaFF+bnk0ccmm9/a3J2Hb5blixdknPPm5hf/vKX7fbz0K8fSZKceNJx+d0jD7f5tPEk+ezRx+aYzx67Ro6J9nr0rK/1EjqtSqWSKVMuzA9vvilNTU3Zccedcs4556ZhaEPmz5+fj3384EyZcnFG7DYiLS0tefSx37V5HTZ69Jicc/a5bc5hb/W5Yz+bXXfdLad84dQ1fGS8oXuP2szXxhtvnEWLFuXGG2/MoYce2u72H/3oR/n4xz+eurq6rFixYhV7eNMtt9ySww47LF27ds2yZcs6dJ1iOwBYD4nt1g2LWxZn5synWi8PG7Z9evboWcMVwfrDfEH1mK91l9hu3bBp3x456rz3tF7+wf9+PH96wQ/p1nZiu3VDS8vizHx6ZuvlYQ3D0sM5bK0ntlt3eJ4I1WO+1k1iu3XDkqVL8tzzb34d5sCttkn3d3Wv4YpYXWK7dUNLS0ue+cPTrZe327ahXbjK2qdWsd22226b2bNn55JLLsnnP//5drf/PbHdNddck6OOOiq9evXKokWLOnSdPlcTAAAAAAAAAACAmtl8880ze/bsPPvss6u8vW/fvtl///1X65eyXnzxxdZ9djSxHQAAAAAAAAAAADWz884756GHHspjjz22ytv32GOP3Hbbbau1r+nTpyd5/dPyOlqXDt8jAAAAAAAAAAAArKbdd989SfLQQw/9w/u6++67U1dXl1133fUf3tfbie0AAAAAAAAAAAComfe9733p3bt3li1blieeeOJ/vJ977rknjY2NSZKRI0d21PJa+RpZAAAAAAAAAAAAambEiBF59dVX/+H9zJo1K0cffXSSZN999/2H9/d2YjsAAAAAAAAAAADWeSeccEJOOOGEqu3f18gCAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgO
AAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAQbdaLwAAAACA6rv03mNrvQRWQ0tLS/7wx6dbL//bFYemR48eNVwRq+Pjg75Z6yWwGjYftEFOnrJn6+VTx34vC+Y21XBFrI5bnzuz1ksAgP+RDTbsXuslsBq6tqxsc7lXr/r06OGxWxfU1dXVegmshq7durS73K1b1xqtBjqGT7YDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYA
AAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAXdar0AAAAAAAAAAAAA+HssW7YsixcvTpL07t17jdynT7YDAAAAAAAAAABgnfJ//+//TZ8+fbLpppuusfv0yXYAAAAAAAAAAACscyqVyhq9P59sBwDQgSqVSiZNnpTRY0ZlxO675ajPHJlZs55+x+0XLVqUyZMn59hjj83o0aNy5sQz8+c//7n19qeeeioTjp+QkaNGZseddsgDDz6wJg4D1kodPV9Jcuedd+aAA/fPrrsNz4EHHZCf/eyuah8GrJXMF1RXpVLJlCmTM3a/fbLH+9+XYz53dGY9M+sdt3/rjH1w7JicffZZbWbsN795KLsM3znv3/N9rX/2G/fBNXEosFYaf8YH8r1HTspNf/jnfH3q+Awattk7btujR49st9122WWXXXLBXZ/Pqf/5kfTc4F2tt4/Yd5t85YZP5uoZp+S6p07Lf9x+VN6337Zr4jBgreR5IlSP+YLqqVQqmTx5UsbsMzq7v29EPnP0UZk1a/Veg40eMzoTz2r/Pv3xJ0zIqNEjs9POO+ZB79PTyflZGJ2d2A4AoANdceUVmTr1lnz30svyq/sfyK677pbjJhyXpqamVW4/ZcqUvPbaa/nWt76VW2/9URYufDlnn3N26+319fXZb+zYXDTlojV1CLDW6uj5emz6YznrixNz2qmn56H//k1OPeW0TDxrYmbMmLGmDgnWGuYLquv7378yt/5oai6ackl+8fP7Mnz48Jx44vFpbm5e
5fZvnbGbf3hrFv5pYc7/0rnttrv/vgfz6wd/k18/+Jv87K57qn0YsFb6+In/lLGfek/+1xE35tM7Tcrvf/N8vnzdJ9KjV327bXtsUJ+hQ4emubk506dPz3mfuib9hmyS0y/Yv3WbDTfpkZ9e9UiO3/vSfHrHCzL14t/ki5d+LNvt0ndNHhasNTxPhOoxX1A9V155RaZOnZpLL/lu7r/vV9l1+K6ZcPxxaWpenfm6NQsXLsw557Z9n37s2P0y5ULv00PiZ2GdwYIFCzr0z+rYd999/0d/rrjiinfcxwc/WJ1fTvU1sgCwHlrcsrjWS+i0rrvu2nz600dm4KCBqaSS4447Lj/84U356R2354ADDmyz7ezZs/Poo4/m61//enr37p1evXrltNNOzxFHfCp/fPaP6devX/oP6J/+A/q3/p2lS5d6fOm0Onq+rr32muy1194ZOWpklq9YnpGjRmbPPffKNddenS996X/V6CihNsxX57B82cpaL6HTuv6G6zN+/JEZOHBQkuTYzx2XW265OXfc+dPs/5ED2mw7d+6ct81Yz3zhC6fmqKPGZ/bs2enbt2+WLl2aJGlpaUm3bt7eWxtsPmiDWi+h0zro8yPys+sfTXPz4my8Rffced3v8uEjh2fcUe/Jgz+d2Wbb3cYMSdeuXdPY2Jgk6dY9ueOa3+VfLvhoGnbfMq+8tChP/nZOkqTXxt3Sa+Numfnoc3lh7iv5pw9tk9de+csaPz68x1FrnidC9ZivTmANf6Udb7ru+uvy6SM/nUGDBiZvzNfNP8wdP/1pDjig7Wuw2XPeNl89e+W0U0/LEeOPyLOz/5h+fftlwID+GfC29+lbPEepvbq6Wq+g0/KzsPXfFlts0aH7W52vef35z3+eun9griuVSn7xi1+0ufyP7O9v8W4cAKyHZs58qtZL6JSam5vT2NiYjTbasM1jMHDgwDz44IPZbrvt2mz/29/+NvX19Rk8eHCSZPbsZ5Mk3bp1y7Rp92bEiBHt7uO55+Zmk002ruJRwNqpGvM1ffr0vP/972+zvy222DwPPfSQf0fpVMwXVFdzc3Pmz2/MxhtvlD/88c2vVBk4cGAeeujXGTZsaJvt3z5jzz0/J127vj5jv7xvWkaMGJHG+c8nSQ46eP8sX748AwcOzCGHHJIddthhzR0YbZw8Zc9aL6FT6tKlSzbv3zu77b9Fho1+8zGodF2Wg098b3Y7aNM22/fu3bvNG+1HnD88G220Ubp0qcuJ3/pAXnvttXb3UV9fn622e3d223+LNIzyONeC5w6143kiVI/5gup5c742ysyn3/zli4EDB+bBXz+Y7YYW5mvO7CRvzNe0Vb9P//xz2aTPJlU7Blib+VkY1bY6YV41/u7fQ2wHANBBFi9+/bdsNtig7adabLDBBq23vX37nj17tru+V69eq9weOrNqzNfixYvTq1ev1dofrM/MF1RXNWasf//++frXv56tttoqS5cuzT333JNvfOMb+fd///cMGTKk4w8C1lJdu3ZNkqxYsaLN9cuXL2+97a2ampqyYsWKDBgwII2NjenWrVv69evXZl9v1aVLl2y77bZ55ZVXsmjRoiocAazdPE+E6jFfUD3ep4fqMmNU24YbbpivfOUrGT58+Gpt/4Mf/CBXXHFF6urqcu+991Z3cX8ltgMA6CBvvFhoampqc31TU1P69Omzyu1X9UKiubl5lS88oDOrxnz17Nkzzc3N7fZn/uhszBdUVzVmbJNNNskmm2zSuv2BBx6YRx55JL/+9a/FdnQqb0R2bw/lunXrtso5WrFiRWbNmpWtttoq73nPe7JixYq8+OKL2WijjbJ8+fJ2+9huu+2yZMmSzJ49u2rHAGszzxOheswXVI/36aG6zBjVcvHFF+fMM8/MX/7yl0ycODETJ07Ml770pdTX1//Nv3f//fe3/u/Ro0dXe5lJxHYAsF4aNmz7Wi+h0+rfv38WLWpqfQyWL1+e559/Pocffni7x6V79x75
z//8z8ydOzeDBg3KkCFbZ+7cOVm+fHn22Wff1k9YeKuBAwd5fOm0Onq+3vve9+all15q83cXLFiQXXbZxZzR6ZivzmH5spW1XkKn1a9f//z5tUXZdpuGJG/O2CGHHNZ63Rvqu3VvM2MDtxqcOXPmZvny5Rk1cp/07dt3lffRq9cG2WSTTdvtjzXjXz78/VovodP6P1O3ze9+8lLuvnF6kqRL17r852075L8ump4Hfzqzzbab9uuZI84fnlmzZiVJrvvKoxk0tG+O/3L/fPv4aVnctDRJ0meLDXPGpI/mtz+bnau+MS1r6FtoeAcX3HNMrZfQqXmeCNVjvjoBTyJq5vX5WpRhDcOSvGW+Dju89bo3dO/e9jXYkMFDMnfu3L/O1z7p13cV79NvNbDdfqiBurpar6DT8rOw9d9LL720xu9zwoQJ+fCHP5xjjz0299xzT772ta9l6tSpueyyy/L+979/ja/nbxHbAcB6qGcPvwlSK0ccMT5XX311Rn7gAxk4cFAuv/zy1NfX5yMf3r/d4zJkyJAMHz4811xzTU4++eQ0NzfnggsuyD5j9sk2W2+TJKlUKlm6dGnr3+nSpUu61HVJ165d062bp3J0Lh09X+OPGJ+jP3t0fnX//Rk9ekx+8Yuf54EHHshV3/+Bf0fpdMxX57Cs64ryRlTFpz75qVx73dXZa++9M3CrgbnyysvTrVt9Pvyhj6RHjx5tth00aPDbZmxxJl94QUaPHtP6qXW/euBXGTx4cPr3658lS5bkllt+mOnTH8u//ssZ7fbHmrFgblN5I6rix5c9nIOOHZFf/XhWXpjzaj55+p5ZtmRF7vrB42lpXtZu+169eqWlpSUrV67Mu7fcJJ88Ze9c8//uz9zfv5IkGbDtpvnixYfkV7fNzOX/tma+foa/zXOH2vI8EarHfK3/KmK7mjniU0fk6quvzgf2HpmBAwe2zteHP/KR9Hj7fA1+23wtfn2+xozZJ1sPeef36eu8T19zdWK7mvGzsPXfhhtuUN6oCgYNGpSf/exnueSSSzJx4sQ8+eSTGTlyZL7whS/ka1/72lrzaYh1FWd5AFjvrFjuU0tqpVKpZPLkSbnxppvS1LQoO+20U84/70tpaGhIY2NjDjr4oFxyySXZfcTuWdyyOA8//Nt873vfyyOPPJIuXbpmzJgxOf+889O7d+8kybx587LfuLHt7uekk07OF07+wpo+PKipjp6vJLnjzjsyadIFmTdvXgYMGJDTTj0948aNq+FRQm2Yr85h2TKxXa1UKpVMuejC3HzzTWlqasqOO+6Uc84+N0OHNmT+/Pn5+CEHZ8qFF2e33UakpaUlj03/3VtmrEtGjxqTs88+t3XGLrn04txyyw/z6quvpUeP7tluu6GZMOGE7PFPe9T4SDuvw7f5z1ovoVP79JkfyIeO3CW9NuqeWY+9kIvPvitznno5mw/YKBf+4vP5t0/flCf/+/lsPmiDfOPHn8imm26aLl26ZP7sV3LLhQ/lrmunt+7rtG/tn7Gfek/rp9y94ec3P5EpZ921pg+NJLc+d2atl9CpeZ4I1WO+1n9+DF87lUolky+cnJtuujFNTU3Zaaedct65578+X/Mbc/DBB+WSiy/JiBG7p6VlcR7+3cNtXoONGTMm553b9n36cR/ar939nHTiSTnZ+/Q1I7arHT8LW/917dal1kvInDlz8rnPfS7Tpk1LXV1dtt5661x66aXZd99922z31a9+Neeff37q6uqyYsWaef9TbAcA6yGx3bphccvizJz5VOvlYcO291um0EHMF1SP+Vp3ie3WDS0tLfnDH59uvbztNg0+rW4dILZbN2w+aIOcPGXP1ssXnvSgTyVcB4jt1h2eJ0L1mK91kx/DrxtaWhZn5tMzWy8PaxjW7tPvWDuJ7dYNzmHrprUhtnvDRRddlLPOOiuLFi1KXV1djj322Hzzm99sjTVrEdutPf91AAAAAAAAAAAAIMmJJ56Y6dOnZ8yYMalUKrn8
8suz44475rbbbqvZmsR2AAAAAAAAAAAArHWGDBmSe++9N5MmTUqvXr3S2NiYj370ozniiCOyYMGCNb4esR0AAAAAAAAAAABrrZNPPjnTp0/P6NGjU6lUcuONN2bSpElrfB1iOwAAAAAAAAAAANZqW2+9daZNm5bvfOc76dmzZyqVyhpfQ7c1fo8AAAAAAAAAAADwP3DKKafkgAMOyNVXX73GgzuxHQAAAAAAAAAAAOuMbbbZJl/60pfW+P36GlkAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAo6FbrBQAAAABQffX1XWu9BFbD8hVtfze2W30Xj9064Nbnzqz1ElgNi1sWZ+bMp1ovX3DPMenZo2cNV8TqGLfBV2q9BFbTlltvmH+9Zkzr5Qm7X5wXn11UuwWxWu5qOr/WSwAAgHWKT7YDAAAAAAAA
AACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAALBOaWlpyQsvvJAlS5assfsU2wEAdKBKpZJJkydl9JhRGbH7bjnqM0dm1qyn33H7RYsWZfLkyTn22GMzevSonDnxzPz5z39us82dd96ZAw7cP7vuNjwHHnRAfvazu6p9GLBWMl9QPeYLqsuMQfWYL6i+o88fkxue/Zfc9qez8627P5shO27+jtv26NEj2223XXbZZZdc+uuTcsYlB6fnhu9qs82hp74/33/iC/nxy1/MNTNPy5Fnj6r2IcBayTkMqqdSqWTy5EkZs8/o7P6+EfnM0Udl1qxZ77h9m/kaMzoTz2o7X0899VSOP2FCRo0emZ123jEPPvjA
mjgMWGs5h1Fr8+fPz+mnn55tttkmG2ywQQYMGJBevXplyJAhOfXUU/P8889X9f7FdgAAHeiKK6/I1Km35LuXXpZf3f9Adt11txw34bg0NTWtcvspU6bktddey7e+9a3ceuuPsnDhyzn7nLNbb39s+mM564sTc9qpp+eh//5NTj3ltEw8a2JmzJixpg4J1hrmC6rHfEF1mTGoHvMF1fWJf9krHz56eL544NU5pP//y4wHn8v/ue3I9Nigvt22PTd4V4YOHZrm5uZMnz49ZxzwvfTfpk/OuvxjrdvseUBDPv+VD+Y/TvhxDtrsGzn349fmYyf9Uw44drc1eFSwdnAOg+q58sorMnXq1Fx6yXdz/32/yq7Dd82E449LU/PqzNetWbhwYc459835qq+vz9ix+2XKhRetqUOAtZpzGNUyZcqUTJkyJXPmzHnHbX70ox+loaEhkyZNypw5c1KpVFr/PPfcc7nwwgszbNiw3HzzzVVbp9gOAKADXX/9dTnms8ekoaEhPXr0yKmnnJply5blnnvubrft/PmNefTRR3PkkUemd+/e6dOnTyaeeVamTbs3jY2NSZIbbrghI0eOzLhx41JfX59x48blAx/4QK6/4bo1fWhQc+YLqsd8QXWZMage8wXVdfDxu+embz+YZ594KUtblufKf7s33d7VNR/46A7ttm3YrX+6du2axsbGVCqV/Hlhc67++i+z98HbZ/OteidJ+m+7aeY8tSDT73v9h2ezn1yQx++fk+2G91ujxwVrA+cwqJ7rb7g+n33LfJ3yxnzdfU+7befPn992vjbpkzPPmJhp06alcf7r87Xtttvm8MMOz84777ymDwXWSs5h678FCxZ06J/V9YUvfCGnnHJKHnvssVXePm3atBx66KFpampqDeze6o3rFi9enE996lO59957/6H/Du+kW1X2CgDU1OKWxbVeQqf0l7/8JfPmzUvDsIY2j0FDw7A8PuPx7DduXJvtZ8x4IvX19Rk8eHCSZMmSJRk8ZHDq6+sz/fHp6bNpn/z+909m7Nj92uxv++23z9133+NxplMxX1A95guqy4xB9ZivzmHLrTes9RI6rZ4bviv9hvTJS/P+1OZxmPv0guwyemAef/CPbbbvvVmP1NXVtV5+94Be2bR/r3TpUpfdxw3J76b9MY8/+MccdNyI7HvEjnnyv+dm0LDN896Rg3Px2T/1WNeIf9tqwzmsk3hbAMCa8ZdFr8/XsGENaWkzXw2ZMePxjBu3X5vtZzwxo918DfnrfD0+fXo27dOn3X0sXbq0zb6pkbc872DNcQ7rHLbYYosO3d/bo7j/iWXLluXoo4/OypUrkyTDhg3Leeedl3333Tebb755Xn755dx777356le/mt///vdZsWJFjj322DzzzDPp2rXrP3z/byW2A4D10MyZT9V6CZ3SwoULkyQvv/xym8egri6ZN29eu8dlzpzZ6dmzZ+vl2bOfTZL07NkzzzwzK337bplXXnklzc1Nbf5uc3NzXn31FY8znYr5guoxX1BdZgyqx3x1Dv96zZhaL6HTqq9//atiP3H+LmlpGdZ6/VZb907/YRtmy/e0/QF3165ds2LFigwYMCCNjY057lt7Z+utt06SHDpxePb5/KAkSZcNluTsKw5tDfNeeOGF7Hfi1tnvxK3XxGHxNv5tqw3nMKie1vla+HJmPj2z9fq6urrMa5zX5rokmTNnTtv5mjM7yV/n6w/PpG+/vu3u47nnn8smfTbp+MXDOsA5jFq59tpr8/zzz6euri6jR4/Obbfdll69erXe3rdv34wfPz4f//jHc+CBB2batGmZO3dubrnllhx++OEduhZfIwsA0EHeeLHQ1NTU5vqmpqY2LyTeuv3ixe1/I6e5ubl1+549e6a5uXm19gfrM/MF1WO+oLrMGFSP+YLqWrFiRZK0+xSIbt26td729u1nzZqVXr165T3veU8aGhrypz/9KUmyfPnyJEm/fv2y2Wab5amnnsrvfve7
zJgxI717986AAQOqfDSwdnEOg+qpxnwBb3IOo1Z+8pOfJHn9/y/XXXddm9DurXr27Jlrr7229fbbbrutw9citgMA6CC9evXK5ptvnj/+8c2vUVmxYkXmzJmTIUOGtNt+8ODBWbZsWebOndt63dy5c7N8+fLWj9MePHhwm/0lybPPPrvK/cH6zHxB9ZgvqC4zBtVjvqC6Vq5cmSVLlrT7Idaqfhj6hsWLF2fWrFmZPn16nnjiiSxbtiwrVqzIokWLkrw+t6+++mrrD1yXLl2ahQsXZuONN67uwcBaxjkMqqca8wW8yTmMWnn44YdTV1eXQw89NFtuueXf3HbLLbfMYYcdlkqlkt/+9rcdvhZfIwsA66Fhw7av9RI6rfHjx+f662/IAQccmK222iqXX35ZunfvnvHjP93uzekhQ7bO8OHDc8011+Tkk0/OoEGD853vXJBRo0Zl770/kCT53Oc+lwkTJmTevMaMHDky9913X6ZPn57vfvcyjzOdjvmC6jFfUF1mDKrHfK3/Jux+ca2X0Kkd+LlFGXfkrrnh3+7Ni3Nfy8dP3CNbb9WSLx/64yxpXtZm23cP6JWTJo9JS0tLVq5cmft+8FzGn7FdbviP+3PbFa//gOvAY5sy9ohdcv3/ejzznlmYd/fbKKd9+6A8/OizmTLx5zU4Qi797Qm1XkKn5RzWCVQqtV5BpzX+iPG5/obrc8D+B/x1vi5/fb6OGN9+vgYPaTtfAwflO1O/k1EjR2XvvfZOklQqlSxdurT17/Tr2y9DBg9J165d062b5KJm6urK21AVzmHrv5deeqnWS2hnwYIFSZI999xztbbfc889c9VVV+WFF17o8LXUVSrO8gCwvlmxfGWtl9BpVSqVTJ48KTfedFOamhZlp512yvnnfSkNDQ1pbGzMQQcflEsuuSS7j9g9i1sW5+GHf5vvfe97eeSRR9KlS9eMGTMm5593fnr37t26zzvuvCOTJl2QefPmZcCAATnt1NMzbty4Gh4l1Ib5guoxX1BdZgyqx3yt/8Zt8JVaL6HT++yXxuSAY0ekV+/uefrhxlxw2u159omXssXA3rni0ZNz9sHX5PFfzc2WW2+Yb//yqGy66abp0qVLGv/wp9zwHw/k9isfad1Xly51+cz5Y/LBI96TPptvkKY/L8lv7nwmF3/xrix6taWGR9l53dV0fq2X0Gk5h63//Bi+diqVSiZfODk33XRjmpqastNOO+W8c89/fb7mN+bggw/KJRdfkhEjdk9Ly+I8/LuH3zJfXTJmzJicd+6b8zVv3ryM+9B+7e7npBNPysknf2FNHx5/VSe2qxnnsPVf1261+aLULl26pK6uLlOnTs3BBx/c5raePXtm6dKlufHGG3PooYcW93XzzTfn8MMPT319fZYsWdKh6xTbAcB6SGy3bljcsjgzZz7VennYsO3Ts0fPGq4I1h/mC6rHfEF1mTGoHvO1bhLbrTu23HrD/Os1Y1ov/8enf54Xn11UuwWxWsR26wbnsHWTH8OvG1paFmfm0zNbLw9rGJYe5mudILZbNziHrZvWxthu6623zty5c/ODH/wg48ePL+7r2muvzZFHHpk+ffpk4cKFHbpOn2kKAAAAAAAAAABAzb300kuZO3dum+saGhoyd+7czJ49e7X28cbXx2622WYdvTyxHQAAAAAAAAAAALV3/PHHv+NtDz300GrtY/r06UmSvn37dsia3qo2n/sHAAAAAAAAAAAAf1WpVP7mn3vvvTfNzc3Ffdx1112pq6vLbrvt1uFr9Ml2AAAAAAAAAAAA1MzRRx+9Wts99dRTfzOiu/POO/PCCy+krq4ue+yxR0ctr5XYDgAAAAAAAAAAgJq58sorO2Q/L730Uk477bQkyZgxYzpkn28ltgMAAAAAAAAAAGCd95nPfCaf+cxnqrb/LlXbMwAAAAAAAAAAAKwnxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAA
AAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKxHYAAAAAAAAAAABQILYDAAAAAAAAAACAArEdAAAAAAAAAAAAFIjtAAAAAAAAAAAAoEBsBwAAAAAAAAAAAAViOwAAAAAAAAAAACgQ2wEAAAAAAAAAAECB2A4AAAAAAAAAAAAKutV6AQAAAAAAAKtyx1/Oq/USWE0tLYvz9KyZrZcveej49OjRs4YrYnWM6/XlWi+B1bDl1hvmX6/dp/XyhBEX5cVnF9VwRayOu5q/VOslsDrq6tpdrnv7dQDwFj7ZDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAA
AAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAA/H/27j3M6rre//5rOA8KbjyggAomDpipqKSmoniidh7a7VJTME9Bns3KU261e3f3syxzp2bhuRQ8VVaaph08pukGUcQD4lakGBVEPDDDmXX/4Y/RCexj957FQubxuK65rtZan/nO59vqzcysnvNdAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAo6FTrDQAAAAAAAAAAAMAHcc899+T3v/99nn/++bz11ltZZ511MmjQoHz605/OzjvvXNWvLbYDAAAAAAAAAACgZubNm5fHHnssSbL99ttn7bXXXmHNE088kSOPPDKTJ09e6TG+/e1vZ4899sg111yT/v37V2Wf3kYWAKANVSqVXHLpJdlj+O7ZYej2OfyLozJt2nPvu37evHm59NJLc8wxx2SPPXbPaaeflrfeeqvVmrvuuiv77f/pbLf9kOx/wH75/e/vrvZpwGrJfEH1mC+oLjMG1WO+oLoqlUou/dEl2XOvPfLxHXfIEUccnmnTpr3v+vfO2PA998gZZ7SesV//5tcZdfjI7LLrztl1t0/kyKOOyGOTHlsVpwKrpSPOHZ6bpn8tt8/9Ri7641EZsFXv913brVu3DBw4MNtuu20uf+SEfP3yz6R+7S6t1nzu5J3z06dOym1zzsq4576SUd/YvdqnAKslPyNCdZkxquUXv/hFhg8fnn333TeVSmWFx//85z9nl112yeTJk1OpVN734957782O
O+74D393+d8Q2wEAtKGrr7k6t976y1xx+ZX584MPZbvtts/oMaPT1NS00vWXXXZZ3nzzzVx00UX51a9+nTlzXstZ3zir5fEnJj+RM848Paec/JU8+sh/5+STTsnpZ5yeKVOmrKpTgtWG+YLqMV9QXWYMqsd8QXVdc+3VufXWWzP2J1fkgfv/nCHbbZcvHzs6zc0fYMZu/VXmzJmTs89+d8aam5py3LHH5fd3/zH33nN/9t5r7xx77Ji88sorq+qUYLVx8Fd3yaeO2C5n7n9d/r3PBZny8Ix89/ZR6bZWlxXW1q/VJVtssUWam5szefLkfP3T16TvR3rljKs/27LmE/s15Ev/7z658Njf5ID1zs/Znx2ffzt+p+x3zA6r8rRgteBnRKguM7bmmz17dpt+fFD33ntvkmTYsGHp0aNHq8eamppy0EEHZf78+alUKunWrVsOOeSQfO9738sVV1yR733veznkkEPSrVu3lnP47Gc/m2XLlrXZfy/LeRtZAFgDzV8wv9ZbaLduuGF8Ro4clU023SSVVDJ69Oj8/Oe35M7f3ZH99tu/1drp06fn8ccfz/nnn5+ePXume/fuOeWUr+TQQ7+QF158IX369Mn48eOyyy67Ztjuw7Jk6ZIM231YPvGJXTJu/PU599zzanSWUBvmC6rHfEF1mTGoHvO15qssW/FqBqw6N954Q0aNHJlNN90kSSWjvzQ6v/jFz3Pn7+7Mfp/er9Xal15qPWP19d1z8smn5LCRh2b69Bey0UZ98tnPvhsGLV26JAcddFB+dNmlmTRpYvbcc69VfHYkyYabrfjWXKwanz1xp/zupxPT3NScXn265Y5rH83+X9oh+40ekgd//XSrtUNHfCQdO3ZMY2NjkqRzt+S3Vz+aM68+KFt+ok9ef+XtDPp4nzS+MCev/m1ONtxs7cxvbs5zj83MNrtvkgl/mlqLU2z3vE5fO35GhOoyY2u+3r3f/2q7/3+s7Cp1K/PYY4+lrq4uu+++4tV5x44dm1deeSV1dXXZddddc9NNN6VPnz4rrGtsbMwXvvCFPPjgg3nmmWcyfvz4jBo16n99Du8ltgOANdDUqc/WegvtUnNzcxobG9Ojx9qtnoNNNtkkDz/8cAYOHNhq/YQJE9K5c+f0798/STJ9+otJkk6dOuWee/6UHXbYIZMnT87OO+/c6ni9e2+QRx991PNMu2K+oHrMF1SXGYPqMV9QXS0z1rNHnpv2bqizySab5C9/eThbbPGPZ+ylGdOTuv87Y/fekx12WPHqWs8//3yam5vTqXOnVl+DVedr4/es9RbapQ4dOqT3xutkx8/1zVafWufdBzotyee+tkN2OmTDVut79uyZurq6lttHfm+n9OjRIx061OUrV+2dN998M506dcqGm/XMN287MG+//Xbq6+uzxRabZfr06Z7nGvGzQ234GRGqy4xRTa+++mqS5CMf+cgKj91+++1Jkr59++aOO+7I2muv/I9G+vbtm9/+9rfZcsst8/LLL+eWW25p89jO28gCALSR+fPf+UvFtdZaq9X9a621Vstjf7++vr5+hfu7d+/esn7+/Pnp3r37BzoerMnMF1SP+YLqMmNQPeYLqqsaM/Zec+bMycUXX5wDDzwwG2644QqPw5qsY8eOSZKlS5e2un/JkiUtj71XU1NTli5dmn79+qWuri6dO3duuZLL8vVLlizJ66+/noEDB2b77bfPlltumddeey1vvfVWlc8GVi9+RoTqMmNU09y5c5Nkhf89JMnTTz+durq6HH300e8b2i3Xo0ePHHPMMalUKpk4cWKb71NsBwDQRpb/stDU1NTq/qamppX+IlFfX7/SXxSam5tb1tfX16e5ufkDHQ/WZOYLqsd8QXWZMage8wXVVY0ZW+6VV17Jf/7nf2bnnXfOwQcf3Ia7hg+H5ZHd34d1nTp1WiHAW75+2rRp6d69e7beeus0NDTk9ddfT/JOZJckffr0yfrrr59nn302jz32WKZMmZKe
PXumX79+VT4bWL34GRGqy4xRTeuuu26Sd6O793rjjTeSJFtttdUHOtbydXPmzGmbzb2Ht5EFgDXQoEGDa72Fdqtv376ZN6+p5TlYsmRJ/va3v+Wggw5a4Xnp2rVbfvCDH2TGjBnZdNNNM2DAZpkx46UsWbIke+65V/r06ZNtttkms2bNavW5s2fPzrbbbut5pt0xX1A95guqy4xB9ZivNV9lWaXWW2jX+vbtm3lvz0vDFoOSvDtjn//8QS33Lde1S9dWM9Z/0wGZMWPGOzM2fM9stNE7V+GaNm1avv3tb+egzx+UL31p9Co/J1r78sd/UusttFv/9ceP5JGfz8xd101KknToWJcfPbBlfnHhxDz466dbrV2vX/cc+b2dMm3atCTJtac9kgGD++SE7/fNd75wZ+bPW5Sv//izmfy3KfnZt+9p+bxPHr5d9jl0SC487J6w6l0+8bhab6Hd8jMiVJcZW/PNmjWrJl93k002yaxZs/LYY4/lmGOOafXY+uuvn5dffvkDX/FwwYIFSVZ+lbz/LbEdAKyB6rv5S49aOfTQw3L99ddn2G67ZZNNNs1VV12Vzp07518/9ekVnpcBAwZkyJAhGTduXE444YQ0Nzfn4osvzp7D98xHNvtIkuSwQw/LEUcekT8/+GD22GN47rvv3jz00EP52U+v8zzT7pgvqB7zBdVlxqB6zNeab5nYrqa+8IVDc/2467PrrsOyySab5Kqrl8/Yv6bb381E//6tZ2z+/OZcfMnFGT58zwwY8M6MTXp8Uk444bgcd+zxOfzwL9bilPg7r744r9ZbaLduvfSR/NvxO+X+X0xN4/+8nlHf2D2LFyzJb694PAuaFq2wvnv37lmwYEGWLVuW9TfqlcNOG56f/uc9mf7kO1e4m/D7F3LAmKHpVv+XvPT07PTeZJ3sOGJQnnlkpue5RvzsUDt+RoTqMmNrvrXXXqu8qAo++clPZsKECbn11ltz4YUXplu3bi2PDR06NLfddlseeOCBHHHEEcVj3XfffUmSjTfeuM33WVepVPymCgBrmKVLltV6C+1WpVLJpZdekptvuSVNTfOy1VZb5Zz/ODcNDQ1pbGzMAQcekLFjx2boDkMzf8H8TJw4Iddee20mTZqUDh06Zvjw4TnnP85Jz549W475u7t+l0suuTgzZ85Mv379csrJX8mIESNqeJZQG+YLqsd8QXWZMage87XmE9vVVqVSyY9+dGlu+fnNaWpqylYf3Spnn31OGhoa8vLLjTnwMwfkJz8emx12GJoFC+bnsUkT3zNjHTJ8j+E5++x3Z+yoo4/MhAn/3er/NEuS0aPHZMzoL9fiFNu9T639rVpvoV078rw9s98xO6R7z655bmJjLj7lt3lxyqz03mSdXP3ECTnrgOvz5J9nZMPN1s5/PfDFrLvuuunQoUMa/+f13PT9h3LHNY+1HKtDh7p88dzh2fsL26RX77XS9NbC/Pdd0/KTM+7OvDcW1PAs26+7m8+t9RbaLT8jQnWZsTVfx04davJ1n3nmmWy99dapVCo5+eSTc9FFF7U8dtttt+Uzn/lMunTpkoceeijbb7/9+x5n4sSJ2WWXXbJkyZJ87WtfywUXXNCm+xTbAcAaSGz34TB/wfxMnfpsy+1Bgwb7Cx1oI+YLqsd8QXWZMage8/XhJLb78FiwYH6emza15XbDFoNWuPodqx+x3YfDhputna+N37Pl9oWH3eNqdR8CYrsPBz8jQnWZsQ+nWsV2SXLcccdl7Nixqaury5gxY3LBBRekR48eSZJDDjkkt9xyS3r16pXvf//7GTVqVDp37tzyuYsXL851112X0047LXPnzk3Xrl3zxBNPpKGhoU336G1kAQAAAAAAAAAAqKkLL7wwkydPzsMPP5zLL78848aNy2c+85nsvPPOOeiggzJ58uRMnTo1X/rSl3LKKadk6623zjrrrJM333wzTz75ZJqamlKpVFJXV5dzzz23zUO7RGwHAAAAAAAAAABAjXXv
3j133nlnRo0aldtvvz3z5s3L+PHjM378+JY1dXV1qVQqmTdvXv7yl7+03L/8zV3r6upy3nnn5ayzzqrKHmt33T8AAAAAAAAAAAD4v3r27Jnf/OY3+dnPfpatttoqlUql1UfyTlCXpNX9HTp0yD777JNHH300555bvbdzd2U7AAAAAAAAAAAAVhujRo3KqFGj8pe//CX33XdfHn300bz88suZO3duKpVKevTokV69emXw4MEZMmRIPv3pT2ejjTaq+r7EdgAAAAAAAAAAAKx2dt555+y888613kYLbyMLAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABZ1qvQEAAAAAAICV6dChrtZb4AOq+7vnqq5DnefvQ+Du5nNrvQU+gPkL5mfq1Gdbbl8+8bjUd6uv4Y74IEas9a1ab4EPYMPN1s7Xxg1vuT1m
6E/y6ovzarchPrC7m86p9RaAdsqV7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAoGZmzJiRBQsW1HobRWI7AIA2VKlUcsmll2SP4btnh6Hb5/Avjsq0ac+97/p58+bl0ksvzTHHHJM99tg9p51+Wt56661Wa+66667st/+ns932Q7L/Afvl97+/u9qnAasl8wXV09bz9eyzz2bMl8dk2O7D8tGttsxDDz+0Kk4DVlu+h0H1mC+oLjMG1WO+oLqOOGd4bnrxq7n99bNy0R+OzICPbvC+a7t165aBAwdm2223zeV/OT5fH3tg6tfu0mrN507eOT996sTc9tqZGTf1
lIw6a/dqnwKstryWSLUMGDAgG220UY4//vhMmjSp1tt5X2I7AIA2dPU1V+fWW3+ZKy6/Mn9+8KFst932GT1mdJqamla6/rLLLsubb76Ziy66KL/61a8zZ85rOesbZ7U8/sTkJ3LGmafnlJO/kkcf+e+cfNIpOf2M0zNlypRVdUqw2jBfUD1tPV+dO3fOvvvskx9f9uNVdQqwWvM9DKrHfEF1mTGoHvMF1XPwV3fJp44YkjP3vz7/3vd7mfLwX/Pd20el21qdV1hbv1aXbLHFFmlubs7kyZPz9f2uTd+P9MoZV/1by5pP7NeQL31r71x47G05YP3v5OzPjs+/Hb9j9jtm+1V4VrD68Foi1fT2229n7NixGTp0aIYOHZorrrgi8+bNq/W2WhHbAQC0oRtvvCFHHXlUGhoa0q1bt5x80slZvHhx/vjHP6yw9uWXG/P4449n1KhR6dmzZ3r16pXTTzsj99zzpzQ2NiZJbrrppgwbNiwjRoxI586dM2LEiOy222658aYbVvWpQc2ZL6ietp6vzTffPAcddHA+9rGPrepTgdWS72FQPeYLqsuMQfWYL6ieA788NLf818N58alZWbRgSa755p/SqUvH7PaZLVdY27B933Ts2DGNjY2pVCp5a05zrj///ux64OBssHHPJEnfzdfNS8/OzuQHXkqSTH96dp588KUMHNJnlZ4XrC68lrjmmz17dpt+/LMqlUoqlUomTZqUY489Nn369Mno0aPz6KOPVuFs/3mdar0BAKDtzV8wv9ZbaJfefvvtzJw5Mw2DGlo9Bw0Ng/LklCez74gRrdZPmfJUOnfunP79+ydJFi5cmP4D+qdz586Z/OTk9Fq3V5555unss8++rY43ePDg/OEPf/Q8066YL6ieaszX31u0aJG5ot3yPQyqx3xBdZkxqB7z1T5suNnatd5Cu1S/dpf0GdArs2a+3uo5mPHc7Gy7xyZ58uEXWq3vuX631NXVtdxer1/3rNu3ezp0qMvQEQPy2D0v5MmHX8gBo3fIXod+NE8/MiObDtog2wzrn5+cdafnuYb821YbXktsH3r37t2mx6tUKv/U+u233z5PPfVUFi5cmCRpamrK1Vdfnauvvjof+9jH8uUvfzkjR47MOuus06b7/KDEdgCwBpo69dlab6FdmjNnTpLktddea/Uc1NUlM2fOXOF5eeml6amvr2+5PX36i0mS+vr6PP/8tGy00YaZO3dumpubWn1uc3Nz3nhjrueZdsV8QfVUY77+3l//OiP/8i+1eeEDas33MKge8wXVZcagesxX+/C1ccNrvYV2qXPnd94q9uBzts2CBYNa7t94s57pO2jtbLh1Xav1HTt2zNKlS9OvX780NjZm9EW7ZrPNNkuSfO70IdnzS5smSTqstTBnXf25ljDvlVdeyb7HbZZ9j9tsVZwWK+HfttrwWiKrwrnnnpvddtstP/3pT3PVVVfl6aefbgn2pkyZkpNOOimnnXZaDjrooIwePTq77rrrKt2ft5EFAGgjy39ZaGpqanV/U1NTq18k3rt+/vwV/zKnubm5ZX19fX2am5s/0PFgTWa+oHqqMV/Au3wPg+oxX1BdZgyqx3xB9SxdujTJOxHde3Xq1Knlsb9fP23atHTv3j1bb711Ghoa8vrrrydJlixZkiTp06dP1l9//Tz77LN57LHHMmXKlPTs2TP9+vWr8tnA6sdriawq6667bk499dRMmTIlDz74YL74xS+me/fuLW8xO3/+/Fx33XXZfffds9VWW+WHP/xhy7/f1Sa2AwBoI927d88GG2yQF1549zL0S5cuzUsvvZQBAwassL5///5ZvHhxZsyY0XLfjBkzsmTJkpbLaffv37/V8ZLkxRdfXOnxYE1mvqB6qjFfwLt8D4PqMV9QXWYMqsd8QfUsW7YsCxcuTPfu3Vvdv7Igdbn58+dn2rRpmTx5cp566qksXrw4S5cuzbx585K8M7NvvPFGSzC0aNGizJkzp2ZvXwi15LVE
amGXXXbJtddem8bGxlx66aUZMmRIkrSEd88++2y++tWvpl+/fhk5cmTuvffequ7H28gCwBpo0KDBtd5Cu3XYYYflxhtvyn777Z+NN944V111Zbp27ZrDDhu5wi/3AwZsliFDhmTcuHE54YQTsumm/fPDH16c3XffPbvuuluS5Oijj86YMWMyc2Zjhg0blgceeCCTJ0/OFVdc6Xmm3TFfUD1tPV+VSiWLFi1q+Zw+ffpmwIDN0rFjx3Tq5KUI2h/fw6B6zBdUlxmD6jFfa74xQ39S6y20W/sfPS8jRm2Xm775p7w648189ridstnGC/Kfn7stC5sXt1q7Xr/uOf7S4VmwYEGWLVuWB677aw77+sDcdOGDuf3qCe8c75im7HPotrnxvCcz8/k5Wa9Pj5zyXwdk4uMv5rLT763BGZIkl084ttZbaLe8lrjmmzVrVq23sFI9e/bM8ccfn+OPPz4TJ07M5ZdfnhtvvDFvv/12kmThwoW58cYbc+ONN2bgwIEZPXp0jjjiiGywwQZtuo+6yvI3tQUA1hhLlyyr9RbarUqlkksvvSQ333JLmprmZauttso5/3FuGhoa0tjYmAMOPCBjx47N0B2GZv6C+Zk4cUKuvfbaTJo0KR06dMzw4cNzzn+ck549e7Yc83d3/S6XXHJxZs6cmX79+uWUk7+SESNG1PAsoTbMF1RPW8/XzJkzs++IfVb4Oscff0JOPOHEVX16UHO+h0H1mC+oLjMG1WO+1nwj1vpWrbfQrh157vDsd8wO6d6za56b2JiLT7kjLz41K7036ZmrHz8hZx04Lk/+eUY23Gzt/Nf9h2fddddNhw4d0vg/r+emCx/KHddMajlWhw51+eI5w7P3oVun1wZrpemthfnvu57PT868O/PeWFDDs2zf7m46p9ZbaLe8lrjm69ipNm+U2qFDh9TV1eXWW2/NgQce+IE+p7m5OTfccEOuvPLKPPLIIy3319XVJUm6dOmy0rcy/t8Q2wHAGkhs9+Ewf8H8TJ36bMvtQYMGp75bfQ13BGsO8wXVY76guswYVI/5guoyY1A95uvDSWz34bDhZmvna+OGt9y+cOS9efXFebXbEB+Y2O7DwfewD6cPU2z3XlOmTMnll1+ecePGZe7cuUneie6WLl3atvts06MBAAAAAAAAAADAKvSxj30sF198cRobG3Pddddl2LBhVfk6YjsAAAAAAAAAAAA+9Lp27ZqRI0fmvvvuy7PPPlv+hH+S2A4AAAAAAAAAAIA1yhZbbNHmxxTbAQAAAAAAAAAAQEGnWm8AAAAAAAAAAACA9uu8885LkgwePLjGO/nHxHYAAAAAAAAAAADUzPLYbnXnbWQBAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAA
AAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgoFOtNwAAAAAAAAAAa5KbZ3691lvgA1i0aGEaX36x5faFfzoqXbp0reGO+KBGDb2s1lvgA1ivb32O+taQlttnHnxj5jTOr92G+EBuePzEWm9htebKdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAADxqlMAAC1YSURBVAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArE
dgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABZ1qvQEAAAAAAAAAAABYrrm5OfPmzUvv3r1X+vgbb7yRa665Jg8//HDefvvtrL/++tlpp51y8MEHv+/ntAVXtgMAaEOVSiWXXHpJ9hi+e3YYun0O/+KoTJv23PuunzdvXi699NIcc8wx2WOP3XPa6aflrbfearXmrrvuyn77fzrbbT8k+x+wX37/+7urfRqwWjJfUD3mC6rLjEH1mC+oLjMG1WO+oHoqlUquvPLHOfDAEdlr711y3PHH5H9eeP591998880566yzcvjhh+fEk778D499083js8uu22fs5T9q623Dh8rnj9sxl919VK55+Ms596rPZuPN133ftd27d09DQ0O23XbbfOeGkfnqD/416/fp0fL47gcOzrjHTsg1D41p+fh/fvq5VXEarKb+9Kc/Zccdd0yPHj3Sp0+fbLjhhrngggtarbnvvvvS0NCQr3/96/nFL36Ru+++O+PHj88pp5ySgQMH5vLLL6/a/sR2AABt6Oprrs6tt/4yV1x+Zf784EPZbrvtM3rM6DQ1Na10/WWXXZY333wzF110UX71q19nzpzXctY3zmp5/InJT+SMM0/PKSd/JY8+8t85+aRTcvoZp2fKlCmr6pRgtWG+oHrMF1SXGYPqMV9QXWYMqsd8QfWMH/+z3P7b3+Sii36UO+/4U7bZekhOPfWENDc3r3R979698/nPfz577bXXPzzuSy9Nzy233JDNNx9YjW3Dh8b+R2yX4Z/ZMucf/5uMGX5lnnv8lZz14wPTtb7zCmvr6pKBAwemubk5kydPznlH35ylS5blpO+MaLVu7uymHLXL5S0f5x3xi1V1OqxmfvWrX+WTn/xkJk6cmEqlkkqlktmzZ+ess87K2WefnSR5/vnn8+///u+ZM2dOy5r3fsybNy/HHXdcrrrqqqrsUWwHANCGbrzxhhx15FFpaGhIt27dcvJJJ2fx4sX54x//sMLal19uzOOPP55Ro0alZ8+e6dWrV04/7Yzcc8+f0tjYmCS56aabMmzYsIwYMSKdO3fOiBEjsttuu+XGm25Y1acGNWe+oHrMF1SXGYPqMV9QXWYMqsd8QfX88tZbcuihh2fzzbdI167dMnr0cVmyZHHuu/9PK10/fPjw7LDDDunRo8dKH0+SpUuX5lv/77k56aSvpmfPdaq1dfhQ2PfgrXP7zyblr8/PyeKFS3PzZX9Jp04d8/G9PrLC2m5rdUnnzp1boqiF8xfn/tumZsCgDWqwcz6o2bNnt+nHB/XGG29k
9OjRWbp0aSqVSjp16pSNNtoonTp1SqVSyXe/+9088cQTOeOMMzJ37txUKpXsvffeueCCCzJ27Nh84xvfyOabb57knaucnnrqqf/U1/+gOrX5EQGAmpu/YH6tt9Auvf3225k5c2YaBjW0eg4aGgblySlPZt8Rrf9KZ8qUp9K5c+f0798/SbJw4cL0H9A/nTt3zuQnJ6fXur3yzDNPZ5999m11vMGDB+cPf/ij55l2xXxB9ZgvqC4zBtVjvqC6zBhUj/lqHxYtWljrLbRL8+bNy8svN6Zhi4ZWz8HAgQ159pmns/de+7Zav3jxola3K5XKSp+7n113Tfps1Cef2HmX3Hjj9Vm6dKnnuMbW61tf6y20S926d07vfj3z2itvtnoOZk5/PVsO7ZNnJs1otb5r9w6ZNWtW1l9//fztb3/LRgN6ZN9DtsoTD09v+fy1/6VL1lm3Pj/541FJkhnTXsttP5uYxulzV92J0Urv3r3b9HiVSuUDrbvuuusyZ86c1NXV5eCDD85ll12WXr16Ze7cuTn22GPz85//PN///vdz++23p2PHjrn++utzyCGHtDrGN7/5zRx11FEZN25cmpqacu211+a0005r0/MR2wHAGmjq1GdrvYV2ac6cOUmS1157rdVzUFeXzJw5c4Xn5aWXpqe+/t1fRKZPfzFJUl9fn+efn5aNNtowc+fOTXNzU6vPbW5uzhtvzPU8066YL6ge8wXVZcageswXVJcZg+oxX1A9y+eref6baXz5xZb7O3XqkNmvvdLqvpVZvHjhCmumT5+eX//6F/k//+f/pPHlF7No0YLMm/dG8VhU11HfGlLrLbRLnTu/81axnzrmI1mwoG/L/RsOqM8Gm3bNOpsOWeFz5s6dm0033TTbbbddtvvRdpk/f36mTZvW8hx26dIlz059JgsXLkzHjh3TZ7M+Of3iA/P0009n8eLFq+K0+Ds/urM2X/eOO+5IkvTp0yc/+9nPWv731qtXr1x33XV58MEHM378+CTJCSecsEJolySdOnXK1VdfnYceeijTp0/PH/7whzaP7byNLABAG1n+gldTU1Or+5uamlq9GPbe9fPnr/hXpc3NzS3r6+vr09zc/IGOB2sy8wXVY76guswYVI/5guoyY1A95guq55+dr5IlS5bkxz/+cQ4//PB/+Daz0F4sXbo0SdKxY8dW93fq1Knlsffq2rVrtthii8yePTuTJk3K448/njfeeCODBw9Ohw7vJEuLFi3KwoULW47/t7/9LUuXLs0663jL5vbmqaeeSl1dXY444oiW0G65Ll265Igjjmi5St5RRx31vsfp3LlzRo4cmUqlkqeeeqrN9ym2AwBoI927d88GG2yQF154oeW+pUuX5qWXXsqAAQNWWN+/f/8sXrw4M2a8e0ntGTNmZMmSJS1vCdG/f/9Wx0uSF198caXHgzWZ+YLqMV9QXWYMqsd8QXWZMage8wXV88/OV8ncuXPz17/+NVdeeWXGjBmTMWPG5Lnnnssdd9zR5ldKgg+DZcuWZeHChenevXur+1cWfS+/v1KpZPbs2alUKlm2bFleffXVdO3aNd26dVtV2+ZDYvnVSQcOHLjSx7fYYouW/7zlllv+w2N99KMfTZK8/vrrbbS7d3kbWQBYAw0aNLjWW2i3DjvssNx4403Zb7/9s/HGG+eqq65M165dc9hhI1f4xWPAgM0yZMiQjBs3LieccEI23bR/fvjDi7P77rtn1113S5IcffTRGTNmTGbObMywYcPywAMPZPLkybniiis9z7Q75guqx3xBdZkxqB7zBdVlxqB6zNea7+23FtZ6C+3W5/794Pzy1p9n+PAR6de3X6677tp06dIlBxzw+XSvbz1fixcvysuvvJRly5Zl2bJl6dSpS9Zb7523xuzapWs27L1pbr75160+55vfPDtbDv5oDjvs8Ky33vqr7Lxo7dtfurXWW2i39v7c0uxxwEdz3ZUT8trLb+VTXxiSpv6LcsnX7s+iBUta
rR2w5Xr56vc2y/rrr5/XXnstt/3kuXx8j0HZctCiXH3ehMyftyjbfKJ/Xpo6O2++3pz6tbpkxMHbZmlDcuW5D2fu7Kb32QXVNGvWrJp83eVXR+zatetKH3/v/cuvjPh+lj++/Ep4bamuUo2jAgA1tXTJslpvod2qVCq59NJLcvMtt6SpaV622mqrnPMf56ahoSGNjY054MADMnbs2AzdYWjmL5ifiRMn5Nprr82kSZPSoUPHDB8+POf8xznp2bNnyzF/d9fvcsklF2fmzJnp169fTjn5KxkxYkQNzxJqw3xB9ZgvqC4zBtVjvqC6zBhUj/la873xxopv/cuqUalUcuWVP8mvf/PLNDU1ZfDgLfP1r52ZzTffIq+88nJGjvp8Lvz+JRkyZPssWrQw5553Ru6///4VjvPQnx9b6fFPOHF0ttlmSL485oRqnwr/wIn7XFPrLbRrnz9ux+z9ua1Sv1aXvPD0rFxz/v356/Nzst5Ga+f7vzws3znhtkyd9HLW61ufk/9rWPr27Ztu3bpl0fyleem513LLZY/kmYmNSZKjv7FHhu75kdSv3SULmhblhadn55bLHsn0Z2fX+CzbrxseP7EmX3ejjTbK7Nmz8+1vfztnnnnmCo9/5zvfyTe+8Y3U1dVlypQp//Dqdueff37OPvvs9O7dO6+88kqb7lNsBwBrILHdh8P8BfMzdeqzLbcHDRqc+m71NdwRrDnMF1SP+YLqMmNQPeYLqsuMQfWYrw8nsd2Hw6JFC9P48ostt/v22Sxduqz8ikqsXsR2Hw7r9a3PUd8a0nL7mnMez5xG/z6u7moV2w0fPjz3339/Pv7xj+eRRx5Z4fEdd9wxEyZMSF1dXc4555x885vffN9jDRkyJE8++WR23XXXlQbV/xv/+Jp6AAAAAAAAAAAAUEX77LNPkmTChAm5/PLLWz02duzYltBu2LBhueCCC/LQQw+t9DjnnntuJk+enCTZbbfd2nyfndr8iAAAAAAAAAAAAPABHXXUUTn//POzYMGCHHfccbnqqqvS0NCQqVOnZuLEiamrq8t2222X73znO9lll12y11575cgjj8y+++6bf/mXf8mMGTMybty43HPPPUmSurq6HHHEEW2+T7EdAAAAAAAAAAAANdOvX79897vfzcknn5y6urpMmDAhEyZMSJJUKpV07do1P/rRj7LTTjvlX//1X3PnnXfmiiuuyBVXXNHqOJVKJXV1dTnxxBMzaNCgNt+nt5EFAAAAAAAAAACgpk488cRcdtll6dGjRyqVSsvHBhtskBtuuCE77bRTkmT8+PH52Mc+1mrN8o8kGTlyZH7wgx9UZY+ubAcAAAAAAAAAAEDNHXvssTnyyCPz0EMP5dVXX82GG26YXXfdNV27dm1Zs8466+TRRx/Nj3/84/zmN79JY2NjevTokW222SajRo3KXnvtVbX9ie0AAAAAAAAAAABYLXTr1q0YzHXr1i2nnnpqTj311FW0q3d4G1kAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAA
AKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoENsBAAAAAAAAAABAgdgOAAAAAAAAAAAACsR2AAAAAAAAAAAAUCC2AwAAAAAAAAAAgAKxHQAAAAAAAAAAABSI7QAAAAAAAAAAAKBAbAcAAAAAAAAAAAAFYjsAAAAAAAAAAAAoqKtUKpVabwIAoL2ZPXt2evfu3eq+WbNmZYMNNqjRjmDNYb6geswXVJcZg+oxX1BdZgyqx3xB9ZgvqC4zxprKle0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKBDbAQAAAAAAAAAAQIHYDgAAAAAAAAAAAArEdgAAAAAAAAAAAFAgtgMAAAAAAAAAAIACsR0AAAAAAAAAAAAUiO0AAAAAAAAAAACgQGwHAAAAAAAAAAAABWI7AAAAAAAAAAAAKKirVCqVWm8CAAAAAAAAAAAAVmeubAcAAAAAAAAAAAAFYjsAAAAAAAAA+P/au/fgmu/8j+OvWHKSHNckNsGyQpGOW5HQ0kpdgtGLiY3SumXS1hKytQgdmyxbtZTSraZu1TZBprWJra2WVUqoS5m0bNUlQSNq4k4icjnR5vz+MP3+kkVOqJOvczwfM5n5fJz3mXn9k4lzzut8PgAAAA5QtgMAAAAAAAAAAAAAAAAAwAHKdgAAAAAAAAAAAAAAAAAAOEDZDgAAAAAAAAAAAAAAAAAAByjbAQAAAAAAAAAAAAAAAADgAGU7AAAAAAAAAAAAAAAAAAAcoGwHAAAAAAAAAAAAAAAAAIADlO0AAAAAAAAAAAAAAAAAAHCAsh0AAEA127Ztm6KiovTQQw/JarWqQYMGat++veLi4nTs2DGz4wEuyW6369ixY1qzZo2mTZumvn37ys/PTx4eHvLw8FDz5s3Njgi4rGvXrmndunWaOHGinnjiCQUEBMjT01O1a9dW
ixYtFBkZqZSUFNlsNrOjAi4nIyNDS5Ys0Ysvvqhu3bqpRYsWqlu3rjw9PeXv769HH31UkyZN0oEDB8yOCrid5cuXG/9X9PDw0JNPPml2JMClREVFVfgdcvSTmJhodmTAZR0+fFjx8fEKDQ1VYGCgPD09FRAQoA4dOigqKkpJSUm6evWq2TEBlzBz5sw7+vtV/ufkyZNmxwdcxunTpzVr1iyFhYUpICBAFotFPj4+atq0qQYMGKBFixbpypUrZscE7pqH3W63mx0CAADgQWCz2fTSSy9p9erVt53x8vLS3Llz9corr1RjMsD1TZ48WQsXLrzt47///e95Qwy4CwsXLtRf/vIXlZSUOJxt2bKlVq5cqe7du1dDMsA91K9fX/n5+VWaHTVqlJYuXSpvb28npwLc36lTp9SuXTsVFBQY/xYWFqb09HTzQgEuJioqSsnJyVWef+eddzRhwgQnJgLcT2FhoaZMmaLly5errKys0tlt27ZRHAeqYObMmfrb3/52x8+rU6eOzp49Kx8fHyekAtxLYmKipk6dquLi4krn/Pz89N577ykiIqKakgH3Tk2zAwAAADwI7Ha7hg8frrVr10qSateurejoaIWGhspms2nTpk1KS0tTSUmJJk6cqFq1aikmJsbk1IDr+PnnnyvsfXx81KpVK/33v/81KRHgHrKysoyiXaNGjdSnTx+FhoYqICBApaWl+uabb7Rq1SpdvnxZJ06cUHh4uLZs2aLHHnvM5OSA6/Dz81PXrl3VsWNHBQUFqUGDBiorK1Nubq7S09P12WefqaysTCtXrtS5c+e0ceNGeXh4mB0bcGkvvviiCgoKZLVaVVhYaHYcwOUtW7ZMv/3tbyud6dixYzWlAdxDfn6+Bg4cqN27d0uSAgICFBERoc6dO6tBgwYqKirSDz/8oB07duirr74yOS3gOoYNG6ZHHnmkSrOzZs3St99+K0l6/vnnKdoBVbB06VLFxsYa+5CQEEVERKhZs2ay2Ww6fvy4kpOTdebMGV26dEmRkZHatGmT+vbta2Jq4M5xsh0AAEA1WLVqlUaNGiVJatiwobZv366HH364wkxqaqqGDh0qu90ui8Wio0ePcvUlUEXLly/XkSNH1LlzZ3Xu3FnBwcH68ccfFRQUJImT7YC7NW7cOB0/flyTJ09WeHi4fvOb39w0c+HCBT377LP6+uuvJUlt2rTR4cOHVaNGjeqOC7icgwcPql27dpWW5zIyMtS/f39dvnxZkpSWlqY//OEP1RURcDvLli3T2LFjVbNmTc2bN0+TJk2SxMl2wJ0qf7JddnY2718A99izzz6r9evXS5JiY2M1d+7c2xZ98vPz5eHhobp161ZnRMCtXbp0SU2aNJHNZpN043VZly5dTE4F3N+Ki4sVGBhoXG2+ePFijRs37qY5m82m0aNHa82aNZKkLl26KCMjo1qzAr8W73wDAAA4md1uV0JCgrFPTEy8qWgnSUOGDNHYsWMl3XixcTfH2QMPqjFjxuitt97SyJEj1bZt21sWggDcudmzZ2vz5s0aMGDAbX+vGjZsqLVr1xpXW2ZmZnKyAlBF7du3d3hKXUhIiKZPn27sP/30U2fHAtxWTk6O4uLiJElxcXHq1KmTyYkAALjZ6tWrjaLdyy+/rEWLFlV6ola9evUo2gH32MqVK42iXadOnSjaAVWwe/duo2gXEhJyy6KdJFksFi1evFg1a964iPObb77RtWvXqi0ncC9QtgMAAHCynTt3KicnR9KN07UiIyNvOzt58mRjvXbtWuMFPQAAZvD19a3SXOPGjdWzZ09j/9133zkrEvBAateunbE+e/asiUkA12W32xUdHa2CggIFBwdrxowZZkcCAOCW5s6dK0myWq2aP3++yWmAB9OKFSuM9ZgxY0xMAriOc+fOGevWrVtXOuvr6yt/f39jT9kOroayHQAAgJNt2LDBWA8YMKDSa/VatmxpvAgpKCjQjh07nJ4PAIB7ofxJCkVFRSYmAdzP
sWPHjHVgYKCJSQDXtXTpUm3dulU1atTQihUrZLFYzI4EAMBN9uzZo0OHDkmSBg0apHr16pmcCHjw7Nq1S4cPH5Yk+fj46IUXXjA5EeAaAgICjHVmZmals5cvX9bFixclSf7+/hWeC7gCynYAAABOVv50n65duzqcLz/DyUAAAFfx/fffG+vmzZubFwRwM0ePHtXs2bON/ZAhQ0xMA7imkydPaurUqZKk8ePHq0ePHiYnAtzLmDFj1Lx5c3l5ealOnTpq0aKFhgwZoqSkJJWWlpodD3Ap27dvN9aPPvqoJGn9+vUaNGiQmjRpIovFooCAAPXq1UsLFizgJCDACd577z1jPXToUK5pBqqoR48eatiwoaQbV8MuWbLklnM2m03jxo3TTz/9JEmaNGmSPDw8qi0ncC/UNDsAAACAuyv/DZ6goCCH8+Vnjh496pRMAADcS+np6Tpy5IgkydPTU/369TM5EeB6vv76a+OK2OvXr+v8+fPatWuX1q5daxQVYmJi9PTTT5sZE3A5v1wfe+3aNTVv3lxz5swxOxLgdjZv3mysbTabrl27puzsbKWlpSkhIUHJycnq3bu3iQkB17Fv3z5jHRgYqKFDh+qf//xnhZnz58/r/PnzSk9P15tvvqnU1FQ9/vjj1R0VcEv5+flKTU019lwhC1Sdl5eXli1bpmHDhqm0tFQxMTH64IMPNHjwYDVr1kw2m03Hjh1TcnKyzpw5Iw8PD02dOlXTpk0zOzpwxyjbAQAAONmVK1eMtb+/v8P58jN5eXnOiAQAwD1TVFSksWPHGvvY2Fg1aNDAxESAa3r99df1+eef3/Kxjh07avLkyRo5cmQ1pwJc3+LFi7Vt2zZJ0vLly2W1Wk1OBLgPq9Wq3r17q2vXrmrevLksFosuXLigPXv2aO3atSouLtbp06cVHh6utLQ0RUREmB0ZuO+dOXPGWCckJCgzM1O1atXS8OHD1bNnT3l7e+vQoUP64IMPlJubq7Nnzyo8PFy7d+9Wp06dTEwOuIeUlBQVFRVJktq1a2ecMAmgaiIiIpSenq7x48dr//79ysjIUEZGxk1zL7zwgqZMmcLfLrgsynYAAABOVlBQYKy9vb0dzpefuXr1qlMyAQBwL9jtdo0cOdI4xbVVq1aaMWOGyakA91KvXj3169dPnTt3NjsK4HKys7ONUxKio6MVHh5uciLAfUyYMEGJiYmqXbv2TY/FxMRo3rx5ev7557V9+3aVlZVpxIgRysrKUpMmTUxIC7iO8l/azczMVP369fXFF18oNDS0wlxcXJyeeuop7dy5UyUlJYqKitKBAwe4hg/4lcpfIcupdsDdeeyxx/Tuu+/q1Vdf1Y4dO245k5qaquLiYr3xxhtq1apVNScEfr0aZgcAAAAAAACuafLkyfrXv/4lSapTp47S0tJUp04dk1MBrumzzz6T3W6X3W5XUVGRMjMz9e6778rX11fz589Xp06dtGjRIrNjAi7jl+tjCwsL1ahRIy1YsMDsSIBbCQkJuWXR7heNGjXS559/rjZt2ki6cRryG2+8UV3xAJdlt9sr7OfPn39T0U6S6tatqzVr1sjLy0uS9N1332nr1q3VkhFwVxkZGTpw4ICkG9dhjhgxwtxAgAvKy8vTU089pe7du2vv3r2Kj4/X999/r+LiYhUWFiojI0Pjx4/Xzz//rE8++UTdunVTenq62bGBO0bZDgAAwMnKlw6Ki4sdzpefqVu3rlMyAQDwa02fPl1vvfWWJKl27drasGGDOnToYHIqwD14e3urdevWiomJ0cGDB9WzZ09dv35dr7zyilasWGF2PMAlJCYmGh/aLFmyRPXr1zc1D/Agslqtio+PN/affvqpiWkA11D+fUSr1aqRI0fedrZx48YaNGiQsd+8ebNTswHurvypdpGRkWrQoIGJaQDXU1xcrJ49e2rDhg2qVauWNm/erFmzZqlt27by8vKSj4+PunTposTERKWkpEi6caJrZGSkLl++bHJ64M5Q
tgMAAHCy8h/qXLx40eF8+Rk+EAIA3I/i4+M1Z84cSf9ftHv88cdNTgW4J6vVqqSkJONKsBkzZtx04gmAik6cOKFXX31VkvTcc89VKCIAqF69e/c21jk5OSoqKjIxDXD/K1/uad++vSwWS6XzISEhxvr48eNOywW4u8LCQn300UfGnitkgTu3dOlSHTx4UJI0evRoPfHEE7edHTZsmPr06SNJunTpkj788MNqyQjcKzXNDgAAAODugoOD9cMPP0iSsrOz1atXr0rns7OzKzwXAID7yfTp042iXZ06dbRx40b16NHD5FSAewsKCtLDDz+sw4cPKzc3V5mZmfw/EajE6tWrjUJPQECAXn/99VvOlX/tlZOTU2EuLi7OYcEBgGMNGzassM/Ly5OPj49JaYD7X3BwsLZs2SJJqlevnsP58l/UvXr1qrNiAW7v448/VkFBgaQbv4eVlYQA3Nq///1vY92vXz+H8/3799eXX34pSdqzZ4/TcgHOQNkOAADAyTp06KANGzZIkvbt26fo6OhK5/ft21fhuQAA3C+mTZumefPmSbpx1fnGjRvVvXt3k1MBD4byV4pduXLFxCTA/a/86Y/vvPNOlZ5z8uRJJSQkGPsJEyZQtgPugf894Z8r+YDKPfLII8Y6Pz/f4XxeXp6xrko5D8Ctlb9C9uWXXzYxCeC6cnNzjXVVbm0q/3frl7Ir4Cq4RhYAAMDJBg4caKz/85//qKys7LazJ06cUFZWlqQbH6jyDToAwP1iypQpRtGuXr16+uKLLyjaAdWkrKxMJ06cMPb/e0oQAAD3q23bthnrpk2bytvb28Q0wP1v4MCBqlHjxse3Bw8elM1mq3Q+IyPDWLdp08ap2QB3dfDgQe3du1eS5OnpqVGjRpmcCHBN5b8keOrUKYfzOTk5xtrf398pmQBnoWwHAADgZD169FCzZs0k3XjxkJaWdtvZBQsWGOvBgwfLy8vL6fkAAHDkz3/+s/E3qn79+tq8ebO6detmcirgwZGammqcDNSoUSO1aNHC5ETA/W3mzJmy2+0Of8qXgMLCwio8VpWTGABUrqioqML1zM8884yJaQDX0KhRI/Xu3VuSVFhYqFWrVt12Njc3t8KVfeW/8Aug6sqfahcREUHpB7hL5W9qSklJqXT2+vXrWrNmjbHnfUa4Gsp2AAAATlajRg299tprxj42NlZHjx69aS4tLU1Lly6VJFksFv31r3+ttowAANzOxIkT9Y9//EOS5Ovrqy+//FKhoaHmhgLcwJIlS7Rly5YK113eyrp16/TSSy8Z+/HjxxunnQAAYIbk5GRt3Lix0pP7z507p2eeeUZHjhyRJHl5eWnatGnVFRFwaXPmzJGHh4ckKS4ursLpdb+4evWqhg0bppKSEklSz549OXkcuAslJSVavXq1sR8zZoyJaQDXNmLECGO9bds2xcfH3/I9j9LSUkVHRxsn+FutVj333HPVlhO4F2qaHQAAAOBBMGrUKK1bt07r1q3T+fPn1bVrV0VHRys0NFQ2m02bNm1Samqq8cJj/vz5nFgC3IG8vDy9+eabFf4tPz+/wuPx8fE3Pa/8KQsAbpaQkKC3337b2MfGxurUqVMOr4Jo1qyZOnfu7Ox4gEvbu3evYmJi1LhxY/Xt21cdOnRQQECAvL29VVBQoKysLG3cuFEHDhwwntOnTx9NnTrVvNAAAEjav3+/3n77bQUGBqpfv37q0KGDAgMDZbFYdPHiRe3Zs0dpaWkqKiqSdONLiMnJycap/wAqFxISohkzZmjmzJnKy8tT9+7dNWLECIWFhcnLy0uHDh3S+++/r9zcXEmSn5+fkpKSzA0NuKi0tDRduXJFkvTQQw+pV69eJicCXFefPn00YsQIo8A6e/ZsrV+/XkOHDlXLli1VVlamw4cPKyUlRdnZ2cbz5s+fr8DAQLNiA3fFw+7o67MAAAC4J0pKShQdHa2PPvrotjMWi0V///vfNWnSpGpMBri+kydPKigo6I6fx8sh
oHJPPvmktm/ffsfPGz16NB/2AA5ERUUpOTm5SrO1atXSn/70J82ePVsWi8XJyYAHR3p6uvGBalhYmNLT080NBLiIiRMnVvhCRmWaNm2q999/X+Hh4U5OBbif2bNn67XXXlNpaeltZ4KDg/XJJ58oODi4GpMB7iMsLEw7duyQJM2dO5dTWIFf6aefftKkSZOUmJjo8L13q9WqhQsXcqIkXBJlOwAAgGq2detWJSUladeuXTpz5ow8PT31u9/9Tv3799cf//hHtW7d2uyIgMuhbAc4B2U7wHmuXr2qrVu36quvvtL+/ft14sQJXbhwQTabTVarVf7+/mrXrp3CwsI0bNgwNWnSxOzIgNuhbAfcndzcXKWnp2vv3r369ttvdfbsWV28eFHXrl1T7dq1FRgYqJCQED399NMaPHiwatWqZXZkwGUdPXpUK1as0KZNm3T69GkVFRXJz89PXbp0UWRkpIYPH66aNbnIDLgbWVlZatOmjaQbX3D68ccfFRAQYHIqwD0cOXJEycnJ2rlzp7KyspSfny8PDw/5+vqqbdu26tu3r6Kiovidg8uibAcAAAAAAAAAAAAAAAAAgAM1zA4AAAAAAAAAAAAAAAAAAMD9jrIdAAAAAAAAAAAAAAAAAAAOULYDAAAAAAAAAAAAAAAAAMABynYAAAAAAAAAAAAAAAAAADhA2Q4AAAAAAAAAAAAAAAAAAAco2wEAAAAAAAAAAAAAAAAA4ABlOwAAAAAAAAAAAAAAAAAAHKBsBwAAAAAAAAAAAAAAAACAA5TtAAAAAAAAAAAAAAAAAABwgLIdAAAAAAAAAAAAAAAAAAAOULYDAAAAAAAAAAAAAAAAAMABynYAAAAAAAAAAAAAAAAAADhA2Q4AAAAAAAAAAAAAAAAAAAco2wEAAAAAAAAAAAAAAAAA4ABlOwAAAAAAAAAAAAAAAAAAHKBsBwAAAAAAAAAAAAAAAACAA5TtAAAAAAAAAAAAAAAAAABwgLIdAAAAAAAAAAAAAAAAAAAOULYDAAAAAAAAAAAAAAAAAMABynYAAAAAAAAAAAAAAAAAADjwf7Cfj6iTrv0oAAAAAElFTkSuQmCC",
      "text/plain": [
       "<Figure size 1280x1280 with 4 Axes>"
      ]
     },
     "metadata": {
      "image/png": {
       "height": 1261,
       "width": 1261
      }
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Build a row-normalized confusion matrix (per-class recall on the\n",
    "# diagonal) and render it as an annotated heatmap.\n",
    "y_true = labels\n",
    "y_pred = predictions\n",
    "cell_type_list = np.unique(y_true)\n",
    "# labels= guarantees a square matrix whose rows/columns follow\n",
    "# cell_type_list order, even for classes never predicted.\n",
    "matrix = confusion_matrix(y_true, y_pred, labels=cell_type_list)\n",
    "# Normalize each row by its total. Guard empty rows (a label with zero\n",
    "# true samples) so they map to 0 instead of NaN / divide-by-zero warnings.\n",
    "row_sums = matrix.sum(axis=1, keepdims=True)\n",
    "matrix = matrix.astype(\"float\") / np.where(row_sums == 0, 1, row_sums)\n",
    "\n",
    "df = pd.DataFrame(matrix, index=cell_type_list, columns=cell_type_list)\n",
    "\n",
    "# clustermap returns a ClusterGrid (not an Axes). Clustering is disabled\n",
    "# on both axes so the plot keeps the cell_type_list label order.\n",
    "grid = sns.clustermap(df,\n",
    "                      cmap='Purples',\n",
    "                      annot=True, fmt=\".2f\",\n",
    "                      annot_kws={'size': 6},\n",
    "                      vmin=0,\n",
    "                      vmax=1,\n",
    "                      row_cluster=False,\n",
    "                      col_cluster=False,\n",
    "                      figsize=(16, 16))\n",
    "plt.savefig(str(save_dir / \"confusion_matrix.png\"), dpi=300, bbox_inches='tight')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "62ebbbb9",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Persist only the fine-tuned weights (state_dict), not the full module,\n",
    "# so they can be restored later via model.load_state_dict().\n",
    "model_path = save_dir / \"model.pt\"\n",
    "torch.save(best_model.state_dict(), model_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "66e4af22",
   "metadata": {},
   "outputs": [],
   "source": [
    "adata_test_raw.obsm.clear()\n",
    "adata_test_raw.write_h5ad(save_dir / \"SRX7814224_test_raw_predictions.h5ad\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "scgpt_new",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
