{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import ssms\n",
    "import lanfactory\n",
    "import os\n",
    "import numpy as np\n",
    "from copy import deepcopy\n",
    "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "MODEL = \"ddm\"\n",
    "RUN_SIMS = False\n",
    "DEVICE = \"cpu\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Initialize the generator config (for MLP LANs)\n",
    "generator_config = deepcopy(ssms.config.data_generator_config[\"lan\"])\n",
    "# Specify generative model (one from the list of included models mentioned above)\n",
    "generator_config[\"model\"] = MODEL\n",
    "# Specify number of parameter sets to simulate\n",
    "generator_config[\"n_parameter_sets\"] = 256\n",
    "# Specify how many samples a simulation run should entail\n",
    "generator_config[\"n_samples\"] = 2000\n",
    "# Specify folder in which to save generated data\n",
    "generator_config[\"output_folder\"] = \"data/lan_mlp/\"\n",
    "\n",
    "# Make model config dict\n",
    "model_config = ssms.config.model_config[MODEL]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "generator_config[\"output_folder\"] = (\n",
    "    \"data/lan_mlp/\"\n",
    "    + generator_config[\"model\"]\n",
    "    + \"/\"\n",
    "    + str(generator_config[\"n_samples\"])\n",
    "    + \"_\"\n",
    "    + str(generator_config[\"n_training_samples_by_parameter_set\"])\n",
    "    + \"/\"\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "if RUN_SIMS:\n",
    "    n_datafiles = 20\n",
    "    for i in range(n_datafiles):\n",
    "        print(\"Datafile: \", i)\n",
    "        my_dataset_generator = ssms.dataset_generators.lan_mlp.data_generator(\n",
    "            generator_config=generator_config, model_config=model_config\n",
    "        )\n",
    "        training_data = my_dataset_generator.generate_data_training_uniform(save=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pickle\n",
    "\n",
    "folder_ = \"../data/lan_mlp/\" + MODEL + \"/\"\n",
    "files_ = [folder_ + file_ for file_ in os.listdir(folder_)]\n",
    "\n",
    "my_data = pickle.load(\n",
    "    open(\n",
    "        files_[0],\n",
    "        \"rb\",\n",
    "    )\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Network config: \n",
      "{'layer_sizes': [100, 100, 100, 1], 'activations': ['tanh', 'tanh', 'tanh', 'linear'], 'train_output_type': 'logprob'}\n",
      "Train config: \n",
      "{'cpu_batch_size': 128, 'gpu_batch_size': 256, 'n_epochs': 5, 'optimizer': 'adam', 'learning_rate': 2e-06, 'lr_scheduler': 'reduce_on_plateau', 'lr_scheduler_params': {}, 'weight_decay': 0.0, 'loss': 'huber', 'save_history': True}\n"
     ]
    }
   ],
   "source": [
    "from copy import deepcopy\n",
    "\n",
    "network_config = deepcopy(lanfactory.config.network_configs.network_config_mlp)\n",
    "network_config[\"layer_sizes\"] = [100, 100, 100, 1]\n",
    "network_config[\"activations\"] = [\"tanh\", \"tanh\", \"tanh\", \"linear\"]\n",
    "\n",
    "print(\"Network config: \")\n",
    "print(network_config)\n",
    "\n",
    "train_config = deepcopy(lanfactory.config.network_configs.train_config_mlp)\n",
    "train_config[\"learning_rate\"] = 0.000002\n",
    "\n",
    "print(\"Train config: \")\n",
    "print(train_config)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'layer_sizes': [100, 100, 100, 1],\n",
       " 'activations': ['tanh', 'tanh', 'tanh', 'linear'],\n",
       " 'train_output_type': 'logprob'}"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "network_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "train_config[\"cpu_batch_size\"] = 2048\n",
    "train_config[\"gpu_batch_size\"] = 2048\n",
    "train_config[\"n_epochs\"] = 20"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'cpu_batch_size': 2048,\n",
       " 'gpu_batch_size': 2048,\n",
       " 'n_epochs': 20,\n",
       " 'optimizer': 'adam',\n",
       " 'learning_rate': 2e-06,\n",
       " 'lr_scheduler': 'reduce_on_plateau',\n",
       " 'lr_scheduler_params': {},\n",
       " 'weight_decay': 0.0,\n",
       " 'loss': 'huber',\n",
       " 'save_history': True}"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "folder_ = \"../data/lan_mlp/\" + MODEL + \"/\"\n",
    "file_list_ = [folder_ + file_ for file_ in os.listdir(folder_)][:3]\n",
    "\n",
    "# Training dataset\n",
    "jax_training_dataset = lanfactory.trainers.DatasetTorch(\n",
    "    file_ids=file_list_,\n",
    "    batch_size=(\n",
    "        train_config[DEVICE + \"_batch_size\"]\n",
    "        if torch.cuda.is_available()\n",
    "        else train_config[DEVICE + \"_batch_size\"]\n",
    "    ),\n",
    "    label_lower_bound=np.log(1e-10),\n",
    "    features_key=\"lan_data\",\n",
    "    label_key=\"lan_labels\",\n",
    "    out_framework=\"jax\",\n",
    ")\n",
    "\n",
    "jax_training_dataloader = torch.utils.data.DataLoader(\n",
    "    jax_training_dataset, shuffle=True, batch_size=None, num_workers=1, pin_memory=True\n",
    ")\n",
    "\n",
    "# Validation dataset\n",
    "jax_validation_dataset = lanfactory.trainers.DatasetTorch(\n",
    "    file_ids=file_list_,\n",
    "    batch_size=(\n",
    "        train_config[DEVICE + \"_batch_size\"]\n",
    "        if torch.cuda.is_available()\n",
    "        else train_config[DEVICE + \"_batch_size\"]\n",
    "    ),\n",
    "    label_lower_bound=np.log(1e-10),\n",
    "    features_key=\"lan_data\",\n",
    "    label_key=\"lan_labels\",\n",
    "    out_framework=\"jax\",\n",
    ")\n",
    "\n",
    "jax_validation_dataloader = torch.utils.data.DataLoader(\n",
    "    jax_validation_dataset,\n",
    "    shuffle=True,\n",
    "    batch_size=None,\n",
    "    num_workers=1,\n",
    "    pin_memory=True,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[ 0.1615,  1.2986,  0.7739,  1.6829, 11.5661,  1.0000],\n",
      "        [-1.9381,  0.9173,  0.5608,  1.3857, 13.4255,  1.0000],\n",
      "        [ 0.1582,  1.4980,  0.4946,  0.0658,  0.8469,  1.0000],\n",
      "        ...,\n",
      "        [ 0.7746,  2.2783,  0.8300,  1.8073,  2.9297,  1.0000],\n",
      "        [ 2.5729,  0.5833,  0.5117,  0.8289,  1.1672,  1.0000],\n",
      "        [-1.1318,  0.4758,  0.1894,  1.5282,  1.5557, -1.0000]])\n",
      "tensor([[ -7.8343],\n",
      "        [-23.0259],\n",
      "        [ -1.3757],\n",
      "        ...,\n",
      "        [ -1.2525],\n",
      "        [  0.0360],\n",
      "        [  2.3369]])\n"
     ]
    }
   ],
   "source": [
    "cnt = 0\n",
    "for xb, yb in jax_training_dataloader:\n",
    "    print(xb)\n",
    "    print(yb)\n",
    "    cnt += 1\n",
    "    if cnt > 0:\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# LOAD NETWORK\n",
    "jax_net = lanfactory.trainers.MLPJaxFactory(network_config=network_config, train=True)\n",
    "pickle.dump(\n",
    "    network_config,\n",
    "    open(\"../data/jax_models/\" + MODEL + \"/jax_network_config.pickle\", \"wb\"),\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "jax_trainer = lanfactory.trainers.ModelTrainerJaxMLP(\n",
    "    train_config=train_config,\n",
    "    model=jax_net,\n",
    "    train_dl=jax_training_dataloader,\n",
    "    valid_dl=jax_validation_dataloader,\n",
    "    pin_memory=True,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found folder:  ..\n",
      "Moving on...\n",
      "Found folder:  ../data\n",
      "Moving on...\n",
      "Found folder:  ../data/jax_models\n",
      "Moving on...\n",
      "Found folder:  ../data/jax_models/ddm\n",
      "Moving on...\n",
      "Epoch: 0 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 4.6657476\n",
      "Training - Step: 1000 of 14646 - Loss: 0.23539262\n",
      "Training - Step: 2000 of 14646 - Loss: 0.13319452\n",
      "Training - Step: 3000 of 14646 - Loss: 0.077041775\n",
      "Training - Step: 4000 of 14646 - Loss: 0.084758\n",
      "Training - Step: 5000 of 14646 - Loss: 0.061328642\n",
      "Training - Step: 6000 of 14646 - Loss: 0.09372611\n",
      "Training - Step: 7000 of 14646 - Loss: 0.10200088\n",
      "Training - Step: 8000 of 14646 - Loss: 0.0922149\n",
      "Training - Step: 9000 of 14646 - Loss: 0.08815187\n",
      "Training - Step: 10000 of 14646 - Loss: 0.08820309\n",
      "Training - Step: 11000 of 14646 - Loss: 0.09798707\n",
      "Training - Step: 12000 of 14646 - Loss: 0.09019475\n",
      "Training - Step: 13000 of 14646 - Loss: 0.07611063\n",
      "Training - Step: 14000 of 14646 - Loss: 0.103473276\n",
      "Epoch 0/20 time: 42.59603929519653s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.06546292\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.08428119\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.10327095\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.05975366\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.08271888\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.07679789\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.078183144\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.09707835\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.10482265\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.1157265\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.13703328\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.11068382\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.08438847\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.111975215\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.079091385\n",
      "Epoch 0/20 time: 22.71810793876648s\n",
      "Epoch: 0 / 20, test_loss: 0.08957315236330032\n",
      "Epoch: 1 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.102607034\n",
      "Training - Step: 1000 of 14646 - Loss: 0.06357504\n",
      "Training - Step: 2000 of 14646 - Loss: 0.0813074\n",
      "Training - Step: 3000 of 14646 - Loss: 0.107232794\n",
      "Training - Step: 4000 of 14646 - Loss: 0.078699075\n",
      "Training - Step: 5000 of 14646 - Loss: 0.08468427\n",
      "Training - Step: 6000 of 14646 - Loss: 0.107434615\n",
      "Training - Step: 7000 of 14646 - Loss: 0.07887427\n",
      "Training - Step: 8000 of 14646 - Loss: 0.0762465\n",
      "Training - Step: 9000 of 14646 - Loss: 0.08745187\n",
      "Training - Step: 10000 of 14646 - Loss: 0.09240593\n",
      "Training - Step: 11000 of 14646 - Loss: 0.11651772\n",
      "Training - Step: 12000 of 14646 - Loss: 0.08267881\n",
      "Training - Step: 13000 of 14646 - Loss: 0.1219197\n",
      "Training - Step: 14000 of 14646 - Loss: 0.08832441\n",
      "Epoch 1/20 time: 39.99100089073181s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.10162082\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.09689363\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.0978336\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.090075575\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.10586521\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.11839847\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.10803349\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.11199933\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.09782994\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.115690276\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.0939784\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.08362706\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.09500362\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.13031444\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.10697923\n",
      "Epoch 1/20 time: 22.66794180870056s\n",
      "Epoch: 1 / 20, test_loss: 0.10666166245937347\n",
      "Epoch: 2 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.11716754\n",
      "Training - Step: 1000 of 14646 - Loss: 0.11894104\n",
      "Training - Step: 2000 of 14646 - Loss: 0.120561615\n",
      "Training - Step: 3000 of 14646 - Loss: 0.10005254\n",
      "Training - Step: 4000 of 14646 - Loss: 0.09938884\n",
      "Training - Step: 5000 of 14646 - Loss: 0.088259086\n",
      "Training - Step: 6000 of 14646 - Loss: 0.1311575\n",
      "Training - Step: 7000 of 14646 - Loss: 0.06671969\n",
      "Training - Step: 8000 of 14646 - Loss: 0.08095308\n",
      "Training - Step: 9000 of 14646 - Loss: 0.09824006\n",
      "Training - Step: 10000 of 14646 - Loss: 0.076725006\n",
      "Training - Step: 11000 of 14646 - Loss: 0.0793885\n",
      "Training - Step: 12000 of 14646 - Loss: 0.08861776\n",
      "Training - Step: 13000 of 14646 - Loss: 0.082879215\n",
      "Training - Step: 14000 of 14646 - Loss: 0.11748245\n",
      "Epoch 2/20 time: 41.12671494483948s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.08146312\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.074644215\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.06695032\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.08922843\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.06326875\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.069981724\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.090347014\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.09427995\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.085479535\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.06432562\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.09197289\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.074175075\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.08271489\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.09258978\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.068193465\n",
      "Epoch 2/20 time: 23.339656829833984s\n",
      "Epoch: 2 / 20, test_loss: 0.08405958116054535\n",
      "Epoch: 3 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.09774701\n",
      "Training - Step: 1000 of 14646 - Loss: 0.12982713\n",
      "Training - Step: 2000 of 14646 - Loss: 0.074139655\n",
      "Training - Step: 3000 of 14646 - Loss: 0.09942049\n",
      "Training - Step: 4000 of 14646 - Loss: 0.10363974\n",
      "Training - Step: 5000 of 14646 - Loss: 0.09854329\n",
      "Training - Step: 6000 of 14646 - Loss: 0.11477683\n",
      "Training - Step: 7000 of 14646 - Loss: 0.10456227\n",
      "Training - Step: 8000 of 14646 - Loss: 0.08892194\n",
      "Training - Step: 9000 of 14646 - Loss: 0.095175184\n",
      "Training - Step: 10000 of 14646 - Loss: 0.08954171\n",
      "Training - Step: 11000 of 14646 - Loss: 0.082954004\n",
      "Training - Step: 12000 of 14646 - Loss: 0.075044155\n",
      "Training - Step: 13000 of 14646 - Loss: 0.05872235\n",
      "Training - Step: 14000 of 14646 - Loss: 0.07756296\n",
      "Epoch 3/20 time: 42.70316529273987s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.09655556\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.1075421\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.07483129\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.07508853\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.07505887\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.08103688\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.07346213\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.10484196\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.07506091\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.0771514\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.09979993\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.078718945\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.09732048\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.08971454\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.10765667\n",
      "Epoch 3/20 time: 22.694720029830933s\n",
      "Epoch: 3 / 20, test_loss: 0.08327259123325348\n",
      "Epoch: 4 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.085165665\n",
      "Training - Step: 1000 of 14646 - Loss: 0.09612656\n",
      "Training - Step: 2000 of 14646 - Loss: 0.076466866\n",
      "Training - Step: 3000 of 14646 - Loss: 0.09662474\n",
      "Training - Step: 4000 of 14646 - Loss: 0.08480571\n",
      "Training - Step: 5000 of 14646 - Loss: 0.08964546\n",
      "Training - Step: 6000 of 14646 - Loss: 0.06435984\n",
      "Training - Step: 7000 of 14646 - Loss: 0.09958716\n",
      "Training - Step: 8000 of 14646 - Loss: 0.08206763\n",
      "Training - Step: 9000 of 14646 - Loss: 0.103275895\n",
      "Training - Step: 10000 of 14646 - Loss: 0.0796906\n",
      "Training - Step: 11000 of 14646 - Loss: 0.10116879\n",
      "Training - Step: 12000 of 14646 - Loss: 0.090227276\n",
      "Training - Step: 13000 of 14646 - Loss: 0.115158886\n",
      "Training - Step: 14000 of 14646 - Loss: 0.08346811\n",
      "Epoch 4/20 time: 39.71538686752319s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.11862728\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.12725148\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.13159397\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.10115577\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.12158621\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.11912296\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.11004105\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.13894609\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.0920733\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.091880664\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.10655555\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.11977337\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.1344997\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.11104846\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.13166675\n",
      "Epoch 4/20 time: 22.704818964004517s\n",
      "Epoch: 4 / 20, test_loss: 0.10983947664499283\n",
      "Epoch: 5 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.100513764\n",
      "Training - Step: 1000 of 14646 - Loss: 0.14128654\n",
      "Training - Step: 2000 of 14646 - Loss: 0.10629125\n",
      "Training - Step: 3000 of 14646 - Loss: 0.13822585\n",
      "Training - Step: 4000 of 14646 - Loss: 0.07429662\n",
      "Training - Step: 5000 of 14646 - Loss: 0.086147435\n",
      "Training - Step: 6000 of 14646 - Loss: 0.12750052\n",
      "Training - Step: 7000 of 14646 - Loss: 0.109832354\n",
      "Training - Step: 8000 of 14646 - Loss: 0.10694297\n",
      "Training - Step: 9000 of 14646 - Loss: 0.0818232\n",
      "Training - Step: 10000 of 14646 - Loss: 0.098688275\n",
      "Training - Step: 11000 of 14646 - Loss: 0.09191114\n",
      "Training - Step: 12000 of 14646 - Loss: 0.106969684\n",
      "Training - Step: 13000 of 14646 - Loss: 0.10473002\n",
      "Training - Step: 14000 of 14646 - Loss: 0.09895317\n",
      "Epoch 5/20 time: 39.85688805580139s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.091519564\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.093722746\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.08618459\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.07969398\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.103526585\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.1036411\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.07910169\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.08568846\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.07281413\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.08950718\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.102090366\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.089752376\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.10191858\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.09745856\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.08110012\n",
      "Epoch 5/20 time: 22.65532612800598s\n",
      "Epoch: 5 / 20, test_loss: 0.09185060113668442\n",
      "Epoch: 6 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.10665527\n",
      "Training - Step: 1000 of 14646 - Loss: 0.06846713\n",
      "Training - Step: 2000 of 14646 - Loss: 0.07453915\n",
      "Training - Step: 3000 of 14646 - Loss: 0.101507425\n",
      "Training - Step: 4000 of 14646 - Loss: 0.08926624\n",
      "Training - Step: 5000 of 14646 - Loss: 0.101349115\n",
      "Training - Step: 6000 of 14646 - Loss: 0.105004\n",
      "Training - Step: 7000 of 14646 - Loss: 0.10275703\n",
      "Training - Step: 8000 of 14646 - Loss: 0.09736495\n",
      "Training - Step: 9000 of 14646 - Loss: 0.09324206\n",
      "Training - Step: 10000 of 14646 - Loss: 0.109260544\n",
      "Training - Step: 11000 of 14646 - Loss: 0.08304064\n",
      "Training - Step: 12000 of 14646 - Loss: 0.10076461\n",
      "Training - Step: 13000 of 14646 - Loss: 0.08390581\n",
      "Training - Step: 14000 of 14646 - Loss: 0.08670591\n",
      "Epoch 6/20 time: 40.02571630477905s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.098730296\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.087833524\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.0896166\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.08451919\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.08258946\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.07267143\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.102916926\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.10870625\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.09061831\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.08736237\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.09933551\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.1254504\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.092847586\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.09317153\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.118211895\n",
      "Epoch 6/20 time: 22.97917890548706s\n",
      "Epoch: 6 / 20, test_loss: 0.09421870112419128\n",
      "Epoch: 7 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.1153554\n",
      "Training - Step: 1000 of 14646 - Loss: 0.08301982\n",
      "Training - Step: 2000 of 14646 - Loss: 0.11295092\n",
      "Training - Step: 3000 of 14646 - Loss: 0.1307885\n",
      "Training - Step: 4000 of 14646 - Loss: 0.092117086\n",
      "Training - Step: 5000 of 14646 - Loss: 0.09551685\n",
      "Training - Step: 6000 of 14646 - Loss: 0.091074005\n",
      "Training - Step: 7000 of 14646 - Loss: 0.09571089\n",
      "Training - Step: 8000 of 14646 - Loss: 0.10927686\n",
      "Training - Step: 9000 of 14646 - Loss: 0.095204234\n",
      "Training - Step: 10000 of 14646 - Loss: 0.110188656\n",
      "Training - Step: 11000 of 14646 - Loss: 0.06902046\n",
      "Training - Step: 12000 of 14646 - Loss: 0.060954493\n",
      "Training - Step: 13000 of 14646 - Loss: 0.08156475\n",
      "Training - Step: 14000 of 14646 - Loss: 0.08671922\n",
      "Epoch 7/20 time: 40.06680107116699s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.090090156\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.101367414\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.11850673\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.07733418\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.077928364\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.10588139\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.10858715\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.09722907\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.10084419\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.08056463\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.079147756\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.109118484\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.08178068\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.09198552\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.078506514\n",
      "Epoch 7/20 time: 22.658021926879883s\n",
      "Epoch: 7 / 20, test_loss: 0.093417227268219\n",
      "Epoch: 8 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.081325695\n",
      "Training - Step: 1000 of 14646 - Loss: 0.07719072\n",
      "Training - Step: 2000 of 14646 - Loss: 0.07789689\n",
      "Training - Step: 3000 of 14646 - Loss: 0.100548655\n",
      "Training - Step: 4000 of 14646 - Loss: 0.08583413\n",
      "Training - Step: 5000 of 14646 - Loss: 0.064535\n",
      "Training - Step: 6000 of 14646 - Loss: 0.070663616\n",
      "Training - Step: 7000 of 14646 - Loss: 0.072290495\n",
      "Training - Step: 8000 of 14646 - Loss: 0.0841706\n",
      "Training - Step: 9000 of 14646 - Loss: 0.07036973\n",
      "Training - Step: 10000 of 14646 - Loss: 0.0838722\n",
      "Training - Step: 11000 of 14646 - Loss: 0.10574156\n",
      "Training - Step: 12000 of 14646 - Loss: 0.089972556\n",
      "Training - Step: 13000 of 14646 - Loss: 0.09822747\n",
      "Training - Step: 14000 of 14646 - Loss: 0.09158549\n",
      "Epoch 8/20 time: 40.13121795654297s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.08353634\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.07228243\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.07419899\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.0767678\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.08492359\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.08297467\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.09651367\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.09095584\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.08478134\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.096991345\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.08586077\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.089520775\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.10158537\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.09799424\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.06539774\n",
      "Epoch 8/20 time: 22.543636083602905s\n",
      "Epoch: 8 / 20, test_loss: 0.08739369362592697\n",
      "Epoch: 9 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.093495846\n",
      "Training - Step: 1000 of 14646 - Loss: 0.08617012\n",
      "Training - Step: 2000 of 14646 - Loss: 0.087624416\n",
      "Training - Step: 3000 of 14646 - Loss: 0.10094027\n",
      "Training - Step: 4000 of 14646 - Loss: 0.08753978\n",
      "Training - Step: 5000 of 14646 - Loss: 0.067358844\n",
      "Training - Step: 6000 of 14646 - Loss: 0.08541225\n",
      "Training - Step: 7000 of 14646 - Loss: 0.0872491\n",
      "Training - Step: 8000 of 14646 - Loss: 0.09440501\n",
      "Training - Step: 9000 of 14646 - Loss: 0.07795748\n",
      "Training - Step: 10000 of 14646 - Loss: 0.0921316\n",
      "Training - Step: 11000 of 14646 - Loss: 0.07155805\n",
      "Training - Step: 12000 of 14646 - Loss: 0.091047764\n",
      "Training - Step: 13000 of 14646 - Loss: 0.054596804\n",
      "Training - Step: 14000 of 14646 - Loss: 0.078540556\n",
      "Epoch 9/20 time: 40.43736696243286s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.077033624\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.084343076\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.07406547\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.077877596\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.0652135\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.06895169\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.08197479\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.078026175\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.068008415\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.07657125\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.08108164\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.093987405\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.07950832\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.0734079\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.081752226\n",
      "Epoch 9/20 time: 23.677082777023315s\n",
      "Epoch: 9 / 20, test_loss: 0.0797438845038414\n",
      "Epoch: 10 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.075545974\n",
      "Training - Step: 1000 of 14646 - Loss: 0.0811207\n",
      "Training - Step: 2000 of 14646 - Loss: 0.06533979\n",
      "Training - Step: 3000 of 14646 - Loss: 0.07518707\n",
      "Training - Step: 4000 of 14646 - Loss: 0.09357915\n",
      "Training - Step: 5000 of 14646 - Loss: 0.07353226\n",
      "Training - Step: 6000 of 14646 - Loss: 0.10316682\n",
      "Training - Step: 7000 of 14646 - Loss: 0.08366839\n",
      "Training - Step: 8000 of 14646 - Loss: 0.08242096\n",
      "Training - Step: 9000 of 14646 - Loss: 0.08890864\n",
      "Training - Step: 10000 of 14646 - Loss: 0.050302405\n",
      "Training - Step: 11000 of 14646 - Loss: 0.066528045\n",
      "Training - Step: 12000 of 14646 - Loss: 0.0744487\n",
      "Training - Step: 13000 of 14646 - Loss: 0.08238711\n",
      "Training - Step: 14000 of 14646 - Loss: 0.07092109\n",
      "Epoch 10/20 time: 42.40557408332825s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.075682335\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.0604365\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.062022813\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.058329713\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.072356954\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.064605616\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.0757101\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.09050746\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.07972618\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.06823012\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.058598086\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.073755145\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.078726456\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.066120215\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.06840052\n",
      "Epoch 10/20 time: 23.287827968597412s\n",
      "Epoch: 10 / 20, test_loss: 0.07186270505189896\n",
      "Epoch: 11 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.06417088\n",
      "Training - Step: 1000 of 14646 - Loss: 0.085249305\n",
      "Training - Step: 2000 of 14646 - Loss: 0.07523715\n",
      "Training - Step: 3000 of 14646 - Loss: 0.08127022\n",
      "Training - Step: 4000 of 14646 - Loss: 0.06280526\n",
      "Training - Step: 5000 of 14646 - Loss: 0.09358591\n",
      "Training - Step: 6000 of 14646 - Loss: 0.054897703\n",
      "Training - Step: 7000 of 14646 - Loss: 0.060507536\n",
      "Training - Step: 8000 of 14646 - Loss: 0.08008078\n",
      "Training - Step: 9000 of 14646 - Loss: 0.076552585\n",
      "Training - Step: 10000 of 14646 - Loss: 0.09217297\n",
      "Training - Step: 11000 of 14646 - Loss: 0.058366157\n",
      "Training - Step: 12000 of 14646 - Loss: 0.09901221\n",
      "Training - Step: 13000 of 14646 - Loss: 0.07119492\n",
      "Training - Step: 14000 of 14646 - Loss: 0.076421164\n",
      "Epoch 11/20 time: 41.31998586654663s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.078063704\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.07134454\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.070445344\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.06306383\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.072449826\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.06744403\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.07735972\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.07772192\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.08469569\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.07031314\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.085908696\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.06604694\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.06589\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.055776134\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.06246741\n",
      "Epoch 11/20 time: 24.79094886779785s\n",
      "Epoch: 11 / 20, test_loss: 0.07257602363824844\n",
      "Epoch: 12 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.07240086\n",
      "Training - Step: 1000 of 14646 - Loss: 0.07843262\n",
      "Training - Step: 2000 of 14646 - Loss: 0.06262422\n",
      "Training - Step: 3000 of 14646 - Loss: 0.08371295\n",
      "Training - Step: 4000 of 14646 - Loss: 0.076214895\n",
      "Training - Step: 5000 of 14646 - Loss: 0.06278326\n",
      "Training - Step: 6000 of 14646 - Loss: 0.060819894\n",
      "Training - Step: 7000 of 14646 - Loss: 0.07347397\n",
      "Training - Step: 8000 of 14646 - Loss: 0.08988482\n",
      "Training - Step: 9000 of 14646 - Loss: 0.06370441\n",
      "Training - Step: 10000 of 14646 - Loss: 0.058020882\n",
      "Training - Step: 11000 of 14646 - Loss: 0.055235006\n",
      "Training - Step: 12000 of 14646 - Loss: 0.059499856\n",
      "Training - Step: 13000 of 14646 - Loss: 0.06223145\n",
      "Training - Step: 14000 of 14646 - Loss: 0.064049155\n",
      "Epoch 12/20 time: 44.07323694229126s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.07294364\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.055105366\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.07025006\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.075714044\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.0568581\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.049706575\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.07845856\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.061792057\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.056581575\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.06504653\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.0652702\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.07366894\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.07973795\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.06883329\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.07362419\n",
      "Epoch 12/20 time: 25.191205263137817s\n",
      "Epoch: 12 / 20, test_loss: 0.06973335146903992\n",
      "Epoch: 13 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.081462994\n",
      "Training - Step: 1000 of 14646 - Loss: 0.06330992\n",
      "Training - Step: 2000 of 14646 - Loss: 0.062369503\n",
      "Training - Step: 3000 of 14646 - Loss: 0.089601636\n",
      "Training - Step: 4000 of 14646 - Loss: 0.06646296\n",
      "Training - Step: 5000 of 14646 - Loss: 0.07907224\n",
      "Training - Step: 6000 of 14646 - Loss: 0.06718522\n",
      "Training - Step: 7000 of 14646 - Loss: 0.06279684\n",
      "Training - Step: 8000 of 14646 - Loss: 0.059881523\n",
      "Training - Step: 9000 of 14646 - Loss: 0.0652629\n",
      "Training - Step: 10000 of 14646 - Loss: 0.07344673\n",
      "Training - Step: 11000 of 14646 - Loss: 0.065074936\n",
      "Training - Step: 12000 of 14646 - Loss: 0.039703526\n",
      "Training - Step: 13000 of 14646 - Loss: 0.062192135\n",
      "Training - Step: 14000 of 14646 - Loss: 0.07616216\n",
      "Epoch 13/20 time: 44.25206398963928s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.06117677\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.081015415\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.07477076\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.06078861\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.064034805\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.056544\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.072866\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.075633384\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.064808354\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.060726758\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.059852608\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.071257666\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.06466904\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.0751413\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.08647896\n",
      "Epoch 13/20 time: 22.95615005493164s\n",
      "Epoch: 13 / 20, test_loss: 0.06838957965373993\n",
      "Epoch: 14 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.05500836\n",
      "Training - Step: 1000 of 14646 - Loss: 0.07897268\n",
      "Training - Step: 2000 of 14646 - Loss: 0.059146654\n",
      "Training - Step: 3000 of 14646 - Loss: 0.054428354\n",
      "Training - Step: 4000 of 14646 - Loss: 0.07218153\n",
      "Training - Step: 5000 of 14646 - Loss: 0.062417716\n",
      "Training - Step: 6000 of 14646 - Loss: 0.072071865\n",
      "Training - Step: 7000 of 14646 - Loss: 0.061253868\n",
      "Training - Step: 8000 of 14646 - Loss: 0.05309382\n",
      "Training - Step: 9000 of 14646 - Loss: 0.0494321\n",
      "Training - Step: 10000 of 14646 - Loss: 0.06303799\n",
      "Training - Step: 11000 of 14646 - Loss: 0.06860867\n",
      "Training - Step: 12000 of 14646 - Loss: 0.047374927\n",
      "Training - Step: 13000 of 14646 - Loss: 0.071096234\n",
      "Training - Step: 14000 of 14646 - Loss: 0.072178245\n",
      "Epoch 14/20 time: 40.1634681224823s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.05087753\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.09279042\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.071266875\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.047986876\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.06747369\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.06412454\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.06551173\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.068431556\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.040356494\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.055424064\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.048789762\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.047262095\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.058392286\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.0663225\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.04658174\n",
      "Epoch 14/20 time: 22.73591899871826s\n",
      "Epoch: 14 / 20, test_loss: 0.06252704560756683\n",
      "Epoch: 15 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.050786823\n",
      "Training - Step: 1000 of 14646 - Loss: 0.061192136\n",
      "Training - Step: 2000 of 14646 - Loss: 0.06424258\n",
      "Training - Step: 3000 of 14646 - Loss: 0.064229704\n",
      "Training - Step: 4000 of 14646 - Loss: 0.06542838\n",
      "Training - Step: 5000 of 14646 - Loss: 0.056133527\n",
      "Training - Step: 6000 of 14646 - Loss: 0.06509615\n",
      "Training - Step: 7000 of 14646 - Loss: 0.05516416\n",
      "Training - Step: 8000 of 14646 - Loss: 0.057620108\n",
      "Training - Step: 9000 of 14646 - Loss: 0.06825361\n",
      "Training - Step: 10000 of 14646 - Loss: 0.051005784\n",
      "Training - Step: 11000 of 14646 - Loss: 0.05445604\n",
      "Training - Step: 12000 of 14646 - Loss: 0.069162086\n",
      "Training - Step: 13000 of 14646 - Loss: 0.05800059\n",
      "Training - Step: 14000 of 14646 - Loss: 0.0719541\n",
      "Epoch 15/20 time: 43.23272180557251s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.05970817\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.059598528\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.06496088\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.057347696\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.0831949\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.054616753\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.07175883\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.045599736\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.04454229\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.053914182\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.050480895\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.063142404\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.059131753\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.07794524\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.05109192\n",
      "Epoch 15/20 time: 24.01917004585266s\n",
      "Epoch: 15 / 20, test_loss: 0.06026891991496086\n",
      "Epoch: 16 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.06734288\n",
      "Training - Step: 1000 of 14646 - Loss: 0.066941015\n",
      "Training - Step: 2000 of 14646 - Loss: 0.06116241\n",
      "Training - Step: 3000 of 14646 - Loss: 0.057800736\n",
      "Training - Step: 4000 of 14646 - Loss: 0.07770396\n",
      "Training - Step: 5000 of 14646 - Loss: 0.06652895\n",
      "Training - Step: 6000 of 14646 - Loss: 0.08204637\n",
      "Training - Step: 7000 of 14646 - Loss: 0.07076113\n",
      "Training - Step: 8000 of 14646 - Loss: 0.04716791\n",
      "Training - Step: 9000 of 14646 - Loss: 0.0545279\n",
      "Training - Step: 10000 of 14646 - Loss: 0.04449126\n",
      "Training - Step: 11000 of 14646 - Loss: 0.053786524\n",
      "Training - Step: 12000 of 14646 - Loss: 0.046175264\n",
      "Training - Step: 13000 of 14646 - Loss: 0.050227463\n",
      "Training - Step: 14000 of 14646 - Loss: 0.057398338\n",
      "Epoch 16/20 time: 44.569469928741455s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.056376215\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.05956372\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.0533906\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.04850306\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.070715375\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.05340328\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.05037438\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.08333575\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.05843948\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.0449558\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.0519816\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.058006503\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.074471384\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.053754866\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.06029801\n",
      "Epoch 16/20 time: 23.86558508872986s\n",
      "Epoch: 16 / 20, test_loss: 0.061097651720047\n",
      "Epoch: 17 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.059749752\n",
      "Training - Step: 1000 of 14646 - Loss: 0.060939964\n",
      "Training - Step: 2000 of 14646 - Loss: 0.0684205\n",
      "Training - Step: 3000 of 14646 - Loss: 0.05122822\n",
      "Training - Step: 4000 of 14646 - Loss: 0.0788785\n",
      "Training - Step: 5000 of 14646 - Loss: 0.06840427\n",
      "Training - Step: 6000 of 14646 - Loss: 0.05612978\n",
      "Training - Step: 7000 of 14646 - Loss: 0.05451271\n",
      "Training - Step: 8000 of 14646 - Loss: 0.04453163\n",
      "Training - Step: 9000 of 14646 - Loss: 0.06502029\n",
      "Training - Step: 10000 of 14646 - Loss: 0.048383158\n",
      "Training - Step: 11000 of 14646 - Loss: 0.06501102\n",
      "Training - Step: 12000 of 14646 - Loss: 0.054507997\n",
      "Training - Step: 13000 of 14646 - Loss: 0.048297584\n",
      "Training - Step: 14000 of 14646 - Loss: 0.048779957\n",
      "Epoch 17/20 time: 41.86101722717285s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.079388976\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.07251717\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.067521304\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.07563323\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.044822246\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.06196302\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.049485706\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.06089879\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.06329869\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.055756316\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.072622254\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.053078257\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.06678631\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.077328905\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.0621371\n",
      "Epoch 17/20 time: 23.18445110321045s\n",
      "Epoch: 17 / 20, test_loss: 0.059597570449113846\n",
      "Epoch: 18 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.064366885\n",
      "Training - Step: 1000 of 14646 - Loss: 0.06366398\n",
      "Training - Step: 2000 of 14646 - Loss: 0.06834474\n",
      "Training - Step: 3000 of 14646 - Loss: 0.062545165\n",
      "Training - Step: 4000 of 14646 - Loss: 0.060003035\n",
      "Training - Step: 5000 of 14646 - Loss: 0.05804465\n",
      "Training - Step: 6000 of 14646 - Loss: 0.06188599\n",
      "Training - Step: 7000 of 14646 - Loss: 0.05343902\n",
      "Training - Step: 8000 of 14646 - Loss: 0.07318553\n",
      "Training - Step: 9000 of 14646 - Loss: 0.07106879\n",
      "Training - Step: 10000 of 14646 - Loss: 0.07454805\n",
      "Training - Step: 11000 of 14646 - Loss: 0.043048255\n",
      "Training - Step: 12000 of 14646 - Loss: 0.04618484\n",
      "Training - Step: 13000 of 14646 - Loss: 0.055200078\n",
      "Training - Step: 14000 of 14646 - Loss: 0.06418129\n",
      "Epoch 18/20 time: 44.294984102249146s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.052080825\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.060721166\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.056282453\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.077899754\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.059497073\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.06544746\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.060115114\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.059120722\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.048201572\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.06988571\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.04906577\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.0714874\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.061264932\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.049350504\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.06013271\n",
      "Epoch 18/20 time: 22.686694145202637s\n",
      "Epoch: 18 / 20, test_loss: 0.059423770755529404\n",
      "Epoch: 19 of 20\n",
      "Training - Step: 0 of 14646 - Loss: 0.044619627\n",
      "Training - Step: 1000 of 14646 - Loss: 0.051704843\n",
      "Training - Step: 2000 of 14646 - Loss: 0.07106754\n",
      "Training - Step: 3000 of 14646 - Loss: 0.045913883\n",
      "Training - Step: 4000 of 14646 - Loss: 0.062975526\n",
      "Training - Step: 5000 of 14646 - Loss: 0.06320416\n",
      "Training - Step: 6000 of 14646 - Loss: 0.07029241\n",
      "Training - Step: 7000 of 14646 - Loss: 0.06111663\n",
      "Training - Step: 8000 of 14646 - Loss: 0.047495916\n",
      "Training - Step: 9000 of 14646 - Loss: 0.06761791\n",
      "Training - Step: 10000 of 14646 - Loss: 0.06378548\n",
      "Training - Step: 11000 of 14646 - Loss: 0.054594837\n",
      "Training - Step: 12000 of 14646 - Loss: 0.06272581\n",
      "Training - Step: 13000 of 14646 - Loss: 0.045926996\n",
      "Training - Step: 14000 of 14646 - Loss: 0.055133916\n",
      "Epoch 19/20 time: 41.86226677894592s\n",
      "Validation - Step: 0 of 14646 - Loss: 0.079382226\n",
      "Validation - Step: 1000 of 14646 - Loss: 0.053053588\n",
      "Validation - Step: 2000 of 14646 - Loss: 0.05367046\n",
      "Validation - Step: 3000 of 14646 - Loss: 0.046374068\n",
      "Validation - Step: 4000 of 14646 - Loss: 0.04922492\n",
      "Validation - Step: 5000 of 14646 - Loss: 0.06513646\n",
      "Validation - Step: 6000 of 14646 - Loss: 0.06698214\n",
      "Validation - Step: 7000 of 14646 - Loss: 0.051748224\n",
      "Validation - Step: 8000 of 14646 - Loss: 0.05437945\n",
      "Validation - Step: 9000 of 14646 - Loss: 0.04220363\n",
      "Validation - Step: 10000 of 14646 - Loss: 0.06786515\n",
      "Validation - Step: 11000 of 14646 - Loss: 0.05922871\n",
      "Validation - Step: 12000 of 14646 - Loss: 0.048083156\n",
      "Validation - Step: 13000 of 14646 - Loss: 0.061749697\n",
      "Validation - Step: 14000 of 14646 - Loss: 0.053806793\n",
      "Epoch 19/20 time: 23.356619119644165s\n",
      "Epoch: 19 / 20, test_loss: 0.058538973331451416\n",
      "Saving training history to: ../data/jax_models/ddm//test_run_notebook_lan_ddm__jax_training_history.csv\n",
      "Saving model parameters to: ../data/jax_models/ddm//test_run_notebook_lan_ddm__train_state.jax\n",
      "Saving training config to: ../data/jax_models/ddm//test_run_notebook_lan_ddm__train_config.pickle\n",
      "Saving training data details to: ../data/jax_models/ddm//test_run_notebook_lan_ddm__data_details.pickle\n"
     ]
    }
   ],
   "source": [
    "train_state = jax_trainer.train_and_evaluate(\n",
    "    output_folder=\"../data/jax_models/\" + MODEL + \"/\",\n",
    "    output_file_id=MODEL,\n",
    "    run_id=\"test_run_notebook\",\n",
    "    wandb_on=False,\n",
    "    wandb_project_id=\"test_run_notebook\",\n",
    "    save_data_details=True,\n",
    "    verbose=1,\n",
    "    save_all=True,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Re-instantiate the trained network from its serialized network config\n",
    "# (inference mode: train=False).\n",
    "jax_infer = lanfactory.trainers.MLPJaxFactory(\n",
    "    network_config=f\"../data/jax_models/{MODEL}/{MODEL}_jax_network_config.pickle\",\n",
    "    train=False,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "passing through identity\n"
     ]
    }
   ],
   "source": [
    "# Build plain and jit-compiled forward-pass functions from the saved train state.\n",
    "forward_pass, forward_pass_jitted = jax_infer.make_forward_partial(\n",
    "    seed=42,\n",
    "    input_dim=model_config[\"n_params\"] + 2,  # model parameters + (rt, choice) columns\n",
    "    state=f\"../data/jax_models/{MODEL}/test_run_notebook_lan_ddm__train_state.jax\",\n",
    "    add_jitted=True,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [],
   "source": [
    "import jax.numpy as jnp\n",
    "\n",
    "# Number of (rt, choice) grid points pushed through the network.\n",
    "# Defined once instead of repeating the magic number 2000 (and its half, 1000)\n",
    "# throughout the cell.\n",
    "n_grid = 2000\n",
    "\n",
    "# Test parameters:\n",
    "theta = deepcopy(ssms.config.model_config[MODEL][\"default_params\"])\n",
    "\n",
    "theta[0] = 0.2\n",
    "theta[3] = 1.0\n",
    "\n",
    "# Comparison simulator run\n",
    "sim_out = ssms.basic_simulators.simulator.simulator(\n",
    "    model=MODEL, theta=theta, n_samples=50000\n",
    ")\n",
    "\n",
    "# Make input matrix: one row per (rt, choice) grid point,\n",
    "# columns = theta parameters, then rt, then choice.\n",
    "input_mat = jnp.zeros((n_grid, len(theta) + 2))\n",
    "for i in range(len(theta)):\n",
    "    input_mat = input_mat.at[:, i].set(jnp.ones(n_grid) * theta[i])\n",
    "\n",
    "# rt column: 5 -> 0 over the first half, 0 -> 5 over the second half\n",
    "input_mat = input_mat.at[:, len(theta)].set(\n",
    "    jnp.array(\n",
    "        np.concatenate(\n",
    "            [\n",
    "                np.linspace(5, 0, n_grid // 2).astype(np.float32),\n",
    "                np.linspace(0, 5, n_grid // 2).astype(np.float32),\n",
    "            ]\n",
    "        )\n",
    "    )\n",
    ")\n",
    "# choice column: -1 for the first half, +1 for the second half\n",
    "input_mat = input_mat.at[:, len(theta) + 1].set(\n",
    "    jnp.array(\n",
    "        np.concatenate(\n",
    "            [np.repeat(-1.0, n_grid // 2), np.repeat(1.0, n_grid // 2)]\n",
    "        ).astype(np.float32)\n",
    "    )\n",
    ")\n",
    "\n",
    "net_out = forward_pass_jitted(input_mat)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([1.07477757e-04, 2.14955515e-04, 1.07477757e-04, 0.00000000e+00,\n",
       "        0.00000000e+00, 0.00000000e+00, 6.44866544e-04, 4.29911029e-04,\n",
       "        6.44866544e-04, 6.44866544e-04, 6.44866544e-04, 8.59822058e-04,\n",
       "        9.67299815e-04, 1.50468860e-03, 1.50468860e-03, 2.25703290e-03,\n",
       "        3.22433272e-03, 2.57946617e-03, 4.83649908e-03, 4.62154356e-03,\n",
       "        8.16830955e-03, 8.16830955e-03, 9.99543142e-03, 1.33272419e-02,\n",
       "        1.79487855e-02, 2.28927623e-02, 2.90189945e-02, 3.49302711e-02,\n",
       "        4.37434472e-02, 5.45987007e-02, 7.39446970e-02, 8.72719389e-02,\n",
       "        1.13711467e-01, 1.40903340e-01, 1.71104590e-01, 2.14418126e-01,\n",
       "        2.61385906e-01, 3.31676359e-01, 3.45433512e-01, 1.28328442e-01,\n",
       "        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
       "        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
       "        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.53048326e-01,\n",
       "        5.10089436e-01, 4.96224805e-01, 4.30878329e-01, 3.27914637e-01,\n",
       "        2.55474629e-01, 2.17642458e-01, 1.68095212e-01, 1.35314496e-01,\n",
       "        1.04790813e-01, 8.72719389e-02, 7.44820858e-02, 5.43837452e-02,\n",
       "        4.32060584e-02, 3.58975709e-02, 2.92339500e-02, 2.40750176e-02,\n",
       "        1.74113967e-02, 1.27898531e-02, 1.21449866e-02, 9.78047591e-03,\n",
       "        7.41596525e-03, 4.94397683e-03, 5.37388786e-03, 3.76172150e-03,\n",
       "        2.90189945e-03, 3.22433272e-03, 1.07477757e-03, 2.04207739e-03,\n",
       "        1.61216636e-03, 6.44866544e-04, 7.52344301e-04, 5.37388786e-04,\n",
       "        6.44866544e-04, 3.22433272e-04, 1.07477757e-04, 3.22433272e-04,\n",
       "        2.14955515e-04, 2.14955515e-04, 5.37388786e-04, 0.00000000e+00,\n",
       "        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 2.14955515e-04,\n",
       "        0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.07477757e-04]),\n",
       " array([-8.47512817, -8.28904316, -8.10295815, -7.91687313, -7.73078812,\n",
       "        -7.5447031 , -7.35861809, -7.17253307, -6.98644806, -6.80036304,\n",
       "        -6.61427803, -6.42819302, -6.242108  , -6.05602299, -5.86993797,\n",
       "        -5.68385296, -5.49776794, -5.31168293, -5.12559792, -4.9395129 ,\n",
       "        -4.75342789, -4.56734287, -4.38125786, -4.19517284, -4.00908783,\n",
       "        -3.82300282, -3.6369178 , -3.45083279, -3.26474777, -3.07866276,\n",
       "        -2.89257774, -2.70649273, -2.52040771, -2.3343227 , -2.14823769,\n",
       "        -1.96215267, -1.77606766, -1.58998264, -1.40389763, -1.21781261,\n",
       "        -1.0317276 , -0.84564259, -0.65955757, -0.47347256, -0.28738754,\n",
       "        -0.10130253,  0.08478249,  0.2708675 ,  0.45695251,  0.64303753,\n",
       "         0.82912254,  1.01520756,  1.20129257,  1.38737759,  1.5734626 ,\n",
       "         1.75954762,  1.94563263,  2.13171764,  2.31780266,  2.50388767,\n",
       "         2.68997269,  2.8760577 ,  3.06214272,  3.24822773,  3.43431274,\n",
       "         3.62039776,  3.80648277,  3.99256779,  4.1786528 ,  4.36473782,\n",
       "         4.55082283,  4.73690784,  4.92299286,  5.10907787,  5.29516289,\n",
       "         5.4812479 ,  5.66733292,  5.85341793,  6.03950294,  6.22558796,\n",
       "         6.41167297,  6.59775799,  6.783843  ,  6.96992802,  7.15601303,\n",
       "         7.34209805,  7.52818306,  7.71426807,  7.90035309,  8.0864381 ,\n",
       "         8.27252312,  8.45860813,  8.64469315,  8.83077816,  9.01686317,\n",
       "         9.20294819,  9.3890332 ,  9.57511822,  9.76120323,  9.94728825,\n",
       "        10.13337326]),\n",
       " [<matplotlib.patches.Polygon at 0x2d44fff40>])"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABNxklEQVR4nO3de3yT5d0/8E/OaXqkZ1pKKeUkKBSB1uKDqKuyeRanzO0nrnNsU3C4OufYJt3cfOrUIc8cDscE93h4ZD7z8DgdTqu4CVWUg2JFBARaKG1poce0SZrcvz/u3GlKm9KkSe5DPu/XKy8hzeFKY9MP3+t7XZdOEAQBRERERDLRyz0AIiIiim0MI0RERCQrhhEiIiKSFcMIERERyYphhIiIiGTFMEJERESyYhghIiIiWTGMEBERkayMcg9gJDweDxoaGpCYmAidTif3cIiIiGgEBEFAZ2cncnJyoNcHrn+oIow0NDQgLy9P7mEQERFRCOrr6zFu3LiAX1dFGElMTAQgvpikpCSZR0NEREQj0dHRgby8PN/v8UBUEUakqZmkpCSGESIiIpU5W4sFG1iJiIhIVgwjREREJCuGESIiIpIVwwgRERHJimGEiIiIZMUwQkRERLJiGCEiIiJZMYwQERGRrBhGiIiISFYMI0RERCQrhhEiIiKSFcMIERERyYphhIiIiGTFMEJEqnK0tRvLn9uFv3/SIPdQiChMGEaISFUe2rIfr31yAiue241uR5/cwyGiMGAYISLVEAQB/zpw0vf3HUdOyTgaIgoXhhEiUo2G9l509vZXQz6pb5dxNEQULgwjRKQaR1q6B/z9y5YumUZCROHEMEJEqtHY3jvg74dOMowQaQHDCBGpRlOnGEZm5aUAAI622GUcDRGFC8MIEalGc4cDADDbG0Y6HX3o7HXJOCIiCgeGESJSjaYOsTJSkB6PJKsRwOCpGyJSH4YRIlKNU91OAEBqvBljk+MAACcYRohUj2GEiFSjvUeckkmOM2FsihUAKyNEWsAwQkSqIe0xkhxnwthkMYw0tPfIOSQiCgOGESJSjQ5vZSQpzoSMBAsAoKXLIeeQiCgMGEaISBXcHgGd3rNokqxGpHnDSGuXU85hEVEYMIwQkSr4L+FNijMhLcEMAGjtZhghUjuGESJShY4esSpiMxtgMuiRFi9VRjhNQ6R2DCNEpArSSpokqwkAkM7KCJFmMIwQkSp09ErNq+JmZ1LPSJvdBZfbI9u4iGj0GEaISBWklTSJ3spISpwJep34tdN2VkeI1IxhhIhUwe50AxB7RgBAr9chNd47VcMVNUSqZpR7AEREI6HrOIYZusOYKrQDDRbAloa0eAtaupwMI0QqxzBCRMrXVo+r/30tFlt6gWMA/gTAZMOU1CewHwa0dnNFDZGacZqGiJTP3gqTpxcrnXfgsUlPAos3AC47cszdAMQmViJSr5DCyLp16zBhwgRYrVaUlJRgx44dAW/71FNPQafTDbhYrdaQB0xEseugkIu2lOlA+hQAQIJFLO5Kza1EpE5Bh5HNmzejoqIClZWV2LVrF2bNmoVFixahubk54H2SkpJw4sQJ3+Xo0aOjGjQRxa44k8H353hvGGlnGCFStaDDyJo1a7Bs2TKUl5dj+vTpWL9+PWw2GzZu3BjwPjqdDtnZ2b5LVlbWqAZNRLErzswwQqQ1QYURp9OJnTt3oqysrP8B9HqUlZWhpqYm4P26urqQn5+PvLw8XHvttaitrR32eRwOBzo6OgZciIiA/qW9AJDIMEKkCUGFkZaWFrjd7kGVjaysLDQ2Ng55n6lTp2Ljxo145ZVX8Mwzz8Dj8WD+/Pk4duxYwOepqqpCcnKy75KXlxfMMIlIw/zDSLyVYYRICyK+mqa0tBRLly5FUVERFi5ciBdffBEZGRl44oknAt5n1apVaG9v913q6+sjPUwiUgmrX8+Ir4G1t0+u4RBRGAS1z0h6ejoMBgOampoGXN/U
1ITs7OwRPYbJZMLs2bNx8ODBgLexWCywWCzBDI2IYoTN3P+xldpzBDN03cjstgANiYAtDUhhJZVIbYKqjJjNZsyZMwfV1dW+6zweD6qrq1FaWjqix3C73di7dy/Gjh0b3EiJiOCdprGlASYbCt69C69Zfo5Nzh8Df1oIrCsG2lhJJVKboHdgraiowK233oq5c+eiuLgYa9euRXd3N8rLywEAS5cuRW5uLqqqqgAA999/Py644AJMmjQJbW1tePjhh3H06FF897vfDe8rIaKYYDUZxOrH8h042dyAb2/6EEa9Dq8syQBeXAbYW1kdIVKZoMPIkiVLcPLkSaxevRqNjY0oKirCli1bfE2tdXV10Ov7Cy6nT5/GsmXL0NjYiDFjxmDOnDnYvn07pk+fHr5XQUQxw9fAmpIHsyUbtUIL4AacY8bCLO/QiChEIZ1Ns2LFCqxYsWLIr23dunXA3x999FE8+uijoTwNEdEgA1bT+P25x+VmGCFSKZ5NQ0SKJ0Dw/dl/B1ajQQ+rSfwYszvcUR8XEYUHwwgRKZ6zzy+M+FVDgP7lvT0uhhEitWIYISLFc/Z5fH/232cEYBgh0gKGESJSPJdbDCM6HWDU6wZ8TTqfxu5kGCFSK4YRIlI8pzeMmA166HRDh5EeJ3dhJVIrhhEiUjyXR+wZMRl0g74mTdP0cpqGSLUYRohI8VzenhGT0TDoa/3TNJ5BXyMidWAYISLFk3pGTPqhKiNiQGEDK5F6MYwQkeJJq2nMxsEfWQm+ygh7RojUimGEiBTP5QlcGelvYGVlhEitGEaISPFc3k3PhuoZ4T4jROrHMEJEiufyLe1lZYRIixhGiEjxpH1GTEP0jMSzZ4RI9RhGiEjx+nyraYZqYBWnbnpdXNpLpFYMI0SkeE632DNiNg61tNcEgNM0RGrGMEJEiufb9Mww1DSNWBmxs4GVSLUYRohI8Xw9I0OEEe4zQqR+DCNEpHgut3Q2TeAGVvaMEKkXwwgRKZ7TLU7BmIepjBCRejGMEJHi+SojQzSwWox6GIbYmZWI1INhhIgUr7+BdfAOrDqdjtURIpVjGCEixfOd2jvEDqwAp2qI1I5hhIgUz7fPyBA9IwCQYjNFczhEFGYMI0SkeC5vA2ugykhyHMMIkZoxjBCR4vWf2svKCJEWMYwQkeK5htn0DACS48zRHA4RhRnDCBEpXn/PCKdpiLSIYYSIFK/PI1ZGjGxgJdIkhhEiUrw+b2XEGGBzM1ZGiNSNYYSIFK/PI4WRAJURhhEiVWMYISLFc7ulaZoAlRFO0xCpGsMIESlef2WE0zREWsQwQkSKJ4URQ4DKSIqtf2mvACEqYyKi8GEYISLF65P2GQnQM+JfGXH0MYwQqQ3DCBEpnluqjASYpok3G3xTOJ29rqiNi4jCg2GEiBTPJS3tDTBNo9PpfCf3djn6ojYuIgoPhhEiUjy3MPzSXgBIsIphpLOXYYRIbYxyD4CIaDgej+CbpglUGQGARIsRsAM4uR9oSBavtKUBKXlRGCURjQbDCBEpmsu7FTwQuGcEAARbGuynLJi54x5gh/dKkw1YvoOBhEjhOE1DRIombQUPBF5NAwCepHEoczyMv817Fvjeu8DiDYDLDthbozFMIhoFVkaISNFc7pFVRpLjTGhAOr40FgI506IxNCIKE1ZGiEjRXH6VkQCH9gLo32ukzc6lvURqwzBCRIrmXxnRIXBlJMV7Pk17D8MIkdowjBCRovn3jAxHqowwjBCpD8MIESma068yMhypMsJpGiL1YRghIkXr84wsjLAyQqReDCNEpGiuER58lxwnntzbZndGcjhEFAEMI0SkaK4gKyOdjj7fjq1EpA4MI0SkaK6+4MKIIPDkXiK1YRghIkXrG2GVw2zUI95sAMAmViK1YRghIkUb6WoagE2sRGrFMEJEijbSfUYAINnmbWJlGCFSFYYRIpXpdbnlHkJUuYKqjIjHbbEyQqQuDCNEKvLegRYU3f9P3P3Xj+Ue
StQEE0ZSvMt727m8l0hVQgoj69atw4QJE2C1WlFSUoIdO3aM6H7PP/88dDodrrvuulCelijmbdx2GL0uD/6261jMrBhxBTFNw/NpiNQp6DCyefNmVFRUoLKyErt27cKsWbOwaNEiNDc3D3u/I0eO4Mc//jEWLFgQ8mCJYl1tQ7vvz0da7DKOJHr6Qmhg5WoaInUJOoysWbMGy5YtQ3l5OaZPn47169fDZrNh48aNAe/jdrvxrW99C7/61a8wceLEUQ2YKFZ1OfrQ1OHw/b3uVGyEEVcQG5glS+fTsDJCpCpBhRGn04mdO3eirKys/wH0epSVlaGmpibg/e6//35kZmbitttuC32kRDGu/ozwcSpG+iJGuukZwKW9RGplDObGLS0tcLvdyMrKGnB9VlYWPv/88yHv89577+HJJ5/Enj17Rvw8DocDDkf/vwA7OjqCGSaRJp0ZRk53x0YYGelBeYB/A6sLQX68EZGMIrqaprOzE7fccgs2bNiA9PT0Ed+vqqoKycnJvkteXl4ER0mkDvWnewb8/VSMhBE2sBJpX1D/dEhPT4fBYEBTU9OA65uampCdnT3o9ocOHcKRI0dw9dVX+67zeP+VYzQasX//fhQWFg6636pVq1BRUeH7e0dHBwMJxbzj3jCi04nnr8TK6bTB7TMi9YzExveGSCuCqoyYzWbMmTMH1dXVvus8Hg+qq6tRWlo66PbTpk3D3r17sWfPHt/lmmuuwSWXXII9e/YEDBgWiwVJSUkDLkSxrrVbnLqckpkIIHaaNEMKI1xNQ6QqQU+qVlRU4NZbb8XcuXNRXFyMtWvXoru7G+Xl5QCApUuXIjc3F1VVVbBarTj33HMH3D8lJQUABl1PRMOTpmXyUuOwv6kTdmds7MQa3HbwYhhx9Hng6PPAEqlBEVFYBR1GlixZgpMnT2L16tVobGxEUVERtmzZ4mtqraurg17PjV2Jwq21Swwj48bYAAB2Z5+cw4maYA7KS7QYYdDr4PYI6HL0MYwQqURI7eYrVqzAihUrhvza1q1bh73vU089FcpTEsU8qTIybkwcAMDuYGXkTDqdDklWI07bXehy9CEtguMiovBhCYNIBQRB8AsjYmWkO0YqI8H0jABAivfk3s7e2Pj+EGkBwwiRCnQ5+nzTFXmpsVUZCWZpL9DfxBorZ/cQaQHDCJEKSFURq0mPjASxE6Lb2QdBCO4XtRq5g9j0DOgPI10OVkaI1IJhhEgFWr1hJC3egjizAQDgEcRVI1rXF8TZNED/xmddnKYhUg2GESIVkLZ+T403w2bu7zuPheW9wTSwAqyMEKkRwwiRCrT6hRGDXgerSfzR7Y6BX7hBV0Z8PSPa/94QaQXDCJEKnPJN04grReK91ZFYWFETdM+ItJomBoIakVYwjBCpwCm/yggAWE1i30iviz0jZ+I0DZH6MIwQqYC0+2pqghRGxB/dXhd7Rs4kTdN0cWkvkWowjBCpwCnvIXlpgyoj2g8j7mArI1xNQ6Q6DCNEKtA/TSPuMRJLYaQvyJ4RXwMrp2mIVINhhEgFWgf1jEjTNNrvGQm6MsKeESLVYRghUoEzV9NYjbFTGQl2O/gkbxiJgc1piTSDYYRI4Xpdbt/mZv0NrLETRoKtjFhNBsR5vz9EpA4MI0QKJ1VFTAYdEi3i/iIWaZomJraDD/41SlM1RKQODCNECieFkTE2M3Q6HQD4/uXPysjQpPNpiEgdGEaIFO7M5lUgtjY9C7ZnBOjvGyEidWAYIVI43x4jCf5hJHY2PQupMsIwQqQqDCNECufbfdW7xwjQv5rG0af9MBLsdvCAOKVFROrBMEKkcGcu6wX6p2l6nNoPI8EelAcAY+IZRojUhGGESOHOPCQPiK1Nz4I9mwYYGNyISPkYRogUbqgGVovUwMppmiGxMkKkLgwjRAo33DQNG1iHlhrPBlYiNWEYIVK4oaZp4mJoaW8om575N/sSkfIxjBApnK8yEoNLez0eASEURpDK1TREqsIw
QqRgLrcH7T0uAGcs7TVJS3u1XRkJpV8EAMb4TdPEwpb5RGrHMEKkYKftYlVErxt43kqsnNobSr8IACRYjDDqxa3zO3pd4RwSEUUAwwiRgklTNCk2MwzeX65A/zRNj8bDSCj9IgCg0+l84a2jh2GESOmMcg+AiAI71XVG82pbPWBvRWJHL2boDsPuSpZxdJEXyh4jkiSrEehiGCFSA4YRIgUbsMdIWz2wrhhw2ZEN4DULYBcsENoWQpcyXt6BRkioPSOA97C8LqCjty+MIyKiSOA0DZGCDdhjxN4KuOzA4g3o+nY1VjrvgE3ngLOjReZRRo7UM2L0m6IaKenk3nY7KyNESscwQqRgQ+2+ivQpMI87HweFXACAU8OrRaSeEUMIYcTXM8IGViLFYxghUrBT3Q4Ag89aMRl0vl/QTreGw4i3ZySUMJJkZQMrkVowjBAp2FC7rwLiahGzwRtGNHw+jdQzoteNYpqGYYRI8RhGiBTMF0YSBm9vbjFKG5+F3uSpdL6eEUMo0zRifz6naYiUj2GESMGGOiRPYjaKP77arox4e0ZCqYxI0zRcTUOkeAwjRArWeuY+I36kMKLl7c6lykhIPSO+aRqGESKl4z4jRArlPl2HnJ79yNYBWd0ZgP3IgK+bDVJlRLthxDWKBlZpNU1nrwsejwB9CI9BRNHBMEKkRG310K8rwatmu/j3Z7zXm2yALQ0AYDFqP4yMpjKSaDX6HqOztw/JNtNZ7kFEcmEYIVIieyt0fXasdN6BJks+nl92gXi9LQ1IyQPg1zOi5aW9o9hnRKocAcApu5NhhEjBGEaIFOygkIuehGlATtGgr0mrabTcwOoexdJeySTdcfQc3Qk4k8Qr/AIdESkDwwiRwqXHD17WC/RP02h5aa+06Vko28HDloZeWPBf5seBVx/vv95kA5bvYCAhUhCGESKFG2olDQCYYmJpb+g9I0jJQ+X4Tfj0wGEsv6QQV5w7Fmj5AnhxmXjOD8MIkWIwjBApXGrC0GEkFioj7lH0jACAPmU8agXgC30hrsiZEs6hEVEYcZ8RIoVLD1AZMft2YNV+ZSTUVbkZ3iDX0uUI15CIKAIYRogULtA0jcUonU2j4dU0vp6R0D6qMhLFfpuWTmfYxkRE4ccwQqRwaUOcSwP0V0bYMxJYuvd7d5KVESJFYxghUrihzqUBALOvMsKekUDSpcoIwwiRojGMEClcoMqINZZ6RkL8pMpIkKZpGEaIlIxhhEiB3EJ/tePsS3tjoGdEF9pHlVQZ6Xa6YXfywDwipWIYIVIg6dh7nQ4YE2Ab8/6lvRoOI1LPiCG0aZp4swFWk/h9YhMrkXIxjBApUEePCwCQaDHCaBj6x9R3aq+Gz6aRekZC3Q5ep9P5VtSwiZVIuRhGiBSozS7+Kz45LvDhbrFUGTGO4pPKt6KGfSNEisUwQqRA7d7KyHAnzfYv7dVwGHFLm56F/lElhRGuqCFSLoYRIgVq7xF7RpKtw1VGxKmLWFhNE+rSXsBv4zOGESLFCimMrFu3DhMmTIDVakVJSQl27NgR8LYvvvgi5s6di5SUFMTHx6OoqAhPP/10yAMmigW+ysgw0zSxUBnx7TMSYgMrwMoIkRoEHUY2b96MiooKVFZWYteuXZg1axYWLVqE5ubmIW+fmpqKn//856ipqcEnn3yC8vJylJeX44033hj14Im06nS32DOSEmBZL9DfM6LlMOLrGQmxgRXoP5+GPSNEyhV0GFmzZg2WLVuG8vJyTJ8+HevXr4fNZsPGjRuHvP3FF1+M66+/Hueccw4KCwuxcuVKzJw5E++9996oB0+kVae9DayBlvUCgFlqYHV7IAja3IXV1zMSlmkaLu0lUqqgwojT6cTOnTtRVlbW/wB6PcrKylBTU3PW+wuCgOrqauzfvx8XXXRRwNs5HA50dHQMuBDFEimMpMYPvfsq0B9GBEG7y3vdUs/IKCojnKYhUr6gwkhLSwvcbjeysrIGXJ+VlYXGxsaA92tv
b0dCQgLMZjOuvPJKPPbYY7jssssC3r6qqgrJycm+S15eXjDDJFI9aZpmuMqIxWDw/bnXpc0w0jfKs2mA/spIc4cDArRZQSJSu6ispklMTMSePXvw4Ycf4oEHHkBFRQW2bt0a8ParVq1Ce3u771JfXx+NYRIpgiAIOG0XG1gDbQUPAMb+LAKHS5sratxhWE2TmWgFAPS43Oh2avP7RKR2xmBunJ6eDoPBgKampgHXNzU1ITs7O+D99Ho9Jk2aBAAoKirCvn37UFVVhYsvvnjI21ssFlgsgcvTRFrWZnf5GjdT4gKHER36f0H3aDyMjGKWBnFmA5KsRnT09uFUlxMJYRobEYVPUJURs9mMOXPmoLq62nedx+NBdXU1SktLR/w4Ho8HDgfnb4mG0uy36sM0wiWt2p2mkXZgHUUaAZCdLFZHWrr5uUOkREFVRgCgoqICt956K+bOnYvi4mKsXbsW3d3dKC8vBwAsXboUubm5qKqqAiD2f8ydOxeFhYVwOBx4/fXX8fTTT+OPf/xjeF8JkUY0d/YGfZ9ejVZGPB5pB9bRhZGsJCu+aOrCqS5XOIZFRGEWdBhZsmQJTp48idWrV6OxsRFFRUXYsmWLr6m1rq4Oen1/waW7uxt33HEHjh07hri4OEybNg3PPPMMlixZEr5XQaQhoeyHodUw4l3ZO+owkp0kVkZaWRkhUqSgwwgArFixAitWrBjya2c2pv7mN7/Bb37zm1CehigmNYcSRjS68ZlUGRnNDqyAWBkBgFbuNUKkSDybhkhhWBnpJy3tHe0HVVYyKyNESsYwQqQwIVVGNBpGpL3cRrMDK+A/TcPKCJESMYwQKczJEBpYXW5tbublEUa/HTzQH0ZOcZqGSJEYRogUJpTKiNRboTXS0l4DRtkzkizuWyRts09EysIwQqQwofSMuDV6UJ5vae8oKyNp8RYY9DpoNLMRqR7DCJGC2J196OztC/p+Ho2GEbcvjIzucQx6HTITuaszkVIxjBApyIl2sV8k3mw4yy0H0uo0jVTxGc2pvRJpeS8RKQ/DCJGCNHrDSFpCcP+Kd2s1jIRpB1agv4mViJSHYYRIQU74wkjgA/KGotEs4jdNE47KCKdpiJSKYYRIQU609QAAMoKsjGi1Z8QTzmmaZFZGiJSKYYRIQU50iJWRdE7TAAD63OFpYAWA3JS40T8IEUUEwwiRgkg9I+lBTtNodmmvEL6ekRyGESLFYhghUpAG7zRNsA2sGs0ifg2so38s/8qIVsMbkVoxjBApSKN3miYjyD0xtDpNI70uQxjmaTITxY3PAOB0t2vUj0dE4cMwQqQQPU432uziL8lgV9NoNowI4auMGA16pMWL39dQttwnoshhGCFSCKkqEm82BL3pmaDRaYdw7jMCwLcL68mu4A8jJKLIYRghUghpWW92shW6IA+G02oPhMc3TROuMCIu7w3l/B8iihyj3AMgIlHbiS8xQ3cYs20pQEtzUPd1eyIzJrn1helsGkl6oneapoNhhEhJGEaIlKCtHpe9czWusPQCTQBeBGCyAba0Ed1d65uehW+ahpURIiViGCFSAnsrTJ5erHTegXlzL8D/uyBfDCIpeSO6u2YPyvO+LmOYpmmkVUonO9kzQqQkDCNECnJQyMWF488HckYWQiRa7RmRpml0YaqMSOfTNHexMkKkJAwjRAozbkzwO4VqtTLia2ANTxZBurcyMtZZh64jHyHBbAyqAkVEkcEwQqQAHkHwLW3LG2ML4f7hHY9SSBWfcFVG4pIy0QML/sv8OPDU4+KVJhuwfAcDCZGMuLSXSAGkzc4Meh3GhnC6rFanaTzeVULhWtqLlDzcnvxHXOl4ADsuewlYvAFw2QF7a3gen4hCwjBCpACN3qWmafFmGA3B/1hqdZqmz5tGDGGqjACAOS0ftUIB9usnAulTwva4RBQ6hhEiBWj2ru6QGiyDpcXt4AVB8E0/6cJVGUH/6b3HvJvMEZH8GEaIFKCpQwojwU/RANrs
GfF/TcbwZRHkpYo9OcdOMYwQKQXDCJECNHvDSGbIYUR7aUSaogHCWxkZ7w0jdafsYXtMIhodhhEiBWjy9oxkJYYWRrQ4TeOXRcLaM8IwQqQ8DCNECtA/TRNaz4gWKyP+K4TCtR08AOSlij0j7T0udPb2he1xiSh0DCNEMvN4BDR7z0rJCmFZL6DRMOL2CyNhnKaxmY1ITxBDX2MHt4UnUgKGESKZNXX2+rY9T4s3h/QYWpym8a+MhGsHVsl4b3WksZ1hhEgJGEaIZHbsdP+qjlB7I9yes99GbfwDVjinaQAgPy0eACsjRErBMEIks2OnR99IKWhwmkaaegrXib3+pOW9rIwQKQPDCJHM6sOw34UWt4OXpq7C2S8ikVbUNLEyQqQIDCNEMjvaOvrKiBZ7RvpP7I1cGDnRwY3PiJSAYYRIZkdbu0f9GBosjPgCViSmaaQwcrLTGfbHJqLgMYwQyewIKyNDiuQ0TWaiBWajXpPfNyI1YhghklG3ow8tXY5RP44We0akBlZDBMKIXq/zVUeISH4MI0QykvpFkuNMo3ocjwb/hS9VLcK9rFfCMEKkHAwjRDKS+kWyQzwgT6LJHVgj2DMCMIwQKQnDCJGMjnoPa8tJGV0YcWsvi/jCSCSmaYD+vUaISH4MI0QykiojY5PjRvU4mpymEaQG1sg8PisjRMrBMEIkoyMtYmVkbIgH5Em0OE3j8U3TROZjqiA93vdnAdr7/hGpCcMIkYzqvNM02aOsjGhxiapvaW9kZmkwPtXme+xT3a7IPAkRjYhR7gEQxapelxsN7eIOoKPtGdFyZSRSPSNmox5ZSVagFzh1ZO/AE5NtaUBKXkSel4gGYxghksmx03YIApBgMSI5bnQ/ilqsjPh6RiK0tBcAktKyYT9mweRtFcA2vy+YbMDyHQwkRFHCMEIkh7Z6tB44gBm6wyhMjoeupX1UD6fBwohvmsZoiGAYySpA2aGHcducJNx2YYF4ZcsXwIvLAHsrwwhRlDCMEEVbWz2wrhglLjteswDoAPAixH+N29JCekhN7sAawYPyJBMz4tGAdNTYs3BbTlHEnoeIhscwQhRt9lbAZcf/5q/Gpi8suHHuOHy7dMKo+hQ0OU0TwbNpJNKKmsMtXRF7DiI6O66mIZLJx45s1AoFiBt/PpBTNKopAQ0WRvrPpolgZUQKI3Wn7JoMdERqwTBCJJPGNnElTX5a/FlueXZa/EUqvaRIVkZykuNgNurhcgs4fronYs9DRMNjGCGSSXOneFpvftrodwLVZM+IENl9RgAx6Ezwfv+/5FQNkWwYRohk0ucRYDHqkZU4uj1GAG1uB++rjERwmgbw7xvpjujzEFFgIYWRdevWYcKECbBarSgpKcGOHTsC3nbDhg1YsGABxowZgzFjxqCsrGzY2xPFkvw0W1imIbS46ZkQhX1GAKAgPQEAcIRhhEg2QYeRzZs3o6KiApWVldi1axdmzZqFRYsWobm5ecjbb926FTfffDPeeecd1NTUIC8vD5dffjmOHz8+6sETqd341NH3iwBa7RkRX1OEswgmeisjXzKMEMkm6DCyZs0aLFu2DOXl5Zg+fTrWr18Pm82GjRs3Dnn7Z599FnfccQeKioowbdo0/PnPf4bH40F1dfWoB0+kdhPC0C8C9E9paInHI/434pWRDE7TEMktqDDidDqxc+dOlJWV9T+AXo+ysjLU1NSM6DHsdjtcLhdSU1MD3sbhcKCjo2PAhUiL8tPDUxnR4jRNNBpYAWCCdzXT8bYe9LrckX0yIhpSUGGkpaUFbrcbWVlZA67PyspCY2PjiB7j3nvvRU5OzoBAc6aqqiokJyf7Lnl53JKZtCk/NTyVES1O0whRamBNTzAjyWqEIABHWlkdIZJDVFfTPPjgg3j++efx0ksvwWoNvIJg1apVaG9v913q6+ujOEqiyPKvYkwIwx4jZz6mVvT3jEQ2jOh0OkzOSgQAHGji8l4iOQS1HXx6ejoMBgOampoGXN/U
1ITs7Oxh7/vII4/gwQcfxFtvvYWZM2cOe1uLxQKLxRLM0IhUo7XbiQwARr0OOSmjX9YLaLMy0r+0N/LPNSkjATuPnsaB5i4gI/LPR0QDBVUZMZvNmDNnzoDmU6kZtbS0NOD9HnroIfz617/Gli1bMHfu3NBHS6QBDd6dPrOTrDAawlOc1GAW8esZiXwamZwlLu892NwZ8eciosGC/iSsqKjAhg0b8Je//AX79u3D7bffju7ubpSXlwMAli5dilWrVvlu/9vf/hb33XcfNm7ciAkTJqCxsRGNjY3o6mI5lGLTce828DkpcWF9XK1tfObbZyQKk8mTMqUwws8lIjkEfWrvkiVLcPLkSaxevRqNjY0oKirCli1bfE2tdXV10Pt9evzxj3+E0+nE17/+9QGPU1lZiV/+8pejGz2RCh1v6wUA5IY5jLgFAXpEYU4jSqRsFemeEQC+npHDLd1weZJgivgzEpG/oMMIAKxYsQIrVqwY8mtbt24d8PcjR46E8hREmuWrjIwJT7+IRGtNrNGcpslJtsJmNsDudKOxvRdcv0cUXTybhijKGrxhJNyVEWmTMK2IZgOrTqfzTdXUn+LyXqJoYxghiiKX24PGDnGaJtw9I1o7uTdaZ9NI+sNIT1Sej4j6MYwQRVH9KbtvGW5avDmsj6215b3ROptGMjlT7BupO22PzhMSkQ/DCFEU+Z9/Eu5/8Qsaq4x4orQDq2SytzJS18owQhRtDCNEURTJw9i0WhmJRs8I0D9Nc+w0p2mIoo1hhCiKIhpGNFYZidbZNJK8VBvMRj2cbo11AhOpAMMIURRFIowYvKUDza2m8UTnbBqJQa9DYUZCVJ6LiAZiGCGKokiEEalyoLXKSDSX9krOyU6M3pMRkQ/DCFGU2J19ONHeG/bHlY63cbu1Fkaiu7QXAM4ZmxS15yKifgwjRFFypEVcpZFkDWnj44AM3l/WfRqbpxGi3MAKMIwQyYVhhChKpCmacO+8qpd6RjQ6TROtnhEAmDa2f5qmp88dteclinUMI0TR0FaPrsMfYYbuMGbbmsP60P2VEa2FkehP06QnWDDGuxnd0RbuN0IULeGtFxPRYG31wLpiLHHZscQC4DgAkw2wpYXl4aXKSJ/mekbE/0ZzmgYAJqTZgCaxkjUtuk9NFLMYRogizd4KuOx4NOkevHUyBT/96jQsmDUVSAnP2bAG7y9rrU3T+HpGopxGJqbHA01A17FaoCFbvNKWFrb3i4gGYxghipKajjTUCuORMbUYSAlfo6SvMqLRaZooztIAAHJzxsH+qQU31t0P/Ol+8UqTDVi+g4GEKELYM0IUJd0ON/Q6oCA9PqyPK/WMeDQXRsT/RrNnBADyC6eizPEwbhIehPC9rcDiDYDLLla4iCgiWBkhiqL8tHhYjIawPqbWKyPR7hkpzEjASUMGGhwCjlmnIi89ygMgikGsjBBFkXQYWzhJ28Fr7aC8aJ9NIzEZ9JiUKS7xrW3oiOpzE8UqhhGiKIpEGPFtB6+xMNLfMxL9ysR5uWJPz6fH26P+3ESxiGGEKIomR6QyIv5Xq2Ek2tM0ADBzXAoA4ONjbdF/cqIYxDBCFEWRqYyIP8baCyPif6M9TQMAM8clAwD2Hm+HAG19X4mUiGGEKMI6el2+P0fiiHqDVhtYPfJVRqZmJ8Js0KPN7kJjuyP6AyCKMQwjRBFWf7oHAJCZaEG8JfwL2KRNz7RXGZGvZ8RiNPjOqTnQ3Bn15yeKNQwjRBFWf0o842Rcqi0ij6/X4qm9bfUYa9+PGbrDyOz6HGj5IupDkKZqDjR3Rf25iWIN9xkhirC6U2JlZPyYyIQRg0Fjp/Z6z/L5kcuOH1kA7PRewniez0jMzE0BUIcDTayMEEUawwhRhNWfFisjealxEXl836m9Wjkoz3uWz9M5v8Dzh+Nw238UYPHs3KifDzMzT6yMHDrZDXDfM6KIYhghirC61m4AQF6Epmm0uulZo3k8aoUEnEo6B8iZGPXnn5SR
AKtJD7vTDVii/vREMYU9I0QR1GZ3oqXLCQDIT4tUz4j4X82tppFxaS8AGA16nJuTLMtzE8UahhGiCPq8sb/fIMEcmUKkVisjgoybnklmj0+R78mJYgjDCFEEfX4i8meb6PXa3PTMF0ZkTCNz8lNle26iWMKeEaJwa6v3HTffcfgAJumOR/TpjJo9m0b8rxz7jEjm5I/x/bmztw+Jso2ESNsYRojCybssFS5xBc0PAcAM9BniYIzQslRvYURzPSNKmKbJSLQgNyUO6BGn3OZFv4+WKCYwjBCFk3dZKhZvgDttMm5cXwNHnwePL7sM+RFaltrfM6KhTc8gfwOr5JyxicCXwGcn2jFP1pEQaRfDCFEkpE/BUdMk7HI1wGrSY9yEKRF7Kt8+I5qrjIj/lbMyAgDTxyYBXwL7otD/QxSrGEaIIkRaSTM1K9FXvYgEvcZX08jZMwIA03OSAAB9zfvhOrYbJum9jPImbERaxjBCFCHSSppp2UkRfR69Vk/t9f5X7mma3Jxx6IEFj+j/APz5D/1fMNmA5TsYSIjCgGGEKEI+k8LI2MiuwZBW03i0FkYU0MAKAPox41GZtwm1Bw+j/MIJ+Pr548SD+15cJvYIMYwQjRr3GSGKkL3H2wEA5+VGdhdPrVZGoJAGVgCYNnU6aoUCvNKUAeQUAemR6wEiikUMI0QR0NrtRFOHA3pdf89BpBg0u8+I1DMi80AAXDgpHQDw4ZFTcPZpa9USkRIwjBBFwIHmLgDA5MxE2CK0Dbwko/cIZugOI6NrH9CwR9zrRAP6p2nkTyNTshKQnmBGr8uD3XWn5R4OkeawZ4QoAg42dQIw4rxxEZyisaUBJhu+dmA1vmYB8IX3opHGSqnOo4QwotPpUFqYjlc/bsC2Q60omSH3iIi0hZURogiQKiMzIxlGUvKA5Tuw+fxncKXjAayduAFYvEHcdM27Hb2aKWWfEcmFheIOujWHWmQeCZH2sDJCFAEHm7sApGDmuJTIPlFKHtqSnagV9JhqyQXSI/t00aSEs2n8zS8Uv7m769rQ47IgTubxEGkJKyNEEdDW44JRr8O07MgfrWbQ6GoaJZxN4298mg3jxsShzyPg0wbuxkoUTgwjRBEyNTsRVpMh4s9jlHZgFbQVRpRyNo2/BZPF6siuo2xiJQonhhGiCIlov4gfg0H8Me5za2vJqeBtYdUr6FPq4qmZAMQlvkQUPgr6MSfSltnjx0TleaRpjDdqm+DQ0B4YSjmbxt+Fk9JhMuhwor1X7qEQaQrDCFEYOf2qE3PzoxNG2ntcvj8fa7NH5TmjweP9VionigAJFiNKCtLkHgaR5jCMEI1WW7242VjDHhw/8DEAICXOhIL0+Kg8/eLZ43x/1lLbiFQZieSJx6G4eGqG3EMg0hwu7SUajbZ6YF2xuLcHgAIAdsGCcePGRW16ITvZipxkKxrae32/wLVAqjEZFDRNAwCXTsvES6+Lf7a73LDJOxwiTWAYIRoNe6sYRBZvANKn4NevfYZ/HHLh1sJpUR2GdFiellb3Km2fEUlBejzGJluBXmBPXRvm58s9IiL14zQNUTikT4EwdhZebsxAA9Ixd0J0+kUk0vJXLYURwaOsfUYkOp0OxQXSbqzq3+mWSAkYRojC5HBLN1q7nTAb9Tg3NzrLeiXSL2xNTdMotGcEAC6cJIaRD4608hRfojBgGCEKk+3efyUX5aXAYoz8Zmf+pMqIdqII4IHylvZKzvHurNvtcGMbz6ohGrWQwsi6deswYcIEWK1WlJSUYMeOHQFvW1tbixtuuAETJkyATqfD2rVrQx0rkaJt9/5SurAw+gfESL+vPRqap5GW9iqxMiKFv0m64/hkx7viaqq2enkHRaRiQYeRzZs3o6KiApWVldi1axdmzZqFRYsWobm5ecjb2+12TJw4EQ8++CCys7NHPWAiJfIIgq9/QCrhR5OvMqKdLKK4s2kGsKXBbYjDf5kfx8pD3wX+tFBcVcVAQhSSoMPImjVr
sGzZMpSXl2P69OlYv349bDYbNm7cOOTt582bh4cffhjf+MY3YLFYRj1gIiX6sqUbp+0uxJsNmJWXEvXn7w8j2kkjSjybxiclD8LyD/BN/UO40vEAvrhwjbiqys6GVqJQBBVGnE4ndu7cibKysv4H0OtRVlaGmpqasA3K4XCgo6NjwIVIyT4+1gYAKC5IhckQ/VYs3zRN1J85cqTXosgwAsCYmo/xM0pRKxTgjaboNiwTaU1Qn5otLS1wu93IysoacH1WVhYaGxvDNqiqqiokJyf7Lnl5eWF7bKJI+Li+HYB4dokctFgZ8S3tVXCb/dWzcgAA7x1kEyvRaCjyx3zVqlVob2/3XerrOQ9LylbbIIaR+TI0rwL9v7A11L/av7RXoZURALhgYhqykizocvTJPRQiVQsqjKSnp8NgMKCpqWnA9U1NTWFtTrVYLEhKShpwIVKyXpcHGYkWTPMu+Yw2LTawKnUHVn8GvQ7XFeXKPQwi1QsqjJjNZsyZMwfV1dW+6zweD6qrq1FaWhr2wRGpyVemZfq2ZY82nRanaZS8msbP9ef3h5GOXtcwtySiQII+m6aiogK33nor5s6di+LiYqxduxbd3d0oLy8HACxduhS5ubmoqqoCIDa9fvbZZ74/Hz9+HHv27EFCQgImTZoUxpdCFH0CBN8R95dOy5RtHHotNrAqeAdWf9Oyk8QTmjuBfx9owZUT5R4RkfoEHUaWLFmCkydPYvXq1WhsbERRURG2bNnia2qtq6uD3q/jrKGhAbNnz/b9/ZFHHsEjjzyChQsXYuvWraN/BUQyqj/Vg/EATAa9bM2rgP/ZNNqpjCh6ae8ZLp2WCXwIvP15M65cJPdoiNQnpFN7V6xYgRUrVgz5tTMDxoQJEzRVOiby98HhUxgPYOa4ZMRb5DsE21c80NDPmi+MKLwyAgAXT8kAPgQ+b+zE/sZOTJWpd4hIreT79CRSo7b6ARtbNRzYAwAoKUiVaUAinRZP7VVJzwgApMabAYjbw7/zzj8xdWGh+AVbGpDCrQmIzoZhhGik2urFLb9ddt9VKwHYBQvmniNv/5OvZ0RTlREpjKggjfhtD4/9EC8AYLIBy3cwkBCdBcMI0UjZW8UgsngDkD4F//dxA57415cYlzsOT4yfLOvQNHlqr4p6RpCSB92KHfjuE//EifZerCybjMsz2oEXl4n/3zCMEA1LkZueESla+hQgpwh/OZKCWqEAJUWz5B6RJvcZkahhmgYA9GPGY84Fl6BWKMDjnyeI/58Q0YgwjBCFoKGtBzuPnoZOB1xx3li5h9N/No0G04jSl/b6u3HuOJgMOuypb8OB5i65h0OkGgwjRCF4fe8JAMC8/FRkJ1tlHo1fZURLG414KXkH1jOlJ1hwpTecvrLnuMyjIVIPhhGiELy0W/xFc+VM+asigP+mZ9qrjKioMAIA+O4Ccdezfx/g4XlEI8UwQhSkQy1dqG3ogNmgxzXeU1vlpuWeETVN0wDAubnJuGBiKvq0tM6aKMIYRoiC9NZnzQCAy6ZnYYx3fwm5afFsGokqVtOcYdmC/j3h7S63jCMhUgcu7SUajv8mZy1fAADe2d8MIA83zh0n37jO0L/PiLzjiASjyiojAHDJ1Ez8dUwcYAd27KjBxSaD+AVugkY0JIYRokCG2OSszxCHuu44ZCdZsWByhoyDG0iv4cqI2qZpAHEL+68Vnwv7OxZcXPtzoNb7BW6CRjQkhhGiQM7Y5AwAfvRKHRq6DVgxZ5yifklKZ1NqLYqYDDpVrabx97X/mIsl238PV2cLfrCwEFfndHITNKIA2DNCdDbeTc4+10/Eq0cNMOh1uLlkvNyjGkCLZ9MA6qyKSCxGA264tBS1QgEe2GWGI0XeIwOIlIxhhGiEntp2BACwaEYWclPi5B3MGbQ6TWPSq/sj6qa54zA22YrGjl68ua9J7uEQKZa6f9KJouR0t9O3t0j5hQUyj2YwrTawGg3qrYwAYnXkjovFE3xf+LBe5tEQ
KRfDCNEIPLejDo4+D87NTcLc/DFyD2cQrVZGDCqvjADATfPykJNsRUu3U+6hECmW+n/SiSKsp8+NJ987DAC47T8KFNlQqdPoDqwmlVdGALE6cs9Xp/r+frrHJeNoiJSJYYToLP6xtxGnup3IT7Ph6pnK2HH1TFrdgVXNDaz+rp2Vi8mZCQCA//mgTubRECkPwwjRWfxtl9grsvziSTAalPkjo9foqb0mhX6/g6XX6/Adb6/RltpGHGzulHlERMqijZ90oghqszuRmxKH68/PlXsoAWm1MqLG3VcDmTkuGQDg9ghY/Uqt5vp7iEaDYYQogE5Hn+/PKy6dpOh/pes0Gka0Mk3jz2zQY/uhVry857jcQyFSDOV+uhLJ7IWPxKWY+Wk23DhHOefQDIVLe9XjG8Xi7qu/+fs+tNm5woYI4HbwRP38DsVr7OhF7ccfAUagfH6BYntFJHrfDqzaSiNaWNp7phvyuvFuuh1HW+34y9/asPLqC7g9PMU8hhEiYNCheNkAfmcEenUWzJ1eKO/YRkCqjGitD0FThRFbGmCywfjK9/E4AFgAHALcj8XBcOeHDCQU0xhGiIABh+Jt70jDA6/tg1Gvw+++fSkmpSjrHJqh6DRbGdFQGknJE0/s9VbfNvz7MD79eAf+y/w42lobkcIwQjFMezVQolGwJxfi7n8DtUIB5i/4CiZNPkfuIY2IbzWNzOMIN02FEUAMJDlFQE4Rbll8DVypkwEAf3j7gOaqWkTBYBgh8rNp22GcaO/F+FQbfnjpZLmHM2Ja3WdEc2HEj9VkwN2XTwEA1Hx5Cs9wMzSKYQwjRH5e39sIAPjP689DnNkg82hGTvqlrbEs4qv4aFVheoLvz/e/WovddadlHA2RfBhGiACctvefF/KdCwvwH5PTZRxN8Hz7jHhkHkiYabky4m9+YRpcbgF3PLsLrV0OuYdDFHUMIxS72uqBhj1wH9+N519/EwBQkGbDT/wONVML3zSNvMMIO4PGKyOSitkCFo1pRGrHPlT+9xtw9mntnSQaHlfTUGzyW8prAHA7ALtgwY+unQ+rST3TM5L+7eC1NU+j+cqId7lv3Ku34wkAsAD2Jgse2vwMfv7NyxR5QjRRJDCMUGzyLuXdOfchrN4mTtHcdc0FuGzyNJkHFhrN7jOi9TByxnLfz/Z+iOk1P0bNp1/g99UFWFmmniZqotFgGKGY9pv33agVCvD9hRNxWak6lvEOpX+fEZkHEmZ6rYcRQAwk3j1GpgNAjXj1o299gexkC5bMU/4+N0SjxZ4RikmHW7sBAE63B2XnZOIni9RZEZFodpomRqcpvu49C+mnL+7FKzxQj2IAwwjFnMMt3ah8pRYAMH1sEh67+XzVTwdo9aA8tb8vofr2ZAfuOa8H03EYT77wMt7+YKfcQyKKKE7TUGzwHoJXf9qO1S99ilT7YcAMrL5quqr2EwlEms4QNLYHa8yFEW9Dq+6l72E5gOUW8Wr76xa8g3/gkpI5sg6PKFIYRkj7/FbO5AF4GgDMgGC0ITE1S+bBhYdOqw2ssTZNc0ZDq1sQ8Ozf38TSEw9gzSs1OKnPxE3zeIYNaQ/DCGmfd+XMT4U7sdeZjUmZ8fj1teciKTVbMyel6tnAqh1+Da0GAN+6CsCGB+ARgJ/87RO09TjxvYuUf5I0UTAYRkibvNMyALD9/e2YD2CvMxvx+XNw/61zkRRnknd8YabdnhG5RyA/qTr0/ekuPPHZYbzyj8PoPnkO7rzuEhj5DSKNYBgh7fGblgGA+RA3NCuZMRn3fqMYFqP6e0TO5DvDRWthJNamaYbi7SO55tAvcY3UQ/KJBfe0bsQvb1mEZI0Fa4pNDCOkPd5pmYdsd+Pd06nQ6YAbLpyF+674D83uaOnbZ0RjaSQmp2nOdEYfyce7d2DWh/fgi8NHcP3j2/DnpXMxMSPhLA9CpGys8ZGmCIKAf+5rAgC8ezoVjbap+Mm3l6D8ygWaDSKAdqdpzEZ+RAEQA0lOEZBT
hFmziwEA6QlmfHmyG1c/9h73IiHVY2WE1M/bH3Kyy4Hfv30QHXWf4nIzcG5uEjYuXYCsJKvcI4w4rW56psUptXD5fZkN//lhKz493oE/bT6I2s+m4K6vXwqbmR/rpD78v5bUra0ewrpi6Fx2ZAD4NQCYAZfeige+uRDGGAgiQH9lRGunvVpNrIwM4u0hSf7HcvwWAKQ+kv0WlK9dh7tv/AqKC1LlHCFR0BhGSNU+OfAlZrrsWOm8AweFXJwzNgkryyYjL2ecZpbtjkRGohi6TnU7ZR5JeMWp8ATliDujhwQADtTuxORtFeg63YSbnqjB0tJ8/OSr05Bg4Uc8qQP/TyVVOtzSjUfe2I8jn+7Faxag0ZyPJYu+im+V5Mferp0AEq3ij3KfW1uVkbHJcXIPQZn89iIBgMkAsA1YNCMLtZ8C/11zFP+sbcJPvzYN1xblaLpfirSBYYRUpeHoF3jhX3tQ/flJuD0CpujFxr0NS+cgaeIEeQcnI6M3gLk01sEab2FlJBg/nClgweR4/OGdA9jXZsZdm3vxl5ojuO+q6Th//Bi5h0cUEMMIqcIXTZ3429vvY+Xn38JKnQMr/bdWMNnE3VRjmMm76qTPo63KCBtYR8jbR4IXl2E2gCcBuGxWfLXvd9hdByx+fDsunZaJOy+dhNkMJaRADCOkWC63B+/uP4m/1BzBvw+0YIbuMFZZHHg89V5cfOECTB+bKN7QlhZT/SFDMXt34uxza6syYuHS3pE5s4+k5QuYXlyGF26Zgqo9Fvxt1zG8/Xkz3v68GQsmp+O7CyZiwaR07uNCisEwQori9gjYU38ar358Aq9+3ABLdwPG6Dpxnh64flw30Azc8fUrxD0XyMdoEH+p9GlsmsbKBtaRO6OPBABSe47g4Qun4K4ZqXjho3q8vf8kTh08jIcOfoink6249PxzcPn8uUhPsMg0aCIRwwjJ7thpO97/8hTe/eIk/n3gJNrsLgBADlpQbb0HcXCIN2yGWIq2pck3WIUy+SojWpumYWUkJH7TNgCQC+AuAHf5T2/2AvZtFlz+zsPIL5yKq2bm4PLpWUhjMCEZMIyQLD493o5nP6jDvw+cxLHTPQDE8JGr68RUqwFz8lNxVY4TcTUOYPEGIH2KeEdOyQzJN03jETS1r7KF+4yEZojlv/56+tzYs/MDlH68CnN0n+PgoU48cwh49mUgMzMH55wzHRdNzsC8CamcyqGoYBih6Girh6PjJGq+bMXfPzmBfSc6AADJAFINOsxJd+NnnQ/A5OkVb3/UezHZgPGlDCBn4Zumcas/jPR5BN8HExtYR2GIaRtJHIDSpEzgs1/jv/D4gK/ZT1vwg3fvwp4d6Xj6O/OA+HT+/FHEMYxQ2PW5PTjZ5UBThwON7b041XAIN9QshkXoxcUALgZ8u0b6tEMMHt/8G2BL77+elZARMflXRlSux9kHb2uyb/8UioAhqidtLSeQ8Eo5/lv3W8ANYAPEn8vlO/hzSBEV0k/6unXr8PDDD6OxsRGzZs3CY489huLi4oC3f+GFF3DffffhyJEjmDx5Mn7729/iiiuuCHnQpDDes2Gefv8o/vlZE9rszgEHtk3SHcc3zb1Y6bwD7fEFuGxGNr52bjZSbeaBj8PgETIpjGhBl0MMI1aTXlOvS5HOqJ6k5BQB4z8csCoHLy4D6moCTvkA4M8ujVrQYWTz5s2oqKjA+vXrUVJSgrVr12LRokXYv38/MjMzB91++/btuPnmm1FVVYWrrroKzz33HK677jrs2rUL5557blheBEWQN2gM4P/B01YPrCsGXHbcAuAWADgjYwDiWTHf/sbNmHnueTG5Q2qkaWnb7y5HHwAggQe+ycM/oJzRCBuQyQYseXpgVXMkGGLISycEecxnSUkJ5s2bhz/84Q8AAI/Hg7y8PNx555346U9/Ouj2S5YsQXd3N/7+97/7rrvgggtQVFSE9evXj+g5Ozo6kJycjPb2diQlJQUzXPI3VLAYjr0F2HwL
4LIPvN7/g0f6l9PiDThhGo8uRx/S4s1Itplg8N+Cmh86EXf7MztRV1uD1yw/B773rmqXP+/64B2c/4/rsDzhUaz78XfkHg6d7XMj0OfESJw5BTTSzyh+nqjGSH9/B/VPD6fTiZ07d2LVqlW+6/R6PcrKylBTUzPkfWpqalBRUTHgukWLFuHll18O+DwOhwMOh8P39/b2dgDiiwq7ziagqyn8j6s09lbgxe8BfT3B3c8YByz+7/7ltNLjPLl44G1SzkV8yjjEe6/qHuqxIvH+kU8cnHA5etABAX986n9x3PiB3EMKSYr9CCY5BJhsrsj8zFNw9MlAQnLgrydMBG6pBnpOBfe4rQeBV38IfFYNpE0K7jPKGAcs/hOX+YdTQhaQmBX2h5V+hs9a9xCCcPz4cQGAsH379gHX33PPPUJxcfGQ9zGZTMJzzz034Lp169YJmZmZAZ+nsrJSAMALL7zwwgsvvGjgUl9fP2y+UOSk7KpVqwZUUzweD06dOoW0tDRVnT7Z0dGBvLw81NfXx8T0Uqy9XoCvORZec6y9XiD2XnOsvV4geq9ZEAR0dnYiJydn2NsFFUbS09NhMBjQ1DRwWqOpqQnZ2UMfVJadnR3U7QHAYrHAYhm49jMlJSWYoSpKUlJSzPwPDsTe6wX4mmNBrL1eIPZec6y9XiA6rzk5Ofmstwlq3ZzZbMacOXNQXV3tu87j8aC6uhqlpaVD3qe0tHTA7QHgzTffDHh7IiIiii1BT9NUVFTg1ltvxdy5c1FcXIy1a9eiu7sb5eXlAIClS5ciNzcXVVVVAICVK1di4cKF+N3vfocrr7wSzz//PD766CP86U9/Cu8rISIiIlUKOowsWbIEJ0+exOrVq9HY2IiioiJs2bIFWVliF25dXR30+v6Cy/z58/Hcc8/hF7/4BX72s59h8uTJePnll2NijxGLxYLKyspBU05aFWuvF+BrjgWx9nqB2HvNsfZ6AeW95qD3GSEiIiIKJ+61TERERLJiGCEiIiJZMYwQERGRrBhGiIiISFYMI2G0detW6HS6IS8ffvhhwPtdfPHFg27/gx/8IIojD92ECRMGjf3BBx8c9j69vb1Yvnw50tLSkJCQgBtuuGHQxnhKdeTIEdx2220oKChAXFwcCgsLUVlZCafTOez91PYer1u3DhMmTIDVakVJSQl27Ngx7O1feOEFTJs2DVarFeeddx5ef/31KI10dKqqqjBv3jwkJiYiMzMT1113Hfbv3z/sfZ566qlB76XVao3SiEfvl7/85aDxT5s2bdj7qPX9lQz1OaXT6bB8+fIhb6+29/hf//oXrr76auTk5ECn0w06+00QBKxevRpjx45FXFwcysrKcODAgbM+brCfA6PBMBJG8+fPx4kTJwZcvvvd76KgoABz584d9r7Lli0bcL+HHnooSqMevfvvv3/A2O+8885hb/+jH/0Ir776Kl544QW8++67aGhowOLFi4e9j1J8/vnn8Hg8eOKJJ1BbW4tHH30U69evx89+9rOz3lct7/HmzZtRUVGByspK7Nq1C7NmzcKiRYvQ3Nw85O23b9+Om2++Gbfddht2796N6667Dtdddx0+/fTTKI88eO+++y6WL1+O999/H2+++SZcLhcuv/xydHcPedSjT1JS0oD38ujRo1EacXjMmDFjwPjfe++9gLdV8/sr+fDDDwe83jfffBMAcOONNwa8j5re4+7ubsyaNQvr1q0b8usPPfQQfv/732P9+vX44IMPEB8fj0WLFqG3tzfgYwb7OTBqIzgfj0LkdDqFjIwM4f777x/2dgsXLhRWrlwZnUGFWX5+vvDoo4+O+PZtbW2CyWQSXnjhBd91+/btEwAINTU1ERhh5D300ENCQUHBsLdR03tcXFwsLF++3Pd3t9st5OTkCFVVVUPe/qabbhKuvPLKAdeVlJQI3//+9yM6zkhobm4WAAjvvvtuwNts2rRJSE5Ojt6gwqyyslKYNWvWiG+vpfdXsnLlSqGwsFDw
eDxDfl3N7zEA4aWXXvL93ePxCNnZ2cLDDz/su66trU2wWCzC//zP/wR8nGA/B0aLlZEI+r//+z+0trb6dqcdzrPPPov09HSce+65WLVqFex2exRGGB4PPvgg0tLSMHv2bDz88MPo6+sLeNudO3fC5XKhrKzMd920adMwfvx41NTURGO4Ydfe3o7U1NSz3k4N77HT6cTOnTsHvD96vR5lZWUB35+ampoBtweARYsWqfL9bG9vB4Czvp9dXV3Iz89HXl4err32WtTW1kZjeGFz4MAB5OTkYOLEifjWt76Furq6gLfV0vsLiP+PP/PMM/jOd74z7MGran+PJYcPH0ZjY+OA9zA5ORklJSUB38NQPgdGS5Gn9mrFk08+iUWLFmHcuHHD3u6b3/wm8vPzkZOTg08++QT33nsv9u/fjxdffDFKIw3dD3/4Q5x//vlITU3F9u3bsWrVKpw4cQJr1qwZ8vaNjY0wm82DDj7MyspCY2NjFEYcXgcPHsRjjz2GRx55ZNjbqeU9bmlpgdvt9u2oLMnKysLnn38+5H0aGxuHvL3a3k+Px4O77roLF1544bA7RE+dOhUbN27EzJkz0d7ejkceeQTz589HbW3tWX/WlaCkpARPPfUUpk6dihMnTuBXv/oVFixYgE8//RSJiYmDbq+V91fy8ssvo62tDd/+9rcD3kbt77E/6X0K5j0M5XNg1CJSb9GYe++9VwAw7GXfvn0D7lNfXy/o9Xrhf//3f4N+vurqagGAcPDgwXC9hKCE8nolTz75pGA0GoXe3t4hv/7ss88KZrN50PXz5s0TfvKTn4T1dQQjlNd87NgxobCwULjtttuCfj653+NAjh8/LgAQtm/fPuD6e+65RyguLh7yPiaTSXjuuecGXLdu3TohMzMzYuOMhB/84AdCfn6+UF9fH9T9nE6nUFhYKPziF7+I0Mgi6/Tp00JSUpLw5z//eciva+X9lVx++eXCVVddFdR91PQe44xpmm3btgkAhIaGhgG3u/HGG4WbbrppyMcI5XNgtFgZGYG777572BQNABMnThzw902bNiEtLQ3XXHNN0M9XUlICQPxXd2FhYdD3H61QXq+kpKQEfX19OHLkCKZOnTro69nZ2XA6nWhraxtQHWlqakJ2dvZohj0qwb7mhoYGXHLJJZg/f35Ihz7K/R4Hkp6eDoPBMGh103DvT3Z2dlC3V6IVK1bg73//O/71r38F/S9fk8mE2bNn4+DBgxEaXWSlpKRgypQpAcevhfdXcvToUbz11ltBVyTV/B5L71NTUxPGjh3ru76pqQlFRUVD3ieUz4HRYhgZgYyMDGRkZIz49oIgYNOmTVi6dClMJlPQz7dnzx4AGPA/TjQF+3r97dmzB3q9HpmZmUN+fc6cOTCZTKiursYNN9wAANi/fz/q6upQWloa8phHK5jXfPz4cVxyySWYM2cONm3aNOBgyJGS+z0OxGw2Y86cOaiursZ1110HQJy+qK6uxooVK4a8T2lpKaqrq3HXXXf5rnvzzTdlfT9HShAE3HnnnXjppZewdetWFBQUBP0Ybrcbe/fuxRVXXBGBEUZeV1cXDh06hFtuuWXIr6v5/T3Tpk2bkJmZiSuvvDKo+6n5PS4oKEB2djaqq6t94aOjowMffPABbr/99iHvE8rnwKhFpN4S4956662AUxnHjh0Tpk6dKnzwwQeCIAjCwYMHhfvvv1/46KOPhMOHDwuvvPKKMHHiROGiiy6K9rCDtn37duHRRx8V9uzZIxw6dEh45plnhIyMDGHp0qW+25z5egVBLIePHz9eePvtt4WPPvpIKC0tFUpLS+V4CUE7duyYMGnSJOErX/mKcOzYMeHEiRO+i/9t1PweP//884LFYhGeeuop4bPPPhO+973vCSkpKUJjY6MgCIJwyy23CD/96U99t9+2bZtgNBqFRx55RNi3b59QWVkpmEwmYe/evXK9hBG7/fbbheTkZGHr1q0D3ku73e67zZmv91e/+pXwxhtv
CIcOHRJ27twpfOMb3xCsVqtQW1srx0sI2t133y1s3bpVOHz4sLBt2zahrKxMSE9PF5qbmwVB0Nb768/tdgvjx48X7r333kFfU/t73NnZKezevVvYvXu3AEBYs2aNsHv3buHo0aOCIAjCgw8+KKSkpAivvPKK8MknnwjXXnutUFBQIPT09Pge49JLLxUee+wx39/P9jkQbgwjEXDzzTcL8+fPH/Jrhw8fFgAI77zzjiAIglBXVydcdNFFQmpqqmCxWIRJkyYJ99xzj9De3h7FEYdm586dQklJiZCcnCxYrVbhnHPOEf7zP/9zQL/Ima9XEAShp6dHuOOOO4QxY8YINptNuP766wf8MleyTZs2BewpkWjhPX7ssceE8ePHC2azWSguLhbef/9939cWLlwo3HrrrQNu/9e//lWYMmWKYDabhRkzZgivvfZalEccmkDv5aZNm3y3OfP13nXXXb7vTVZWlnDFFVcIu3btiv7gQ7RkyRJh7NixgtlsFnJzc4UlS5YM6F3S0vvr74033hAACPv37x/0NbW/x++8886Q/x9Lr8nj8Qj33XefkJWVJVgsFuErX/nKoO9Dfn6+UFlZOeC64T4Hwk0nCIIQmZoLERER0dlxnxEiIiKSFcMIERERyYphhIiIiGTFMEJERESyYhghIiIiWTGMEBERkawYRoiIiEhWDCNEREQkK4YRIiIikhXDCBEREcmKYYSIiIhkxTBCREREsvr/qHeoAlVwCeoAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "from matplotlib import pyplot as plt\n",
    "\n",
    "# Compare the network's output (exponentiated here, so net_out presumably\n",
    "# holds log-likelihoods -- confirm against the forward pass upstream) with\n",
    "# the empirical distribution of simulated signed RTs (rt * choice).\n",
    "fig, ax = plt.subplots()\n",
    "ax.plot(\n",
    "    np.linspace(-5, 5, 2000).astype(np.float32),\n",
    "    np.exp(net_out),\n",
    "    label=\"network density\",\n",
    ")\n",
    "ax.hist(\n",
    "    sim_out[\"rts\"] * sim_out[\"choices\"],\n",
    "    bins=100,\n",
    "    histtype=\"step\",\n",
    "    fill=None,\n",
    "    density=True,\n",
    "    label=\"simulations\",\n",
    ")\n",
    "ax.set(xlabel=\"rt * choice\", ylabel=\"density\")\n",
    "ax.legend()\n",
    "# plt.show() suppresses the hist return-tuple repr that the original\n",
    "# cell dumped as output.\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "ssms_dev",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.12"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
