{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Download\n",
    "# https://data.keithito.com/data/speech/LJSpeech-1.1.tar.bz2 -> dataset/LJSpeech-1.1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "# Pin this notebook to GPU index 1. This must be set before torch / TTS\n",
    "# are imported, otherwise CUDA may already have enumerated all devices.\n",
    "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"1\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import warnings\n",
    "\n",
    "# Deliberately silence all warnings so the long training logs below stay\n",
    "# readable. Remove this line when debugging library behavior.\n",
    "warnings.filterwarnings(\"ignore\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " > Setting up Audio Processor...\n",
      " | > sample_rate:22050\n",
      " | > resample:False\n",
      " | > num_mels:80\n",
      " | > log_func:np.log10\n",
      " | > min_level_db:-100\n",
      " | > frame_shift_ms:None\n",
      " | > frame_length_ms:None\n",
      " | > ref_level_db:20\n",
      " | > fft_size:1024\n",
      " | > power:1.5\n",
      " | > preemphasis:0.0\n",
      " | > griffin_lim_iters:60\n",
      " | > signal_norm:True\n",
      " | > symmetric_norm:True\n",
      " | > mel_fmin:0\n",
      " | > mel_fmax:None\n",
      " | > pitch_fmin:1.0\n",
      " | > pitch_fmax:640.0\n",
      " | > spec_gain:20.0\n",
      " | > stft_pad_mode:reflect\n",
      " | > max_norm:4.0\n",
      " | > clip_norm:True\n",
      " | > do_trim_silence:True\n",
      " | > trim_db:45\n",
      " | > do_sound_norm:False\n",
      " | > do_amp_to_db_linear:True\n",
      " | > do_amp_to_db_mel:True\n",
      " | > do_rms_norm:False\n",
      " | > db_level:None\n",
      " | > stats_path:None\n",
      " | > base:10\n",
      " | > hop_length:256\n",
      " | > win_length:1024\n",
      "[-0.00167847 -0.00149536  0.00015259 ...  0.00091553  0.00183105\n",
      "  0.0012207 ]\n",
      "[64, 22, 130, 64, 28, 130, 110, 41, 112, 5, 24, 64, 49, 21, 130, 31, 29, 22, 130, 14, 111, 51, 7, 98, 49, 5, 110, 64, 14, 64, 82, 11, 130, 64, 28, 130, 31, 49, 130, 9, 110, 52, 112, 21, 22, 130, 116, 110, 64, 34, 130, 22, 49, 5, 11, 130, 110, 8, 64, 15, 7, 130, 29, 22, 130, 64, 16, 31, 49, 130, 9, 110, 44, 112, 78, 15, 28, 130, 92, 24, 31, 49, 130, 14, 110, 51, 82, 50, 28]\n",
      "(80, 497)\n"
     ]
    }
   ],
   "source": [
    "from TTS.tts.configs.shared_configs import BaseDatasetConfig\n",
    "from TTS.tts.configs.glow_tts_config import GlowTTSConfig\n",
    "from TTS.utils.audio import AudioProcessor\n",
    "from TTS.tts.utils.text.tokenizer import TTSTokenizer\n",
    "\n",
    "# LJSpeech dataset: the 'ljspeech' formatter reads transcripts from metadata.csv.\n",
    "dataset_config = BaseDatasetConfig(dataset_name='ljspeech',\n",
    "                                   formatter='ljspeech',\n",
    "                                   path='dataset/LJSpeech-1.1/',\n",
    "                                   meta_file_train='metadata.csv')\n",
    "\n",
    "# GlowTTS training configuration. Phonemes are computed with espeak (en-us)\n",
    "# and cached on disk so later runs skip phonemization.\n",
    "config = GlowTTSConfig(\n",
    "    batch_size=64,\n",
    "    eval_batch_size=16,\n",
    "    num_loader_workers=4,\n",
    "    num_eval_loader_workers=4,\n",
    "    run_eval=True,\n",
    "    test_delay_epochs=-1,\n",
    "    epochs=50,\n",
    "    text_cleaner='phoneme_cleaners',\n",
    "    use_phonemes=True,\n",
    "    phoneme_language='en-us',\n",
    "    phoneme_cache_path='dataset/phoneme_cache',\n",
    "    print_step=25,\n",
    "    print_eval=False,\n",
    "    mixed_precision=True,\n",
    "    output_path='./models/train',\n",
    "    datasets=[dataset_config],\n",
    "    save_step=1000,\n",
    ")\n",
    "\n",
    "ap = AudioProcessor.init_from_config(config)\n",
    "# The tokenizer may update the config (e.g. character set), so re-bind it.\n",
    "tokenizer, config = TTSTokenizer.init_from_config(config)\n",
    "\n",
    "# Sanity check: load one clip once and reuse it for both the raw waveform\n",
    "# and the mel spectrogram (previously the file was decoded twice).\n",
    "wav = ap.load_wav('dataset/LJSpeech-1.1/wavs/LJ001-0108.wav')\n",
    "print(wav)\n",
    "\n",
    "print(\n",
    "    tokenizer.text_to_ids(\n",
    "        'it is obvious that legibility is the first thing to be aimed at in the forms of the letters'\n",
    "    ))\n",
    "\n",
    "print(ap.melspectrogram(wav).shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "BaseDatasetConfig(formatter='ljspeech', dataset_name='ljspeech', path='dataset/LJSpeech-1.1/', meta_file_train='metadata.csv', ignored_speakers=None, language='', phonemizer='', meta_file_val='', meta_file_attn_mask='')"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dataset_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Found 13100 files in /maindata/data/user/ai_story/zhigong.wang/endless-frontier/simple-multimodal/tts/dataset/LJSpeech-1.1\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(12969,\n",
       " [{'text': 'I take this means of saying \"thank you.\"\\n',\n",
       "   'audio_file': 'dataset/LJSpeech-1.1/wavs/LJ023-0005.wav',\n",
       "   'speaker_name': 'ljspeech',\n",
       "   'root_path': 'dataset/LJSpeech-1.1/',\n",
       "   'language': '',\n",
       "   'audio_unique_name': 'ljspeech#wavs/LJ023-0005'},\n",
       "  {'text': 'Felons who could pay the price were permitted, irrespective of their character or offenses,\\n',\n",
       "   'audio_file': 'dataset/LJSpeech-1.1/wavs/LJ003-0141.wav',\n",
       "   'speaker_name': 'ljspeech',\n",
       "   'root_path': 'dataset/LJSpeech-1.1/',\n",
       "   'language': '',\n",
       "   'audio_unique_name': 'ljspeech#wavs/LJ003-0141'}],\n",
       " 131,\n",
       " [{'text': 'A molecular change takes place in the nerve of the tentacle,\\n',\n",
       "   'audio_file': 'dataset/LJSpeech-1.1/wavs/LJ025-0134.wav',\n",
       "   'speaker_name': 'ljspeech',\n",
       "   'root_path': 'dataset/LJSpeech-1.1/',\n",
       "   'language': '',\n",
       "   'audio_unique_name': 'ljspeech#wavs/LJ025-0134'},\n",
       "  {'text': 'through advances made to various builders, and that it could only maintain its credit by wholesale discounting.\\n',\n",
       "   'audio_file': 'dataset/LJSpeech-1.1/wavs/LJ010-0304.wav',\n",
       "   'speaker_name': 'ljspeech',\n",
       "   'root_path': 'dataset/LJSpeech-1.1/',\n",
       "   'language': '',\n",
       "   'audio_unique_name': 'ljspeech#wavs/LJ010-0304'}])"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from TTS.tts.datasets import load_tts_samples\n",
    "\n",
    "# Split the 13100 LJSpeech clips into train/eval lists of sample dicts,\n",
    "# using the eval-split sizes carried on the GlowTTSConfig.\n",
    "train_samples, eval_samples = load_tts_samples(\n",
    "    dataset_config,\n",
    "    eval_split=True,\n",
    "    eval_split_max_size=config.eval_split_max_size,\n",
    "    eval_split_size=config.eval_split_size,\n",
    ")\n",
    "\n",
    "len(train_samples), train_samples[:2], len(eval_samples), eval_samples[:2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2861.0257\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " > Training Environment:\n",
      " | > Backend: Torch\n",
      " | > Mixed precision: True\n",
      " | > Precision: fp16\n",
      " | > Current device: 0\n",
      " | > Num. of GPUs: 1\n",
      " | > Num. of CPUs: 128\n",
      " | > Num. of Torch Threads: 64\n",
      " | > Torch seed: 54321\n",
      " | > Torch CUDNN: True\n",
      " | > Torch CUDNN deterministic: False\n",
      " | > Torch CUDNN benchmark: False\n",
      " | > Torch TF32 MatMul: False\n",
      " > Start Tensorboard: tensorboard --logdir=./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      " > Model has 28610257 parameters\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<trainer.trainer.Trainer at 0x7f84095d9510>"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from TTS.tts.models.glow_tts import GlowTTS\n",
    "from trainer import Trainer, TrainerArgs\n",
    "\n",
    "model = GlowTTS(config, ap, tokenizer, speaker_manager=None)\n",
    "# Skip the data-dependent initialization pass on the first forward.\n",
    "model.run_data_dep_init = False\n",
    "\n",
    "# Total parameter count, printed in units of 10,000 (~28.6M parameters).\n",
    "print(sum(i.numel() for i in model.parameters()) / 10000)\n",
    "\n",
    "trainer = Trainer(args=TrainerArgs(),\n",
    "                  config=config,\n",
    "                  output_path='./models/train',\n",
    "                  model=model,\n",
    "                  train_samples=train_samples,\n",
    "                  eval_samples=eval_samples)\n",
    "\n",
    "trainer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 0/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "> DataLoader initialization\n",
      "| > Tokenizer:\n",
      "\t| > add_blank: False\n",
      "\t| > use_eos_bos: False\n",
      "\t| > use_phonemes: True\n",
      "\t| > phonemizer:\n",
      "\t\t| > phoneme language: en-us\n",
      "\t\t| > phoneme backend: espeak\n",
      "| > Number of instances : 12969\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 16:39:20) \u001b[0m\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Preprocessing samples\n",
      " | > Max text length: 188\n",
      " | > Min text length: 13\n",
      " | > Avg text length: 100.90014650319993\n",
      " | \n",
      " | > Max audio length: 222643.0\n",
      " | > Min audio length: 24499.0\n",
      " | > Avg audio length: 144984.29755570978\n",
      " | > Num. instances discarded samples: 0\n",
      " | > Batch group size: 0.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:39:33 -- STEP: 0/203 -- GLOBAL_STEP: 0\u001b[0m\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 9.5032  (9.503183364868164)\n",
      "     | > loader_time: 3.7085  (3.7085397243499756)\n",
      "\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      " [!] `train_step()` retuned `None` outputs. Skipping training step.\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:39:52 -- STEP: 25/203 -- GLOBAL_STEP: 25\u001b[0m\n",
      "     | > loss: 3.794452667236328  (3.7767117659250897)\n",
      "     | > log_mle: 0.7977033853530884  (0.7975333174069722)\n",
      "     | > loss_dur: 2.99674916267395  (2.979178460439046)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.1910, device='cuda:0')  (tensor(10.4161, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.8965  (0.7425455665588379)\n",
      "     | > loader_time: 0.0066  (5.899853811264038)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:40:11 -- STEP: 50/203 -- GLOBAL_STEP: 50\u001b[0m\n",
      "     | > loss: 3.8144960403442383  (3.7869114875793457)\n",
      "     | > log_mle: 0.8004264831542969  (0.7985386475920677)\n",
      "     | > loss_dur: 3.0140695571899414  (2.9883728444576265)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.2717, device='cuda:0')  (tensor(10.9141, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 1.1653  (0.7533945846557617)\n",
      "     | > loader_time: 0.0059  (2.957255492210388)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:40:34 -- STEP: 75/203 -- GLOBAL_STEP: 75\u001b[0m\n",
      "     | > loss: 3.8247900009155273  (3.7920739797445444)\n",
      "     | > log_mle: 0.8017454743385315  (0.7989252603971041)\n",
      "     | > loss_dur: 3.0230445861816406  (2.9931487230154183)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.3235, device='cuda:0')  (tensor(11.0418, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.8503  (0.7908132489522297)\n",
      "     | > loader_time: 0.0152  (1.980756775538126)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:41:02 -- STEP: 100/203 -- GLOBAL_STEP: 100\u001b[0m\n",
      "     | > loss: 3.790989398956299  (3.796598317888048)\n",
      "     | > log_mle: 0.8037334680557251  (0.799219368563758)\n",
      "     | > loss_dur: 2.987255811691284  (2.997378942701552)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.2713, device='cuda:0')  (tensor(11.1061, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.8123  (0.8165395259857178)\n",
      "     | > loader_time: 0.0118  (1.5472333550453181)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:41:35 -- STEP: 125/203 -- GLOBAL_STEP: 125\u001b[0m\n",
      "     | > loss: 3.862664222717285  (3.799969650351483)\n",
      "     | > log_mle: 0.8029153347015381  (0.7997503871503083)\n",
      "     | > loss_dur: 3.059748888015747  (3.000219262164572)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.3317, device='cuda:0')  (tensor(11.1436, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.9281  (0.8639330024719238)\n",
      "     | > loader_time: 0.2005  (1.290147754669189)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:42:07 -- STEP: 150/203 -- GLOBAL_STEP: 150\u001b[0m\n",
      "     | > loss: 3.8383612632751465  (3.8030326434544155)\n",
      "     | > log_mle: 0.8001901507377625  (0.7997827397925513)\n",
      "     | > loss_dur: 3.0381710529327393  (3.0032499057906015)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.3276, device='cuda:0')  (tensor(11.1741, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.9237  (0.892638201713562)\n",
      "     | > loader_time: 0.1297  (1.1093955612182615)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:42:41 -- STEP: 175/203 -- GLOBAL_STEP: 175\u001b[0m\n",
      "     | > loss: 3.7822203636169434  (3.8073559009667597)\n",
      "     | > log_mle: 0.7993040084838867  (0.7998796336578601)\n",
      "     | > loss_dur: 2.9829163551330566  (3.007476272005024)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.2061, device='cuda:0')  (tensor(11.1994, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 1.2821  (0.9327079950060163)\n",
      "     | > loader_time: 0.117  (0.9795772865840361)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:43:16 -- STEP: 200/203 -- GLOBAL_STEP: 200\u001b[0m\n",
      "     | > loss: 3.8289520740509033  (3.8077643218793367)\n",
      "     | > log_mle: 0.8000791668891907  (0.799963831274133)\n",
      "     | > loss_dur: 3.0288729667663574  (3.0078005000164634)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.3648, device='cuda:0')  (tensor(11.2127, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.6118  (0.9627963817119598)\n",
      "     | > loader_time: 0.0106  (0.8833469307422632)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "> DataLoader initialization\n",
      "| > Tokenizer:\n",
      "\t| > add_blank: False\n",
      "\t| > use_eos_bos: False\n",
      "\t| > use_phonemes: True\n",
      "\t| > phonemizer:\n",
      "\t\t| > phoneme language: en-us\n",
      "\t\t| > phoneme backend: espeak\n",
      "| > Number of instances : 131\n",
      " | > Preprocessing samples\n",
      " | > Max text length: 174\n",
      " | > Min text length: 20\n",
      " | > Avg text length: 100.76335877862596\n",
      " | \n",
      " | > Max audio length: 222643.0\n",
      " | > Min audio length: 34739.0\n",
      " | > Avg audio length: 144033.41221374046\n",
      " | > Num. instances discarded samples: 0\n",
      " | > Batch group size: 0.\n",
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time: 0.0026942193508148193 \u001b[0m(+0)\n",
      "     | > avg_loss: 3.8214183151721954 \u001b[0m(+0)\n",
      "     | > avg_log_mle: 0.7976875975728035 \u001b[0m(+0)\n",
      "     | > avg_loss_dur: 3.023730754852295 \u001b[0m(+0)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_203.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 1/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 16:43:39) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:44:01 -- STEP: 22/203 -- GLOBAL_STEP: 225\u001b[0m\n",
      "     | > loss: 3.7251501083374023  (3.746811855923046)\n",
      "     | > log_mle: 0.7987940311431885  (0.7940424545244738)\n",
      "     | > loss_dur: 2.926356077194214  (2.952769409526478)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.0473, device='cuda:0')  (tensor(11.0263, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.5682  (0.7114795446395874)\n",
      "     | > loader_time: 0.0059  (0.00853176550431685)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:44:27 -- STEP: 47/203 -- GLOBAL_STEP: 250\u001b[0m\n",
      "     | > loss: 3.7458088397979736  (3.748543830628091)\n",
      "     | > log_mle: 0.7980268001556396  (0.7955643674160572)\n",
      "     | > loss_dur: 2.947782039642334  (2.9529794733575048)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.1086, device='cuda:0')  (tensor(11.0665, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.9063  (0.8598827808461291)\n",
      "     | > loader_time: 0.0067  (0.016887482176435754)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:44:54 -- STEP: 72/203 -- GLOBAL_STEP: 275\u001b[0m\n",
      "     | > loss: 3.715701103210449  (3.7544873555501304)\n",
      "     | > log_mle: 0.7939386367797852  (0.7961631268262863)\n",
      "     | > loss_dur: 2.921762466430664  (2.9583242303795285)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.0897, device='cuda:0')  (tensor(11.0978, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.327  (0.9240834580527412)\n",
      "     | > loader_time: 0.0066  (0.021113750007417467)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:45:37 -- STEP: 97/203 -- GLOBAL_STEP: 300\u001b[0m\n",
      "     | > loss: 3.768784523010254  (3.7585772853536703)\n",
      "     | > log_mle: 0.7997838258743286  (0.7965294903086633)\n",
      "     | > loss_dur: 2.969000816345215  (2.9620477981174114)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.1772, device='cuda:0')  (tensor(11.1168, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 3.5  (1.0320661215438056)\n",
      "     | > loader_time: 0.4946  (0.1155414040555659)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:46:10 -- STEP: 122/203 -- GLOBAL_STEP: 325\u001b[0m\n",
      "     | > loss: 3.756880283355713  (3.759423722986315)\n",
      "     | > log_mle: 0.7979562878608704  (0.7970012670657673)\n",
      "     | > loss_dur: 2.9589240550994873  (2.962422462760425)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.1473, device='cuda:0')  (tensor(11.1217, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 1.0288  (1.028065599378993)\n",
      "     | > loader_time: 0.1897  (0.14972329725984668)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:46:40 -- STEP: 147/203 -- GLOBAL_STEP: 350\u001b[0m\n",
      "     | > loss: 3.744713068008423  (3.7606484403415603)\n",
      "     | > log_mle: 0.7992596626281738  (0.797026712067273)\n",
      "     | > loss_dur: 2.945453405380249  (2.9636217331399726)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.0971, device='cuda:0')  (tensor(11.1262, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.9939  (1.0177250670737965)\n",
      "     | > loader_time: 0.1571  (0.16107249097759221)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:47:13 -- STEP: 172/203 -- GLOBAL_STEP: 375\u001b[0m\n",
      "     | > loss: 3.785404920578003  (3.7629243703775628)\n",
      "     | > log_mle: 0.7944880127906799  (0.7970340733611303)\n",
      "     | > loss_dur: 2.9909169673919678  (2.965890302214511)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.1924, device='cuda:0')  (tensor(11.1309, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 1.1123  (1.0274827910024065)\n",
      "     | > loader_time: 0.2329  (0.17356368414191317)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:47:49 -- STEP: 197/203 -- GLOBAL_STEP: 400\u001b[0m\n",
      "     | > loss: 3.7757506370544434  (3.7615684761008636)\n",
      "     | > log_mle: 0.7978191375732422  (0.7970464223532504)\n",
      "     | > loss_dur: 2.977931499481201  (2.964522059193722)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(11.1400, device='cuda:0')  (tensor(11.1261, device='cuda:0'))\n",
      "     | > current_lr: 2.5e-07 \n",
      "     | > step_time: 0.6147  (1.0485876707861264)\n",
      "     | > loader_time: 0.01  (0.18021223992865704)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.003405362367630005 \u001b[0m(+0.0007111430168151855)\n",
      "     | > avg_loss:\u001b[92m 3.767191469669342 \u001b[0m(-0.054226845502853394)\n",
      "     | > avg_log_mle:\u001b[92m 0.7940273433923721 \u001b[0m(-0.003660254180431366)\n",
      "     | > avg_loss_dur:\u001b[92m 2.973164141178131 \u001b[0m(-0.05056661367416382)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_406.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 2/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 16:48:15) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:48:36 -- STEP: 19/203 -- GLOBAL_STEP: 425\u001b[0m\n",
      "     | > loss: 3.718637704849243  (3.700894832611084)\n",
      "     | > log_mle: 0.7921385765075684  (0.7901490864000822)\n",
      "     | > loss_dur: 2.926499128341675  (2.910745721114309)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.9355, device='cuda:0')  (tensor(10.8310, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 0.4278  (0.8257607786278975)\n",
      "     | > loader_time: 0.0043  (0.029123218435990184)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:48:58 -- STEP: 44/203 -- GLOBAL_STEP: 450\u001b[0m\n",
      "     | > loss: 3.6622531414031982  (3.6846535856073555)\n",
      "     | > log_mle: 0.7923269271850586  (0.7915176871148024)\n",
      "     | > loss_dur: 2.8699262142181396  (2.8931358835913916)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.7194, device='cuda:0')  (tensor(10.8104, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 0.4185  (0.8497923829338767)\n",
      "     | > loader_time: 0.0078  (0.029904105446555397)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:49:24 -- STEP: 69/203 -- GLOBAL_STEP: 475\u001b[0m\n",
      "     | > loss: 3.7225728034973145  (3.678264780320983)\n",
      "     | > log_mle: 0.7904689311981201  (0.7918347327605538)\n",
      "     | > loss_dur: 2.9321038722991943  (2.8864300354667334)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.8315, device='cuda:0')  (tensor(10.7878, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 1.1199  (0.9115242716194926)\n",
      "     | > loader_time: 0.0901  (0.02787089002305183)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:49:57 -- STEP: 94/203 -- GLOBAL_STEP: 500\u001b[0m\n",
      "     | > loss: 3.7517080307006836  (3.6757841135593172)\n",
      "     | > log_mle: 0.7928898334503174  (0.7916742772498029)\n",
      "     | > loss_dur: 2.958818197250366  (2.8841098242617655)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.8015, device='cuda:0')  (tensor(10.7647, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 0.5419  (0.940073568770226)\n",
      "     | > loader_time: 0.3282  (0.0950146938892121)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:50:28 -- STEP: 119/203 -- GLOBAL_STEP: 525\u001b[0m\n",
      "     | > loss: 3.6917805671691895  (3.6700474314329004)\n",
      "     | > log_mle: 0.7879236936569214  (0.7916385872023446)\n",
      "     | > loss_dur: 2.9038567543029785  (2.8784088327103294)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.6267, device='cuda:0')  (tensor(10.7288, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 0.6448  (0.9552978707962677)\n",
      "     | > loader_time: 0.1884  (0.11955696795167041)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:51:04 -- STEP: 144/203 -- GLOBAL_STEP: 550\u001b[0m\n",
      "     | > loss: 3.5704638957977295  (3.663169064455562)\n",
      "     | > log_mle: 0.7851338386535645  (0.7912400729126401)\n",
      "     | > loss_dur: 2.785330057144165  (2.8719289799531293)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.3418, device='cuda:0')  (tensor(10.6869, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 1.2076  (1.002503224545055)\n",
      "     | > loader_time: 0.2018  (0.13660340673393673)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:51:40 -- STEP: 169/203 -- GLOBAL_STEP: 575\u001b[0m\n",
      "     | > loss: 3.627992630004883  (3.6567566211407003)\n",
      "     | > log_mle: 0.7887670993804932  (0.7908799218002863)\n",
      "     | > loss_dur: 2.8392255306243896  (2.86587669299199)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.3558, device='cuda:0')  (tensor(10.6418, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 2.2789  (1.0344803305067258)\n",
      "     | > loader_time: 0.3234  (0.14751081918118267)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:52:16 -- STEP: 194/203 -- GLOBAL_STEP: 600\u001b[0m\n",
      "     | > loss: 3.5863189697265625  (3.6488439815560567)\n",
      "     | > log_mle: 0.7891148328781128  (0.7904541068470357)\n",
      "     | > loss_dur: 2.79720401763916  (2.8583898691786933)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(10.1841, device='cuda:0')  (tensor(10.5911, device='cuda:0'))\n",
      "     | > current_lr: 5e-07 \n",
      "     | > step_time: 1.4684  (1.0592975714772015)\n",
      "     | > loader_time: 0.2285  (0.15632094796170892)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.015655696392059326 \u001b[0m(+0.012250334024429321)\n",
      "     | > avg_loss:\u001b[92m 3.56278195977211 \u001b[0m(-0.20440950989723206)\n",
      "     | > avg_log_mle:\u001b[92m 0.7838767319917679 \u001b[0m(-0.010150611400604248)\n",
      "     | > avg_loss_dur:\u001b[92m 2.7789052426815033 \u001b[0m(-0.1942588984966278)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_609.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 3/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 16:52:43) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:53:01 -- STEP: 16/203 -- GLOBAL_STEP: 625\u001b[0m\n",
      "     | > loss: 3.5232033729553223  (3.5203006267547607)\n",
      "     | > log_mle: 0.7810491323471069  (0.7799176275730133)\n",
      "     | > loss_dur: 2.742154359817505  (2.7403830140829086)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(9.8476, device='cuda:0')  (tensor(9.8544, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 0.8807  (0.9181956052780151)\n",
      "     | > loader_time: 0.0054  (0.00488494336605072)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:53:22 -- STEP: 41/203 -- GLOBAL_STEP: 650\u001b[0m\n",
      "     | > loss: 3.490612030029297  (3.497939929729555)\n",
      "     | > log_mle: 0.779899537563324  (0.7809829944517555)\n",
      "     | > loss_dur: 2.710712432861328  (2.7169569410928864)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(9.6570, device='cuda:0')  (tensor(9.7779, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 0.501  (0.8194958233251805)\n",
      "     | > loader_time: 0.902  (0.03211404637592595)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:53:50 -- STEP: 66/203 -- GLOBAL_STEP: 675\u001b[0m\n",
      "     | > loss: 3.476801872253418  (3.481888756607518)\n",
      "     | > log_mle: 0.7764824628829956  (0.7804285540725245)\n",
      "     | > loss_dur: 2.700319290161133  (2.701460209759799)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(9.5083, device='cuda:0')  (tensor(9.6928, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 0.9253  (0.8681595939578433)\n",
      "     | > loader_time: 0.009  (0.08773741577610825)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:54:26 -- STEP: 91/203 -- GLOBAL_STEP: 700\u001b[0m\n",
      "     | > loss: 3.419612407684326  (3.470487219946725)\n",
      "     | > log_mle: 0.7729159593582153  (0.7796441093905945)\n",
      "     | > loss_dur: 2.6466965675354004  (2.6908431105561315)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(9.2214, device='cuda:0')  (tensor(9.6025, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 0.5883  (0.9186652780889155)\n",
      "     | > loader_time: 0.5107  (0.16940324385087566)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:55:04 -- STEP: 116/203 -- GLOBAL_STEP: 725\u001b[0m\n",
      "     | > loss: 3.4118475914001465  (3.458328972602713)\n",
      "     | > log_mle: 0.7725561261177063  (0.7789368480443957)\n",
      "     | > loss_dur: 2.639291524887085  (2.6793921260998173)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(9.0748, device='cuda:0')  (tensor(9.5051, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 1.2758  (0.9800503767769912)\n",
      "     | > loader_time: 0.1381  (0.20114423694281738)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:55:42 -- STEP: 141/203 -- GLOBAL_STEP: 750\u001b[0m\n",
      "     | > loss: 3.375640869140625  (3.4468685329383146)\n",
      "     | > log_mle: 0.7717148065567017  (0.777890223983332)\n",
      "     | > loss_dur: 2.603926181793213  (2.668978308954984)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(8.7836, device='cuda:0')  (tensor(9.4032, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 0.8338  (1.0138046978213262)\n",
      "     | > loader_time: 0.5786  (0.2279372418180425)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:56:19 -- STEP: 166/203 -- GLOBAL_STEP: 775\u001b[0m\n",
      "     | > loss: 3.4079513549804688  (3.436476776398808)\n",
      "     | > log_mle: 0.7680590748786926  (0.7766739833785827)\n",
      "     | > loss_dur: 2.639892339706421  (2.6598027915839695)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(8.6862, device='cuda:0')  (tensor(9.2998, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 1.3467  (1.040467989013856)\n",
      "     | > loader_time: 0.2893  (0.2326827049255371)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:56:56 -- STEP: 191/203 -- GLOBAL_STEP: 800\u001b[0m\n",
      "     | > loss: 3.3710100650787354  (3.426334386096575)\n",
      "     | > log_mle: 0.7674264907836914  (0.7753667616095218)\n",
      "     | > loss_dur: 2.603583574295044  (2.6509676216784577)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(8.4223, device='cuda:0')  (tensor(9.1925, device='cuda:0'))\n",
      "     | > current_lr: 7.5e-07 \n",
      "     | > step_time: 1.0349  (1.0699502111105397)\n",
      "     | > loader_time: 0.1388  (0.23109381985289887)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.06497585773468018 \u001b[0m(+0.04932016134262085)\n",
      "     | > avg_loss:\u001b[92m 3.2924279272556305 \u001b[0m(-0.2703540325164795)\n",
      "     | > avg_log_mle:\u001b[92m 0.761247493326664 \u001b[0m(-0.022629238665103912)\n",
      "     | > avg_loss_dur:\u001b[92m 2.5311804711818695 \u001b[0m(-0.2477247714996338)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_812.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 4/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 16:57:28) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:57:41 -- STEP: 13/203 -- GLOBAL_STEP: 825\u001b[0m\n",
      "     | > loss: 3.2259421348571777  (3.287945032119751)\n",
      "     | > log_mle: 0.7594567537307739  (0.7603185268548819)\n",
      "     | > loss_dur: 2.4664852619171143  (2.5276265144348145)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(7.9060, device='cuda:0')  (tensor(8.0296, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 0.5967  (0.66781891309298)\n",
      "     | > loader_time: 0.0057  (0.00512238649221567)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:58:01 -- STEP: 38/203 -- GLOBAL_STEP: 850\u001b[0m\n",
      "     | > loss: 3.2693686485290527  (3.2720391499368766)\n",
      "     | > log_mle: 0.7606993913650513  (0.7600245287543849)\n",
      "     | > loss_dur: 2.508669137954712  (2.5120146023599723)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(7.7682, device='cuda:0')  (tensor(7.9182, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 1.202  (0.7550355258740877)\n",
      "     | > loader_time: 0.0758  (0.016055514937952944)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:58:26 -- STEP: 63/203 -- GLOBAL_STEP: 875\u001b[0m\n",
      "     | > loss: 3.232097625732422  (3.259015264965239)\n",
      "     | > log_mle: 0.7528151273727417  (0.7578123836290269)\n",
      "     | > loss_dur: 2.4792823791503906  (2.501202859575787)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(7.4840, device='cuda:0')  (tensor(7.7905, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 1.0115  (0.8388745709071084)\n",
      "     | > loader_time: 0.0826  (0.020701904145498124)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:58:57 -- STEP: 88/203 -- GLOBAL_STEP: 900\u001b[0m\n",
      "     | > loss: 3.272454261779785  (3.2544842633334072)\n",
      "     | > log_mle: 0.7457890510559082  (0.7552210830829362)\n",
      "     | > loss_dur: 2.526665210723877  (2.4992631619626824)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(7.2964, device='cuda:0')  (tensor(7.6715, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 0.9789  (0.8645111186937853)\n",
      "     | > loader_time: 0.6085  (0.098908085714687)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 16:59:42 -- STEP: 113/203 -- GLOBAL_STEP: 925\u001b[0m\n",
      "     | > loss: 3.228872060775757  (3.2484911416484192)\n",
      "     | > log_mle: 0.742408275604248  (0.7527567026889431)\n",
      "     | > loss_dur: 2.486463785171509  (2.49573442155281)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(7.0490, device='cuda:0')  (tensor(7.5535, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 2.477  (0.9667451930257072)\n",
      "     | > loader_time: 0.8069  (0.17526535861260067)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:00:16 -- STEP: 138/203 -- GLOBAL_STEP: 950\u001b[0m\n",
      "     | > loss: 3.2054784297943115  (3.2444979989010356)\n",
      "     | > log_mle: 0.7362592816352844  (0.7500375608603161)\n",
      "     | > loss_dur: 2.469219207763672  (2.494460423787435)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.8731, device='cuda:0')  (tensor(7.4457, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 1.4746  (0.9826174179712931)\n",
      "     | > loader_time: 0.3082  (0.19745203895845276)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:00:52 -- STEP: 163/203 -- GLOBAL_STEP: 975\u001b[0m\n",
      "     | > loss: 3.2134757041931152  (3.2404521828048787)\n",
      "     | > log_mle: 0.7280001640319824  (0.7471267668747462)\n",
      "     | > loss_dur: 2.485475540161133  (2.4933254060569725)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.7586, device='cuda:0')  (tensor(7.3460, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 1.4329  (1.0112480107991972)\n",
      "     | > loader_time: 0.214  (0.21296333827855396)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:01:27 -- STEP: 188/203 -- GLOBAL_STEP: 1000\u001b[0m\n",
      "     | > loss: 3.227515459060669  (3.2377000666679208)\n",
      "     | > log_mle: 0.7225762009620667  (0.7441041345926038)\n",
      "     | > loss_dur: 2.504939317703247  (2.493595922246892)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.6197, device='cuda:0')  (tensor(7.2576, device='cuda:0'))\n",
      "     | > current_lr: 1e-06 \n",
      "     | > step_time: 0.9911  (1.0378926776825113)\n",
      "     | > loader_time: 0.1948  (0.2102363870498982)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_1000.pth\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.028747737407684326 \u001b[0m(-0.03622812032699585)\n",
      "     | > avg_loss:\u001b[92m 3.1579524278640747 \u001b[0m(-0.1344754993915558)\n",
      "     | > avg_log_mle:\u001b[92m 0.7167367562651634 \u001b[0m(-0.04451073706150055)\n",
      "     | > avg_loss_dur:\u001b[92m 2.4412156641483307 \u001b[0m(-0.08996480703353882)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_1015.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 5/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:02:10) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:02:23 -- STEP: 10/203 -- GLOBAL_STEP: 1025\u001b[0m\n",
      "     | > loss: 3.0806961059570312  (3.164918875694275)\n",
      "     | > log_mle: 0.7236038446426392  (0.7211560308933258)\n",
      "     | > loss_dur: 2.3570923805236816  (2.4437628269195555)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.3608, device='cuda:0')  (tensor(6.5218, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 0.7052  (0.9542691230773925)\n",
      "     | > loader_time: 0.0043  (0.014054346084594726)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:02:46 -- STEP: 35/203 -- GLOBAL_STEP: 1050\u001b[0m\n",
      "     | > loss: 3.154136896133423  (3.145054340362549)\n",
      "     | > log_mle: 0.7140455842018127  (0.7183682186262949)\n",
      "     | > loss_dur: 2.440091371536255  (2.4266860893794466)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.4061, device='cuda:0')  (tensor(6.4428, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 0.5226  (0.9026243346078056)\n",
      "     | > loader_time: 0.0058  (0.01571737698146275)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:03:15 -- STEP: 60/203 -- GLOBAL_STEP: 1075\u001b[0m\n",
      "     | > loss: 3.1705844402313232  (3.138828416665395)\n",
      "     | > log_mle: 0.7024200558662415  (0.7139164070288339)\n",
      "     | > loss_dur: 2.4681644439697266  (2.424912003676096)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.3532, device='cuda:0')  (tensor(6.3940, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 1.2994  (0.9759698192278544)\n",
      "     | > loader_time: 0.0983  (0.044101230303446454)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:03:50 -- STEP: 85/203 -- GLOBAL_STEP: 1100\u001b[0m\n",
      "     | > loss: 3.111816644668579  (3.138240620669196)\n",
      "     | > log_mle: 0.6939804553985596  (0.7094164259293498)\n",
      "     | > loss_dur: 2.4178361892700195  (2.428824183520148)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.2267, device='cuda:0')  (tensor(6.3628, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 0.4496  (1.0230954422670253)\n",
      "     | > loader_time: 0.203  (0.10603112332961138)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:04:27 -- STEP: 110/203 -- GLOBAL_STEP: 1125\u001b[0m\n",
      "     | > loss: 3.1655688285827637  (3.1356053850867527)\n",
      "     | > log_mle: 0.687747597694397  (0.7051856165582483)\n",
      "     | > loss_dur: 2.477821111679077  (2.4304197571494357)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.2793, device='cuda:0')  (tensor(6.3350, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 0.9602  (1.0491467194123703)\n",
      "     | > loader_time: 0.1443  (0.15657564293254506)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:04:58 -- STEP: 135/203 -- GLOBAL_STEP: 1150\u001b[0m\n",
      "     | > loss: 3.1327741146087646  (3.1330216902273667)\n",
      "     | > log_mle: 0.6761984825134277  (0.700964524127819)\n",
      "     | > loss_dur: 2.456575632095337  (2.432057154620135)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.2356, device='cuda:0')  (tensor(6.3137, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 1.0807  (1.045771226176509)\n",
      "     | > loader_time: 0.1202  (0.16779165974369756)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:05:34 -- STEP: 160/203 -- GLOBAL_STEP: 1175\u001b[0m\n",
      "     | > loss: 3.09101939201355  (3.1289582341909403)\n",
      "     | > log_mle: 0.6712637543678284  (0.6968107536435129)\n",
      "     | > loss_dur: 2.419755697250366  (2.432147476077079)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.1617, device='cuda:0')  (tensor(6.2924, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 0.8763  (1.0563226595520976)\n",
      "     | > loader_time: 0.2001  (0.1911093801259994)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:06:09 -- STEP: 185/203 -- GLOBAL_STEP: 1200\u001b[0m\n",
      "     | > loss: 3.0842061042785645  (3.1256043653230408)\n",
      "     | > log_mle: 0.6607152223587036  (0.6925398578514924)\n",
      "     | > loss_dur: 2.4234910011291504  (2.4330645084381093)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.1561, device='cuda:0')  (tensor(6.2769, device='cuda:0'))\n",
      "     | > current_lr: 1.2499999999999999e-06 \n",
      "     | > step_time: 1.3934  (1.0666584594829662)\n",
      "     | > loader_time: 0.1164  (0.20115740105912489)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.022276639938354492 \u001b[0m(-0.006471097469329834)\n",
      "     | > avg_loss:\u001b[92m 3.0172612071037292 \u001b[0m(-0.14069122076034546)\n",
      "     | > avg_log_mle:\u001b[92m 0.6555851027369499 \u001b[0m(-0.0611516535282135)\n",
      "     | > avg_loss_dur:\u001b[92m 2.3616760969161987 \u001b[0m(-0.07953956723213196)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_1218.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 6/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:06:50) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:07:00 -- STEP: 7/203 -- GLOBAL_STEP: 1225\u001b[0m\n",
      "     | > loss: 3.0348899364471436  (3.02105028288705)\n",
      "     | > log_mle: 0.6612710952758789  (0.6673657894134521)\n",
      "     | > loss_dur: 2.3736188411712646  (2.353684527533395)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.0406, device='cuda:0')  (tensor(6.0325, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 0.2196  (0.9054887635367257)\n",
      "     | > loader_time: 0.0043  (0.022574288504464284)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:07:26 -- STEP: 32/203 -- GLOBAL_STEP: 1250\u001b[0m\n",
      "     | > loss: 2.9354186058044434  (2.983132973313331)\n",
      "     | > log_mle: 0.6609114408493042  (0.6632502786815166)\n",
      "     | > loss_dur: 2.2745070457458496  (2.319882705807686)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.9169, device='cuda:0')  (tensor(5.9917, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 2.3975  (1.0002572908997536)\n",
      "     | > loader_time: 0.0904  (0.019013114273548126)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:07:52 -- STEP: 57/203 -- GLOBAL_STEP: 1275\u001b[0m\n",
      "     | > loss: 2.980710506439209  (2.972601238049959)\n",
      "     | > log_mle: 0.6438420414924622  (0.6570562019682769)\n",
      "     | > loss_dur: 2.3368685245513916  (2.315545044447245)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(6.0200, device='cuda:0')  (tensor(5.9898, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 1.0079  (0.9264457309455202)\n",
      "     | > loader_time: 0.0084  (0.0858682163974695)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:08:22 -- STEP: 82/203 -- GLOBAL_STEP: 1300\u001b[0m\n",
      "     | > loss: 2.9334747791290283  (2.965367349182688)\n",
      "     | > log_mle: 0.63348388671875  (0.6509736085810314)\n",
      "     | > loss_dur: 2.2999908924102783  (2.3143937442360842)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.9591, device='cuda:0')  (tensor(5.9931, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 1.8909  (0.9551130794897312)\n",
      "     | > loader_time: 0.213  (0.11355465214426924)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:09:00 -- STEP: 107/203 -- GLOBAL_STEP: 1325\u001b[0m\n",
      "     | > loss: 2.8715012073516846  (2.952839922682148)\n",
      "     | > log_mle: 0.6249721050262451  (0.645613040879508)\n",
      "     | > loss_dur: 2.2465291023254395  (2.307226880688532)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.9091, device='cuda:0')  (tensor(5.9860, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 0.4522  (0.9848258094252827)\n",
      "     | > loader_time: 0.0113  (0.1843516225012663)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:09:39 -- STEP: 132/203 -- GLOBAL_STEP: 1350\u001b[0m\n",
      "     | > loss: 2.859978199005127  (2.9383730653560534)\n",
      "     | > log_mle: 0.613162636756897  (0.6406013026381984)\n",
      "     | > loss_dur: 2.2468156814575195  (2.2977717609116515)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.9196, device='cuda:0')  (tensor(5.9762, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 0.7279  (1.0289237228306862)\n",
      "     | > loader_time: 0.1836  (0.21579010739470972)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:10:20 -- STEP: 157/203 -- GLOBAL_STEP: 1375\u001b[0m\n",
      "     | > loss: 2.7781822681427  (2.921178835972101)\n",
      "     | > log_mle: 0.59988933801651  (0.6357961179344517)\n",
      "     | > loss_dur: 2.178292989730835  (2.2853827142411722)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.8142, device='cuda:0')  (tensor(5.9592, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 1.386  (1.0676130246204931)\n",
      "     | > loader_time: 0.044  (0.24031731125655448)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:11:01 -- STEP: 182/203 -- GLOBAL_STEP: 1400\u001b[0m\n",
      "     | > loss: 2.7281930446624756  (2.903455131656521)\n",
      "     | > log_mle: 0.5991995930671692  (0.6309571475773067)\n",
      "     | > loss_dur: 2.128993511199951  (2.2724979788392456)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.7307, device='cuda:0')  (tensor(5.9417, device='cuda:0'))\n",
      "     | > current_lr: 1.5e-06 \n",
      "     | > step_time: 1.2074  (1.1075904159755485)\n",
      "     | > loader_time: 0.2108  (0.246487944990724)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.08061245083808899 \u001b[0m(+0.0583358108997345)\n",
      "     | > avg_loss:\u001b[92m 2.6474145650863647 \u001b[0m(-0.3698466420173645)\n",
      "     | > avg_log_mle:\u001b[92m 0.5905977860093117 \u001b[0m(-0.06498731672763824)\n",
      "     | > avg_loss_dur:\u001b[92m 2.0568167865276337 \u001b[0m(-0.30485931038856506)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_1421.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 7/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:11:50) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:11:58 -- STEP: 4/203 -- GLOBAL_STEP: 1425\u001b[0m\n",
      "     | > loss: 2.689755439758301  (2.708298444747925)\n",
      "     | > log_mle: 0.6100675463676453  (0.611952617764473)\n",
      "     | > loss_dur: 2.0796878337860107  (2.096345841884613)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.5759, device='cuda:0')  (tensor(5.6096, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 0.7177  (0.605430543422699)\n",
      "     | > loader_time: 0.1628  (0.04359370470046997)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:12:12 -- STEP: 29/203 -- GLOBAL_STEP: 1450\u001b[0m\n",
      "     | > loss: 2.635404586791992  (2.6489232079736116)\n",
      "     | > log_mle: 0.5903386473655701  (0.6039014993042782)\n",
      "     | > loss_dur: 2.0450658798217773  (2.045021723056661)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.5707, device='cuda:0')  (tensor(5.5700, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 0.9659  (0.58320243605252)\n",
      "     | > loader_time: 0.0056  (0.012268337710150358)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:12:35 -- STEP: 54/203 -- GLOBAL_STEP: 1475\u001b[0m\n",
      "     | > loss: 2.56044340133667  (2.617595076560974)\n",
      "     | > log_mle: 0.5788477659225464  (0.5964567065238954)\n",
      "     | > loss_dur: 1.981595754623413  (2.0211383788673953)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.4905, device='cuda:0')  (tensor(5.5360, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 1.2978  (0.7137247147383516)\n",
      "     | > loader_time: 0.0601  (0.015272153748406304)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:13:09 -- STEP: 79/203 -- GLOBAL_STEP: 1500\u001b[0m\n",
      "     | > loss: 2.5100696086883545  (2.591812212255937)\n",
      "     | > log_mle: 0.5674933791160583  (0.5894341695157789)\n",
      "     | > loss_dur: 1.942576289176941  (2.0023780487760705)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.3870, device='cuda:0')  (tensor(5.5014, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 1.2939  (0.7600673271130914)\n",
      "     | > loader_time: 0.172  (0.16636219809326944)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:13:47 -- STEP: 104/203 -- GLOBAL_STEP: 1525\u001b[0m\n",
      "     | > loss: 2.4563469886779785  (2.564768788906245)\n",
      "     | > log_mle: 0.5533723831176758  (0.5833637909247326)\n",
      "     | > loss_dur: 1.9029744863510132  (1.9814049933965383)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.3131, device='cuda:0')  (tensor(5.4577, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 0.3709  (0.8553089247300076)\n",
      "     | > loader_time: 0.0124  (0.2147146119521214)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:14:22 -- STEP: 129/203 -- GLOBAL_STEP: 1550\u001b[0m\n",
      "     | > loss: 2.409712076187134  (2.5370595362759385)\n",
      "     | > log_mle: 0.5451965928077698  (0.5777388450711274)\n",
      "     | > loss_dur: 1.8645155429840088  (1.9593206930530158)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.1761, device='cuda:0')  (tensor(5.4094, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 1.6879  (0.901761295259461)\n",
      "     | > loader_time: 0.1207  (0.22866745882256084)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:14:54 -- STEP: 154/203 -- GLOBAL_STEP: 1575\u001b[0m\n",
      "     | > loss: 2.330864906311035  (2.50945540682062)\n",
      "     | > log_mle: 0.5425373315811157  (0.572507045098714)\n",
      "     | > loss_dur: 1.7883275747299194  (1.9369483644312075)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(5.0238, device='cuda:0')  (tensor(5.3581, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 1.0946  (0.9283381297990874)\n",
      "     | > loader_time: 0.0146  (0.2282098609131652)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:15:28 -- STEP: 179/203 -- GLOBAL_STEP: 1600\u001b[0m\n",
      "     | > loss: 2.256652355194092  (2.4835943962608638)\n",
      "     | > log_mle: 0.5273095369338989  (0.5672921634919156)\n",
      "     | > loss_dur: 1.7293429374694824  (1.9163022321029746)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.9044, device='cuda:0')  (tensor(5.3084, device='cuda:0'))\n",
      "     | > current_lr: 1.75e-06 \n",
      "     | > step_time: 1.1787  (0.9565454381804227)\n",
      "     | > loader_time: 0.1821  (0.2235981538975039)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.06479138135910034 \u001b[0m(-0.015821069478988647)\n",
      "     | > avg_loss:\u001b[92m 2.171048939228058 \u001b[0m(-0.4763656258583069)\n",
      "     | > avg_log_mle:\u001b[92m 0.5261840373277664 \u001b[0m(-0.06441374868154526)\n",
      "     | > avg_loss_dur:\u001b[92m 1.6448649019002914 \u001b[0m(-0.4119518846273422)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_1624.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 8/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:16:16) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:16:20 -- STEP: 1/203 -- GLOBAL_STEP: 1625\u001b[0m\n",
      "     | > loss: 2.3098561763763428  (2.3098561763763428)\n",
      "     | > log_mle: 0.5492492318153381  (0.5492492318153381)\n",
      "     | > loss_dur: 1.7606068849563599  (1.7606068849563599)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.8637, device='cuda:0')  (tensor(4.8637, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 1.0  (0.9999773502349854)\n",
      "     | > loader_time: 0.0049  (0.004880666732788086)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:16:35 -- STEP: 26/203 -- GLOBAL_STEP: 1650\u001b[0m\n",
      "     | > loss: 2.1720662117004395  (2.21448475580949)\n",
      "     | > log_mle: 0.535886287689209  (0.5432999753035034)\n",
      "     | > loss_dur: 1.6361799240112305  (1.671184791968419)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.6938, device='cuda:0')  (tensor(4.7533, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 0.7935  (0.5736955312582162)\n",
      "     | > loader_time: 0.0063  (0.012961616882911095)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:16:53 -- STEP: 51/203 -- GLOBAL_STEP: 1675\u001b[0m\n",
      "     | > loss: 2.1232762336730957  (2.1846461903815166)\n",
      "     | > log_mle: 0.5071499347686768  (0.5349559947556145)\n",
      "     | > loss_dur: 1.6161261796951294  (1.6496901886135924)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.6153, device='cuda:0')  (tensor(4.6986, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 0.9049  (0.6425625997431135)\n",
      "     | > loader_time: 0.0171  (0.011106135798435585)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:17:18 -- STEP: 76/203 -- GLOBAL_STEP: 1700\u001b[0m\n",
      "     | > loss: 2.0962400436401367  (2.160244339390804)\n",
      "     | > log_mle: 0.5100489854812622  (0.5279916022953238)\n",
      "     | > loss_dur: 1.5861910581588745  (1.6322527323898517)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.5210, device='cuda:0')  (tensor(4.6512, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 1.2175  (0.7165606335589758)\n",
      "     | > loader_time: 0.9055  (0.03772029437516865)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:17:49 -- STEP: 101/203 -- GLOBAL_STEP: 1725\u001b[0m\n",
      "     | > loss: 2.0481436252593994  (2.139166921672253)\n",
      "     | > log_mle: 0.5029191374778748  (0.5222380984537675)\n",
      "     | > loss_dur: 1.5452244281768799  (1.6169288182022548)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.4204, device='cuda:0')  (tensor(4.6073, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 0.872  (0.7755266935518471)\n",
      "     | > loader_time: 0.2127  (0.10253593709209174)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:18:24 -- STEP: 126/203 -- GLOBAL_STEP: 1750\u001b[0m\n",
      "     | > loss: 2.0409035682678223  (2.119356085383703)\n",
      "     | > log_mle: 0.4871669411659241  (0.5170450536977678)\n",
      "     | > loss_dur: 1.5537365674972534  (1.6023110245901442)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.3953, device='cuda:0')  (tensor(4.5669, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 1.5246  (0.8363362342592269)\n",
      "     | > loader_time: 0.5065  (0.13861519192892405)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:18:56 -- STEP: 151/203 -- GLOBAL_STEP: 1775\u001b[0m\n",
      "     | > loss: 2.0056846141815186  (2.100142986569184)\n",
      "     | > log_mle: 0.4876920282840729  (0.5123246002276212)\n",
      "     | > loss_dur: 1.517992615699768  (1.5878183818021359)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.3080, device='cuda:0')  (tensor(4.5263, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 1.2014  (0.8787419353889312)\n",
      "     | > loader_time: 0.4132  (0.14945855677522563)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:19:29 -- STEP: 176/203 -- GLOBAL_STEP: 1800\u001b[0m\n",
      "     | > loss: 1.964766025543213  (2.083104996518655)\n",
      "     | > log_mle: 0.47511884570121765  (0.5078055731613529)\n",
      "     | > loss_dur: 1.4896471500396729  (1.5752994181080298)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.2460, device='cuda:0')  (tensor(4.4895, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 1.1135  (0.9082335504618556)\n",
      "     | > loader_time: 1.1829  (0.16058835658160123)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:20:00 -- STEP: 201/203 -- GLOBAL_STEP: 1825\u001b[0m\n",
      "     | > loss: 1.910820722579956  (2.0645299426358736)\n",
      "     | > log_mle: 0.4698338806629181  (0.5035555881647327)\n",
      "     | > loss_dur: 1.4409868717193604  (1.5609743500230324)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.1508, device='cuda:0')  (tensor(4.4513, device='cuda:0'))\n",
      "     | > current_lr: 2e-06 \n",
      "     | > step_time: 0.5768  (0.9268310188654049)\n",
      "     | > loader_time: 0.0114  (0.15839449445999676)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.047162383794784546 \u001b[0m(-0.017628997564315796)\n",
      "     | > avg_loss:\u001b[92m 1.8509946763515472 \u001b[0m(-0.3200542628765106)\n",
      "     | > avg_log_mle:\u001b[92m 0.47384848818182945 \u001b[0m(-0.052335549145936966)\n",
      "     | > avg_loss_dur:\u001b[92m 1.3771461993455887 \u001b[0m(-0.26771870255470276)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_1827.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 9/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:20:21) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:20:38 -- STEP: 23/203 -- GLOBAL_STEP: 1850\u001b[0m\n",
      "     | > loss: 1.9235717058181763  (1.9083473837893943)\n",
      "     | > log_mle: 0.48404279351234436  (0.49461246184680774)\n",
      "     | > loss_dur: 1.4395289421081543  (1.4137349128723145)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(4.1285, device='cuda:0')  (tensor(4.1079, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 0.6812  (0.5732472772183627)\n",
      "     | > loader_time: 0.0068  (0.004684790321018385)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:20:58 -- STEP: 48/203 -- GLOBAL_STEP: 1875\u001b[0m\n",
      "     | > loss: 1.8543827533721924  (1.8869001145164173)\n",
      "     | > log_mle: 0.4743330478668213  (0.486344363540411)\n",
      "     | > loss_dur: 1.380049705505371  (1.400555742283662)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(3.9986, device='cuda:0')  (tensor(4.0710, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 0.4947  (0.6127884040276209)\n",
      "     | > loader_time: 0.9287  (0.07259932657082875)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:21:19 -- STEP: 73/203 -- GLOBAL_STEP: 1900\u001b[0m\n",
      "     | > loss: 1.8243913650512695  (1.8692291958691323)\n",
      "     | > log_mle: 0.4679040014743805  (0.47962743697101123)\n",
      "     | > loss_dur: 1.3564873933792114  (1.3896017531826073)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(3.9691, device='cuda:0')  (tensor(4.0381, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 0.4724  (0.6591215688888339)\n",
      "     | > loader_time: 0.0066  (0.08103246558202457)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:21:49 -- STEP: 98/203 -- GLOBAL_STEP: 1925\u001b[0m\n",
      "     | > loss: 1.7893598079681396  (1.8550490651811873)\n",
      "     | > log_mle: 0.45421284437179565  (0.47421739417679454)\n",
      "     | > loss_dur: 1.3351470232009888  (1.3808316649222858)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(3.8802, device='cuda:0')  (tensor(4.0097, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 1.8483  (0.7380489037961376)\n",
      "     | > loader_time: 0.2627  (0.11045274442555952)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:22:23 -- STEP: 123/203 -- GLOBAL_STEP: 1950\u001b[0m\n",
      "     | > loss: 1.73426353931427  (1.839771072069804)\n",
      "     | > log_mle: 0.45688891410827637  (0.4695919354756673)\n",
      "     | > loss_dur: 1.2773746252059937  (1.3701791317482301)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(3.7460, device='cuda:0')  (tensor(3.9801, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 1.6122  (0.8046006381027098)\n",
      "     | > loader_time: 0.3097  (0.14401890591877256)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:22:53 -- STEP: 148/203 -- GLOBAL_STEP: 1975\u001b[0m\n",
      "     | > loss: 1.7515597343444824  (1.8260863592495788)\n",
      "     | > log_mle: 0.4440390467643738  (0.46530382637236567)\n",
      "     | > loss_dur: 1.3075207471847534  (1.3607825322731117)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(3.8016, device='cuda:0')  (tensor(3.9542, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 1.5376  (0.8368464933859334)\n",
      "     | > loader_time: 0.4925  (0.15699601334494512)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:23:25 -- STEP: 173/203 -- GLOBAL_STEP: 2000\u001b[0m\n",
      "     | > loss: 1.7313482761383057  (1.8139098806877356)\n",
      "     | > log_mle: 0.4341897964477539  (0.4613062002066243)\n",
      "     | > loss_dur: 1.2971584796905518  (1.352603680825647)\n",
      "     | > amp_scaler: 16384.0  (16384.0)\n",
      "     | > grad_norm: tensor(3.7755, device='cuda:0')  (tensor(3.9315, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 0.9388  (0.8731398265485819)\n",
      "     | > loader_time: 0.0129  (0.1591817464442612)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_2000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:24:04 -- STEP: 198/203 -- GLOBAL_STEP: 2025\u001b[0m\n",
      "     | > loss: 1.7023053169250488  (1.800315134453051)\n",
      "     | > log_mle: 0.4270433187484741  (0.457580866386192)\n",
      "     | > loss_dur: 1.2752619981765747  (1.3427342676153085)\n",
      "     | > amp_scaler: 32768.0  (17625.212121212113)\n",
      "     | > grad_norm: tensor(3.7032, device='cuda:0')  (tensor(3.9057, device='cuda:0'))\n",
      "     | > current_lr: 2.25e-06 \n",
      "     | > step_time: 0.5612  (0.9060951820527664)\n",
      "     | > loader_time: 0.0117  (0.15829496191005518)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.05575475096702576 \u001b[0m(+0.008592367172241211)\n",
      "     | > avg_loss:\u001b[92m 1.6279772967100143 \u001b[0m(-0.2230173796415329)\n",
      "     | > avg_log_mle:\u001b[92m 0.4326712526381016 \u001b[0m(-0.041177235543727875)\n",
      "     | > avg_loss_dur:\u001b[92m 1.1953060626983643 \u001b[0m(-0.18184013664722443)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_2030.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 10/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:24:28) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:24:43 -- STEP: 20/203 -- GLOBAL_STEP: 2050\u001b[0m\n",
      "     | > loss: 1.6401020288467407  (1.6865526139736176)\n",
      "     | > log_mle: 0.46311113238334656  (0.45664314180612564)\n",
      "     | > loss_dur: 1.1769908666610718  (1.229909461736679)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5606, device='cuda:0')  (tensor(3.6558, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 1.9003  (0.597837221622467)\n",
      "     | > loader_time: 0.0048  (0.01553041934967041)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:25:05 -- STEP: 45/203 -- GLOBAL_STEP: 2075\u001b[0m\n",
      "     | > loss: 1.6258940696716309  (1.6699825763702392)\n",
      "     | > log_mle: 0.4289330840110779  (0.44864052335421245)\n",
      "     | > loss_dur: 1.1969609260559082  (1.2213420470555625)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5539, device='cuda:0')  (tensor(3.6312, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 0.7168  (0.7262263827853732)\n",
      "     | > loader_time: 0.0073  (0.027072440253363715)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:25:29 -- STEP: 70/203 -- GLOBAL_STEP: 2100\u001b[0m\n",
      "     | > loss: 1.625921607017517  (1.65509318624224)\n",
      "     | > log_mle: 0.4222911298274994  (0.44165381235735757)\n",
      "     | > loss_dur: 1.2036304473876953  (1.21343936920166)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5693, device='cuda:0')  (tensor(3.6052, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 0.4167  (0.7721815177372524)\n",
      "     | > loader_time: 0.0061  (0.05336486952645438)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:25:59 -- STEP: 95/203 -- GLOBAL_STEP: 2125\u001b[0m\n",
      "     | > loss: 1.5974640846252441  (1.6439583464672691)\n",
      "     | > log_mle: 0.4148109555244446  (0.4363009590851633)\n",
      "     | > loss_dur: 1.1826531887054443  (1.2076573848724363)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5287, device='cuda:0')  (tensor(3.5828, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 0.6734  (0.7966940177114387)\n",
      "     | > loader_time: 0.1039  (0.11598184485184519)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:26:30 -- STEP: 120/203 -- GLOBAL_STEP: 2150\u001b[0m\n",
      "     | > loss: 1.5615655183792114  (1.6323300451040268)\n",
      "     | > log_mle: 0.4157601594924927  (0.43186685293912885)\n",
      "     | > loss_dur: 1.1458053588867188  (1.200463187694549)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.4334, device='cuda:0')  (tensor(3.5619, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 1.0844  (0.8216915210088094)\n",
      "     | > loader_time: 0.2337  (0.16580140789349873)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:27:08 -- STEP: 145/203 -- GLOBAL_STEP: 2175\u001b[0m\n",
      "     | > loss: 1.5677857398986816  (1.6219915184481386)\n",
      "     | > log_mle: 0.4080256223678589  (0.42776778052593095)\n",
      "     | > loss_dur: 1.1597601175308228  (1.1942237352502754)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.4937, device='cuda:0')  (tensor(3.5470, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 1.3866  (0.886533049879403)\n",
      "     | > loader_time: 0.2932  (0.1911536529146392)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:27:45 -- STEP: 170/203 -- GLOBAL_STEP: 2200\u001b[0m\n",
      "     | > loss: 1.5659940242767334  (1.6130871758741487)\n",
      "     | > log_mle: 0.39591163396835327  (0.4240945148117402)\n",
      "     | > loss_dur: 1.1700823307037354  (1.188992656679714)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5281, device='cuda:0')  (tensor(3.5299, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 0.7844  (0.9246481222264907)\n",
      "     | > loader_time: 0.009  (0.2073619099224315)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:28:20 -- STEP: 195/203 -- GLOBAL_STEP: 2225\u001b[0m\n",
      "     | > loss: 1.5204920768737793  (1.6031143689766905)\n",
      "     | > log_mle: 0.3877115845680237  (0.42056984091416383)\n",
      "     | > loss_dur: 1.1327804327011108  (1.182544524241716)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.4139, device='cuda:0')  (tensor(3.5108, device='cuda:0'))\n",
      "     | > current_lr: 2.4999999999999998e-06 \n",
      "     | > step_time: 1.5729  (0.9611203780541053)\n",
      "     | > loader_time: 0.135  (0.2040734352209629)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.05896329879760742 \u001b[0m(+0.003208547830581665)\n",
      "     | > avg_loss:\u001b[92m 1.4650474041700363 \u001b[0m(-0.16292989253997803)\n",
      "     | > avg_log_mle:\u001b[92m 0.3984922468662262 \u001b[0m(-0.03417900577187538)\n",
      "     | > avg_loss_dur:\u001b[92m 1.0665551573038101 \u001b[0m(-0.12875090539455414)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_2233.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 11/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:28:45) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:28:59 -- STEP: 17/203 -- GLOBAL_STEP: 2250\u001b[0m\n",
      "     | > loss: 1.5016487836837769  (1.5242490347693949)\n",
      "     | > log_mle: 0.42158734798431396  (0.4237329346292159)\n",
      "     | > loss_dur: 1.080061435699463  (1.100516115917879)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2777, device='cuda:0')  (tensor(3.3357, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 0.5784  (0.6039757588330437)\n",
      "     | > loader_time: 0.0058  (0.017302863738116098)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:29:21 -- STEP: 42/203 -- GLOBAL_STEP: 2275\u001b[0m\n",
      "     | > loss: 1.4581949710845947  (1.508155266443888)\n",
      "     | > log_mle: 0.4002201557159424  (0.4169048283781324)\n",
      "     | > loss_dur: 1.0579748153686523  (1.0912504565148133)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1875, device='cuda:0')  (tensor(3.2998, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 0.7857  (0.74580188592275)\n",
      "     | > loader_time: 0.0082  (0.02454886549995059)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:29:43 -- STEP: 67/203 -- GLOBAL_STEP: 2300\u001b[0m\n",
      "     | > loss: 1.4811294078826904  (1.494456693307677)\n",
      "     | > log_mle: 0.3926025629043579  (0.40963453499238883)\n",
      "     | > loss_dur: 1.0885268449783325  (1.0848221707699908)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2560, device='cuda:0')  (tensor(3.2780, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 0.4694  (0.7889499735476365)\n",
      "     | > loader_time: 0.007  (0.02696004910255546)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:30:13 -- STEP: 92/203 -- GLOBAL_STEP: 2325\u001b[0m\n",
      "     | > loss: 1.467804193496704  (1.4863785025866136)\n",
      "     | > log_mle: 0.38364559412002563  (0.4042324553365293)\n",
      "     | > loss_dur: 1.0841585397720337  (1.0821460576161097)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2793, device='cuda:0')  (tensor(3.2613, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 0.879  (0.8338789551154427)\n",
      "     | > loader_time: 0.1033  (0.08445464528125265)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:30:43 -- STEP: 117/203 -- GLOBAL_STEP: 2350\u001b[0m\n",
      "     | > loss: 1.4654645919799805  (1.4777939635464268)\n",
      "     | > log_mle: 0.37575262784957886  (0.3995211557445363)\n",
      "     | > loss_dur: 1.0897120237350464  (1.0782728144246292)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2605, device='cuda:0')  (tensor(3.2445, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 1.197  (0.8623050583733453)\n",
      "     | > loader_time: 0.1821  (0.11323192180731358)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:31:14 -- STEP: 142/203 -- GLOBAL_STEP: 2375\u001b[0m\n",
      "     | > loss: 1.426973819732666  (1.4699276307938804)\n",
      "     | > log_mle: 0.37278735637664795  (0.3954294181205857)\n",
      "     | > loss_dur: 1.054186463356018  (1.0744982202288134)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.3138, device='cuda:0')  (tensor(3.2334, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 0.9169  (0.8859234393482477)\n",
      "     | > loader_time: 0.1991  (0.13347841316545514)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:31:49 -- STEP: 167/203 -- GLOBAL_STEP: 2400\u001b[0m\n",
      "     | > loss: 1.4496521949768066  (1.4634436989972688)\n",
      "     | > log_mle: 0.3602706789970398  (0.39186450749814156)\n",
      "     | > loss_dur: 1.089381456375122  (1.0715791979235803)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2128, device='cuda:0')  (tensor(3.2211, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 1.3026  (0.9261006281047524)\n",
      "     | > loader_time: 0.202  (0.1535667388025158)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:32:23 -- STEP: 192/203 -- GLOBAL_STEP: 2425\u001b[0m\n",
      "     | > loss: 1.3797601461410522  (1.4561607583115495)\n",
      "     | > log_mle: 0.3700748682022095  (0.38843098500122625)\n",
      "     | > loss_dur: 1.0096852779388428  (1.067729779829581)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9820, device='cuda:0')  (tensor(3.2102, device='cuda:0'))\n",
      "     | > current_lr: 2.75e-06 \n",
      "     | > step_time: 1.0019  (0.9490980679790179)\n",
      "     | > loader_time: 0.2124  (0.16123599559068677)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.0765644907951355 \u001b[0m(+0.017601191997528076)\n",
      "     | > avg_loss:\u001b[92m 1.3455264568328857 \u001b[0m(-0.11952094733715057)\n",
      "     | > avg_log_mle:\u001b[92m 0.36676792055368423 \u001b[0m(-0.03172432631254196)\n",
      "     | > avg_loss_dur:\u001b[92m 0.9787585586309433 \u001b[0m(-0.08779659867286682)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_2436.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 12/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:32:55) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:33:07 -- STEP: 14/203 -- GLOBAL_STEP: 2450\u001b[0m\n",
      "     | > loss: 1.382055640220642  (1.401291080883571)\n",
      "     | > log_mle: 0.3917580246925354  (0.3934541940689087)\n",
      "     | > loss_dur: 0.9902976155281067  (1.0078368740422385)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.0400, device='cuda:0')  (tensor(3.0586, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 0.7749  (0.6382608924593244)\n",
      "     | > loader_time: 0.0056  (0.01582629340035575)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:33:25 -- STEP: 39/203 -- GLOBAL_STEP: 2475\u001b[0m\n",
      "     | > loss: 1.3713966608047485  (1.3855752394749568)\n",
      "     | > log_mle: 0.3756440281867981  (0.3867067495981852)\n",
      "     | > loss_dur: 0.9957526326179504  (0.9988684914050958)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2133, device='cuda:0')  (tensor(3.0310, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 1.1221  (0.6838648258111414)\n",
      "     | > loader_time: 0.0107  (0.019400364313370142)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:33:48 -- STEP: 64/203 -- GLOBAL_STEP: 2500\u001b[0m\n",
      "     | > loss: 1.3793153762817383  (1.3729275427758694)\n",
      "     | > log_mle: 0.362185537815094  (0.37914144061505795)\n",
      "     | > loss_dur: 1.0171297788619995  (0.9937861002981663)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1451, device='cuda:0')  (tensor(3.0246, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 0.4531  (0.7600105144083498)\n",
      "     | > loader_time: 0.0075  (0.025520559400320046)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:34:16 -- STEP: 89/203 -- GLOBAL_STEP: 2525\u001b[0m\n",
      "     | > loss: 1.3366944789886475  (1.366545484307107)\n",
      "     | > log_mle: 0.3622649312019348  (0.37360089079717573)\n",
      "     | > loss_dur: 0.9744295477867126  (0.9929445935099313)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9013, device='cuda:0')  (tensor(3.0081, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 1.0346  (0.8073101740204887)\n",
      "     | > loader_time: 0.4988  (0.0679446981194314)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:34:46 -- STEP: 114/203 -- GLOBAL_STEP: 2550\u001b[0m\n",
      "     | > loss: 1.339573860168457  (1.3593056296047412)\n",
      "     | > log_mle: 0.3504803776741028  (0.3688216261696397)\n",
      "     | > loss_dur: 0.989093542098999  (0.9904840039579492)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9450, device='cuda:0')  (tensor(2.9940, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 0.6397  (0.8275664542850695)\n",
      "     | > loader_time: 0.2834  (0.11435847324237489)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:35:24 -- STEP: 139/203 -- GLOBAL_STEP: 2575\u001b[0m\n",
      "     | > loss: 1.3271839618682861  (1.353496287366469)\n",
      "     | > log_mle: 0.35180169343948364  (0.36463126861791817)\n",
      "     | > loss_dur: 0.9753822088241577  (0.9888650213214133)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9106, device='cuda:0')  (tensor(2.9842, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 0.7286  (0.8935872033345612)\n",
      "     | > loader_time: 0.014  (0.15242675568560043)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:36:04 -- STEP: 164/203 -- GLOBAL_STEP: 2600\u001b[0m\n",
      "     | > loss: 1.3369486331939697  (1.3482719390857512)\n",
      "     | > log_mle: 0.34421712160110474  (0.3610639826553624)\n",
      "     | > loss_dur: 0.9927315711975098  (0.9872079582476034)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9643, device='cuda:0')  (tensor(2.9742, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 1.1015  (0.9492085401604815)\n",
      "     | > loader_time: 0.4116  (0.18051769529900902)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:36:41 -- STEP: 189/203 -- GLOBAL_STEP: 2625\u001b[0m\n",
      "     | > loss: 1.2994623184204102  (1.3434476063995768)\n",
      "     | > log_mle: 0.33694714307785034  (0.3574917281746234)\n",
      "     | > loss_dur: 0.9625152349472046  (0.9859558782249531)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9978, device='cuda:0')  (tensor(2.9645, device='cuda:0'))\n",
      "     | > current_lr: 3e-06 \n",
      "     | > step_time: 0.8836  (0.984624979987977)\n",
      "     | > loader_time: 0.1075  (0.190122092211688)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.01622435450553894 \u001b[0m(-0.06034013628959656)\n",
      "     | > avg_loss:\u001b[92m 1.2542649805545807 \u001b[0m(-0.09126147627830505)\n",
      "     | > avg_log_mle:\u001b[92m 0.33580826967954636 \u001b[0m(-0.03095965087413788)\n",
      "     | > avg_loss_dur:\u001b[92m 0.9184567108750343 \u001b[0m(-0.060301847755908966)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_2639.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 13/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:37:17) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:37:28 -- STEP: 11/203 -- GLOBAL_STEP: 2650\u001b[0m\n",
      "     | > loss: 1.2627763748168945  (1.3038785024122759)\n",
      "     | > log_mle: 0.36569058895111084  (0.36465615034103394)\n",
      "     | > loss_dur: 0.8970858454704285  (0.9392223845828663)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6982, device='cuda:0')  (tensor(2.8665, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 0.6645  (0.7421069578690962)\n",
      "     | > loader_time: 0.0056  (0.011256261305375532)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:37:58 -- STEP: 36/203 -- GLOBAL_STEP: 2675\u001b[0m\n",
      "     | > loss: 1.2534453868865967  (1.288082781765196)\n",
      "     | > log_mle: 0.34566831588745117  (0.35810011956426835)\n",
      "     | > loss_dur: 0.9077770709991455  (0.9299826870361964)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.7043, device='cuda:0')  (tensor(2.7984, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 1.0105  (0.8844496541553073)\n",
      "     | > loader_time: 0.5291  (0.15742479430304634)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:38:23 -- STEP: 61/203 -- GLOBAL_STEP: 2700\u001b[0m\n",
      "     | > loss: 1.2592064142227173  (1.278019518148704)\n",
      "     | > log_mle: 0.33513158559799194  (0.3503212225241739)\n",
      "     | > loss_dur: 0.9240748286247253  (0.9276983083271589)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.8891, device='cuda:0')  (tensor(2.8014, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 1.0878  (0.8762507047809538)\n",
      "     | > loader_time: 0.0085  (0.1442039521014104)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:38:48 -- STEP: 86/203 -- GLOBAL_STEP: 2725\u001b[0m\n",
      "     | > loss: 1.2714495658874512  (1.2727408339810926)\n",
      "     | > log_mle: 0.3310524821281433  (0.34440944083901337)\n",
      "     | > loss_dur: 0.9403971433639526  (0.9283314028451609)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6985, device='cuda:0')  (tensor(2.7896, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 1.2009  (0.8732339642768683)\n",
      "     | > loader_time: 0.1156  (0.1364221101583436)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:39:23 -- STEP: 111/203 -- GLOBAL_STEP: 2750\u001b[0m\n",
      "     | > loss: 1.2389469146728516  (1.2667452687615752)\n",
      "     | > log_mle: 0.31943434476852417  (0.33950077735625944)\n",
      "     | > loss_dur: 0.9195125699043274  (0.927244499459997)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.8406, device='cuda:0')  (tensor(2.7840, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 0.6866  (0.924513022104899)\n",
      "     | > loader_time: 0.0083  (0.17901515101527307)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:39:58 -- STEP: 136/203 -- GLOBAL_STEP: 2775\u001b[0m\n",
      "     | > loss: 1.2299613952636719  (1.2620661425239903)\n",
      "     | > log_mle: 0.30914032459259033  (0.3352500121383106)\n",
      "     | > loss_dur: 0.9208211302757263  (0.9268161356449127)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.7696, device='cuda:0')  (tensor(2.7842, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 1.0532  (0.9575531132080978)\n",
      "     | > loader_time: 0.4162  (0.19740280859610615)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:40:30 -- STEP: 161/203 -- GLOBAL_STEP: 2800\u001b[0m\n",
      "     | > loss: 1.2364892959594727  (1.257773391948724)\n",
      "     | > log_mle: 0.30765724182128906  (0.33175709047672913)\n",
      "     | > loss_dur: 0.9288320541381836  (0.9260163048039312)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.7694, device='cuda:0')  (tensor(2.7772, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 0.7216  (0.9784359917137195)\n",
      "     | > loader_time: 0.202  (0.19501418800827874)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:41:06 -- STEP: 186/203 -- GLOBAL_STEP: 2825\u001b[0m\n",
      "     | > loss: 1.2192221879959106  (1.2538739167233954)\n",
      "     | > log_mle: 0.30587679147720337  (0.3282533505911468)\n",
      "     | > loss_dur: 0.9133453965187073  (0.9256205683754336)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6607, device='cuda:0')  (tensor(2.7633, device='cuda:0'))\n",
      "     | > current_lr: 3.25e-06 \n",
      "     | > step_time: 1.0856  (1.009907235381424)\n",
      "     | > loader_time: 0.0103  (0.19558885661504602)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.033464640378952026 \u001b[0m(+0.017240285873413086)\n",
      "     | > avg_loss:\u001b[92m 1.1794233471155167 \u001b[0m(-0.07484163343906403)\n",
      "     | > avg_log_mle:\u001b[92m 0.3078804388642311 \u001b[0m(-0.027927830815315247)\n",
      "     | > avg_loss_dur:\u001b[92m 0.8715429380536079 \u001b[0m(-0.04691377282142639)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_2842.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 14/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:41:48) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:41:58 -- STEP: 8/203 -- GLOBAL_STEP: 2850\u001b[0m\n",
      "     | > loss: 1.2310657501220703  (1.2337706238031387)\n",
      "     | > log_mle: 0.3348250389099121  (0.33499766886234283)\n",
      "     | > loss_dur: 0.896240770816803  (0.8987729549407959)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.7041, device='cuda:0')  (tensor(2.6989, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 0.4476  (0.8814082443714142)\n",
      "     | > loader_time: 0.0049  (0.020159244537353516)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:42:14 -- STEP: 33/203 -- GLOBAL_STEP: 2875\u001b[0m\n",
      "     | > loss: 1.2063968181610107  (1.2082146586793843)\n",
      "     | > log_mle: 0.31946879625320435  (0.33088060039462464)\n",
      "     | > loss_dur: 0.8869280815124512  (0.8773340564785581)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6507, device='cuda:0')  (tensor(2.6038, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 0.4951  (0.698120608474269)\n",
      "     | > loader_time: 0.0056  (0.015242923389781608)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:42:33 -- STEP: 58/203 -- GLOBAL_STEP: 2900\u001b[0m\n",
      "     | > loss: 1.1878294944763184  (1.1983668742508726)\n",
      "     | > log_mle: 0.3108712434768677  (0.3230262419273113)\n",
      "     | > loss_dur: 0.8769581913948059  (0.875340625129897)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6658, device='cuda:0')  (tensor(2.6156, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 0.7845  (0.711816376653211)\n",
      "     | > loader_time: 0.0076  (0.013486574436056203)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:43:01 -- STEP: 83/203 -- GLOBAL_STEP: 2925\u001b[0m\n",
      "     | > loss: 1.1679468154907227  (1.1932409240538822)\n",
      "     | > log_mle: 0.2937472462654114  (0.3167648322610969)\n",
      "     | > loss_dur: 0.8741995692253113  (0.8764760917927845)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5234, device='cuda:0')  (tensor(2.6106, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 1.2895  (0.795353961278157)\n",
      "     | > loader_time: 0.5072  (0.04282107410660709)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:43:37 -- STEP: 108/203 -- GLOBAL_STEP: 2950\u001b[0m\n",
      "     | > loss: 1.1567902565002441  (1.1878639923201677)\n",
      "     | > log_mle: 0.2884056568145752  (0.31202135593802843)\n",
      "     | > loss_dur: 0.868384599685669  (0.8758426391416125)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5083, device='cuda:0')  (tensor(2.5950, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 0.9786  (0.8521095779207017)\n",
      "     | > loader_time: 0.1879  (0.12854275659278588)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:44:10 -- STEP: 133/203 -- GLOBAL_STEP: 2975\u001b[0m\n",
      "     | > loss: 1.1632459163665771  (1.1829956142525933)\n",
      "     | > log_mle: 0.29107046127319336  (0.30792071451818137)\n",
      "     | > loss_dur: 0.872175395488739  (0.8750749001825663)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.4774, device='cuda:0')  (tensor(2.5895, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 1.1759  (0.8878758294241769)\n",
      "     | > loader_time: 0.3008  (0.1508405047251766)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:44:44 -- STEP: 158/203 -- GLOBAL_STEP: 3000\u001b[0m\n",
      "     | > loss: 1.1556692123413086  (1.1786786747884155)\n",
      "     | > log_mle: 0.28765588998794556  (0.3043515923656993)\n",
      "     | > loss_dur: 0.868013322353363  (0.8743270812909815)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6345, device='cuda:0')  (tensor(2.6002, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 1.1948  (0.9172687696505196)\n",
      "     | > loader_time: 0.3117  (0.1706754859489731)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_3000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:45:22 -- STEP: 183/203 -- GLOBAL_STEP: 3025\u001b[0m\n",
      "     | > loss: 1.1334037780761719  (1.1746968004872902)\n",
      "     | > log_mle: 0.276735782623291  (0.3009589193948629)\n",
      "     | > loss_dur: 0.8566679358482361  (0.8737378778353415)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5614, device='cuda:0')  (tensor(2.5867, device='cuda:0'))\n",
      "     | > current_lr: 3.5e-06 \n",
      "     | > step_time: 1.5739  (0.9392917143191145)\n",
      "     | > loader_time: 0.3002  (0.17886778565703862)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.04081982374191284 \u001b[0m(+0.007355183362960815)\n",
      "     | > avg_loss:\u001b[92m 1.1047376841306686 \u001b[0m(-0.07468566298484802)\n",
      "     | > avg_log_mle:\u001b[92m 0.28089871257543564 \u001b[0m(-0.02698172628879547)\n",
      "     | > avg_loss_dur:\u001b[92m 0.8238389641046524 \u001b[0m(-0.047703973948955536)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_3045.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 15/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:46:06) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:46:13 -- STEP: 5/203 -- GLOBAL_STEP: 3050\u001b[0m\n",
      "     | > loss: 1.1615357398986816  (1.1648892402648925)\n",
      "     | > log_mle: 0.30705726146698  (0.3124950885772705)\n",
      "     | > loss_dur: 0.8544784188270569  (0.8523941278457642)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.4277, device='cuda:0')  (tensor(2.4892, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 0.5846  (0.5793005943298339)\n",
      "     | > loader_time: 0.0042  (0.019246912002563475)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:46:29 -- STEP: 30/203 -- GLOBAL_STEP: 3075\u001b[0m\n",
      "     | > loss: 1.1263656616210938  (1.1295135498046875)\n",
      "     | > log_mle: 0.29611384868621826  (0.30470191438992816)\n",
      "     | > loss_dur: 0.8302517533302307  (0.8248116354147593)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5447, device='cuda:0')  (tensor(2.4048, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 0.9983  (0.6226811965306599)\n",
      "     | > loader_time: 0.0084  (0.013401603698730469)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:46:52 -- STEP: 55/203 -- GLOBAL_STEP: 3100\u001b[0m\n",
      "     | > loss: 1.093549132347107  (1.118673918463967)\n",
      "     | > log_mle: 0.27432090044021606  (0.2969358281655745)\n",
      "     | > loss_dur: 0.8192282319068909  (0.8217380946332759)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.2874, device='cuda:0')  (tensor(2.3813, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 0.3786  (0.7260340127077969)\n",
      "     | > loader_time: 0.0075  (0.018504181775179776)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:47:17 -- STEP: 80/203 -- GLOBAL_STEP: 3125\u001b[0m\n",
      "     | > loss: 1.094769835472107  (1.1135501518845554)\n",
      "     | > log_mle: 0.27569907903671265  (0.2907584205269814)\n",
      "     | > loss_dur: 0.8190707564353943  (0.8227917343378067)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.4290, device='cuda:0')  (tensor(2.3914, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 0.817  (0.7854762911796571)\n",
      "     | > loader_time: 0.4779  (0.0393124133348465)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:47:51 -- STEP: 105/203 -- GLOBAL_STEP: 3150\u001b[0m\n",
      "     | > loss: 1.0688368082046509  (1.1087746597471693)\n",
      "     | > log_mle: 0.26671451330184937  (0.28587977488835664)\n",
      "     | > loss_dur: 0.8021222949028015  (0.822894887697129)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.2330, device='cuda:0')  (tensor(2.3850, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 1.0029  (0.8392081101735436)\n",
      "     | > loader_time: 0.6084  (0.11208467256455194)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:48:27 -- STEP: 130/203 -- GLOBAL_STEP: 3175\u001b[0m\n",
      "     | > loss: 1.0478036403656006  (1.1040898075470562)\n",
      "     | > log_mle: 0.2585628628730774  (0.28156073918709396)\n",
      "     | > loss_dur: 0.789240837097168  (0.8225290715694428)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.2354, device='cuda:0')  (tensor(2.3876, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 1.224  (0.8869376457654515)\n",
      "     | > loader_time: 0.3872  (0.1575167949383075)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:49:05 -- STEP: 155/203 -- GLOBAL_STEP: 3200\u001b[0m\n",
      "     | > loss: 1.0663681030273438  (1.1004042402390515)\n",
      "     | > log_mle: 0.2613149881362915  (0.27805558404614855)\n",
      "     | > loss_dur: 0.8050531148910522  (0.8223486600383636)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.2077, device='cuda:0')  (tensor(2.3835, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 1.8019  (0.9350389403681602)\n",
      "     | > loader_time: 0.8868  (0.18724649183211783)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:49:37 -- STEP: 180/203 -- GLOBAL_STEP: 3225\u001b[0m\n",
      "     | > loss: 1.0797247886657715  (1.0974329590797431)\n",
      "     | > log_mle: 0.2562006711959839  (0.27465792364544356)\n",
      "     | > loss_dur: 0.8235241770744324  (0.8227750404013527)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.3676, device='cuda:0')  (tensor(2.3715, device='cuda:0'))\n",
      "     | > current_lr: 3.7499999999999997e-06 \n",
      "     | > step_time: 1.3731  (0.9505968557463752)\n",
      "     | > loader_time: 0.5073  (0.18836790323257438)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.04177290201187134 \u001b[0m(+0.0009530782699584961)\n",
      "     | > avg_loss:\u001b[92m 1.0378181338310242 \u001b[0m(-0.06691955029964447)\n",
      "     | > avg_log_mle:\u001b[92m 0.25298407673835754 \u001b[0m(-0.027914635837078094)\n",
      "     | > avg_loss_dur:\u001b[92m 0.7848340570926666 \u001b[0m(-0.03900490701198578)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_3248.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 16/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:50:25) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:50:29 -- STEP: 2/203 -- GLOBAL_STEP: 3250\u001b[0m\n",
      "     | > loss: 1.112408995628357  (1.1092635989189148)\n",
      "     | > log_mle: 0.29796725511550903  (0.2893078029155731)\n",
      "     | > loss_dur: 0.8144417405128479  (0.8199557960033417)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1673, device='cuda:0')  (tensor(2.2239, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 0.687  (0.7978878021240234)\n",
      "     | > loader_time: 0.0048  (0.004208922386169434)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:50:46 -- STEP: 27/203 -- GLOBAL_STEP: 3275\u001b[0m\n",
      "     | > loss: 1.0411982536315918  (1.058508109163355)\n",
      "     | > log_mle: 0.26389479637145996  (0.2783857893060755)\n",
      "     | > loss_dur: 0.7773033976554871  (0.7801223198572794)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1801, device='cuda:0')  (tensor(2.2139, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 0.4697  (0.6717049369105587)\n",
      "     | > loader_time: 0.0048  (0.011699552889223452)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:51:10 -- STEP: 52/203 -- GLOBAL_STEP: 3300\u001b[0m\n",
      "     | > loss: 1.0218114852905273  (1.0501112181406755)\n",
      "     | > log_mle: 0.2683478593826294  (0.27070299249428964)\n",
      "     | > loss_dur: 0.7534635663032532  (0.7794082222076564)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0386, device='cuda:0')  (tensor(2.1991, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 0.3977  (0.7877049308556777)\n",
      "     | > loader_time: 0.0596  (0.016297739285689138)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:51:33 -- STEP: 77/203 -- GLOBAL_STEP: 3325\u001b[0m\n",
      "     | > loss: 1.0082290172576904  (1.0448244345652595)\n",
      "     | > log_mle: 0.2515084743499756  (0.2642045733216521)\n",
      "     | > loss_dur: 0.7567204833030701  (0.780619860469521)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1140, device='cuda:0')  (tensor(2.1930, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 1.8134  (0.8183694628926067)\n",
      "     | > loader_time: 0.5991  (0.025523696626935696)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:52:13 -- STEP: 102/203 -- GLOBAL_STEP: 3350\u001b[0m\n",
      "     | > loss: 1.0203373432159424  (1.041191069518818)\n",
      "     | > log_mle: 0.23923563957214355  (0.259194539458144)\n",
      "     | > loss_dur: 0.7811017036437988  (0.7819965277232377)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1412, device='cuda:0')  (tensor(2.1839, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 0.7693  (0.9049661463382197)\n",
      "     | > loader_time: 0.7971  (0.12761766770306757)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:52:47 -- STEP: 127/203 -- GLOBAL_STEP: 3375\u001b[0m\n",
      "     | > loss: 1.0163121223449707  (1.0372834154001362)\n",
      "     | > log_mle: 0.23778694868087769  (0.25472638785369744)\n",
      "     | > loss_dur: 0.778525173664093  (0.7825570233224884)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0789, device='cuda:0')  (tensor(2.1894, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 0.686  (0.9438471550077904)\n",
      "     | > loader_time: 0.0083  (0.14704977621243698)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:53:19 -- STEP: 152/203 -- GLOBAL_STEP: 3400\u001b[0m\n",
      "     | > loss: 1.0187287330627441  (1.033712645894602)\n",
      "     | > log_mle: 0.23025822639465332  (0.25095070114261214)\n",
      "     | > loss_dur: 0.7884705662727356  (0.7827619412227682)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1013, device='cuda:0')  (tensor(2.2094, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 1.0503  (0.9518563355270185)\n",
      "     | > loader_time: 0.3039  (0.16687835517682528)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:53:53 -- STEP: 177/203 -- GLOBAL_STEP: 3425\u001b[0m\n",
      "     | > loss: 1.0059466361999512  (1.0310207032887946)\n",
      "     | > log_mle: 0.21997976303100586  (0.24750594870518822)\n",
      "     | > loss_dur: 0.7859669327735901  (0.7835147512161127)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1653, device='cuda:0')  (tensor(2.2024, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 1.1211  (0.9734925429026285)\n",
      "     | > loader_time: 0.1836  (0.17921826664337331)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:54:30 -- STEP: 202/203 -- GLOBAL_STEP: 3450\u001b[0m\n",
      "     | > loss: 1.0115962028503418  (1.0268986924449994)\n",
      "     | > log_mle: 0.22119390964508057  (0.2442166849528208)\n",
      "     | > loss_dur: 0.7904022932052612  (0.7826820048365264)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.3202, device='cuda:0')  (tensor(2.1922, device='cuda:0'))\n",
      "     | > current_lr: 4e-06 \n",
      "     | > step_time: 0.3776  (1.005500494843662)\n",
      "     | > loader_time: 0.0073  (0.18537663469220153)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.009624898433685303 \u001b[0m(-0.032148003578186035)\n",
      "     | > avg_loss:\u001b[92m 0.9752435833215714 \u001b[0m(-0.06257455050945282)\n",
      "     | > avg_log_mle:\u001b[92m 0.2246488556265831 \u001b[0m(-0.028335221111774445)\n",
      "     | > avg_loss_dur:\u001b[92m 0.7505947276949883 \u001b[0m(-0.034239329397678375)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_3451.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 17/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:54:53) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:55:13 -- STEP: 24/203 -- GLOBAL_STEP: 3475\u001b[0m\n",
      "     | > loss: 0.9905271530151367  (0.9961951300501823)\n",
      "     | > log_mle: 0.24660485982894897  (0.2522451380888621)\n",
      "     | > loss_dur: 0.7439222931861877  (0.7439499944448471)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0017, device='cuda:0')  (tensor(2.0271, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 0.5676  (0.699759860833486)\n",
      "     | > loader_time: 0.0051  (0.014887342850367228)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:55:34 -- STEP: 49/203 -- GLOBAL_STEP: 3500\u001b[0m\n",
      "     | > loss: 1.0013506412506104  (0.9870188734969314)\n",
      "     | > log_mle: 0.23474663496017456  (0.24346302480113746)\n",
      "     | > loss_dur: 0.7666040658950806  (0.7435558511286365)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0058, device='cuda:0')  (tensor(2.0914, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 0.7211  (0.7466045593728825)\n",
      "     | > loader_time: 0.0754  (0.01567886800182109)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:55:57 -- STEP: 74/203 -- GLOBAL_STEP: 3525\u001b[0m\n",
      "     | > loss: 0.9821557998657227  (0.9812468813883292)\n",
      "     | > log_mle: 0.21714377403259277  (0.2364292338087752)\n",
      "     | > loss_dur: 0.7650120258331299  (0.7448176491904903)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9820, device='cuda:0')  (tensor(2.0754, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 0.8964  (0.7931282230325648)\n",
      "     | > loader_time: 0.01  (0.022523135752291295)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:56:30 -- STEP: 99/203 -- GLOBAL_STEP: 3550\u001b[0m\n",
      "     | > loss: 0.9594557285308838  (0.9774433524921687)\n",
      "     | > log_mle: 0.21583408117294312  (0.23131918846958816)\n",
      "     | > loss_dur: 0.7436216473579407  (0.7461241652267148)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9647, device='cuda:0')  (tensor(2.0725, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 0.948  (0.858244963366576)\n",
      "     | > loader_time: 0.0115  (0.08157703370758981)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:57:03 -- STEP: 124/203 -- GLOBAL_STEP: 3575\u001b[0m\n",
      "     | > loss: 0.9609482884407043  (0.9730697495322074)\n",
      "     | > log_mle: 0.21193349361419678  (0.22690610106914275)\n",
      "     | > loss_dur: 0.7490147948265076  (0.7461636494244298)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8531, device='cuda:0')  (tensor(2.0727, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 0.8189  (0.9026829484970339)\n",
      "     | > loader_time: 0.1113  (0.11208475020623976)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:57:39 -- STEP: 149/203 -- GLOBAL_STEP: 3600\u001b[0m\n",
      "     | > loss: 0.9379361271858215  (0.9695748494775503)\n",
      "     | > log_mle: 0.2003076672554016  (0.22285669762016141)\n",
      "     | > loss_dur: 0.7376284599304199  (0.7467181526574512)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1715, device='cuda:0')  (tensor(2.0624, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 1.3113  (0.9544176143287812)\n",
      "     | > loader_time: 0.5196  (0.13054915242547155)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:58:16 -- STEP: 174/203 -- GLOBAL_STEP: 3625\u001b[0m\n",
      "     | > loss: 0.9687231183052063  (0.9672430517344639)\n",
      "     | > log_mle: 0.20191174745559692  (0.21934921645570074)\n",
      "     | > loss_dur: 0.7668113708496094  (0.7478938359638742)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9606, device='cuda:0')  (tensor(2.0580, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 1.104  (0.9865422879142323)\n",
      "     | > loader_time: 0.2778  (0.154318932829232)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:58:51 -- STEP: 199/203 -- GLOBAL_STEP: 3650\u001b[0m\n",
      "     | > loss: 0.9358537793159485  (0.9633819116419883)\n",
      "     | > log_mle: 0.18709546327590942  (0.21592628386751492)\n",
      "     | > loss_dur: 0.7487583160400391  (0.7474556283735152)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0959, device='cuda:0')  (tensor(2.0700, device='cuda:0'))\n",
      "     | > current_lr: 4.25e-06 \n",
      "     | > step_time: 0.5261  (1.0113293621408281)\n",
      "     | > loader_time: 0.0125  (0.16003767928885457)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.05852159857749938 \u001b[0m(+0.04889670014381408)\n",
      "     | > avg_loss:\u001b[92m 0.9161513969302177 \u001b[0m(-0.05909218639135361)\n",
      "     | > avg_log_mle:\u001b[92m 0.1958969607949257 \u001b[0m(-0.02875189483165741)\n",
      "     | > avg_loss_dur:\u001b[92m 0.720254436135292 \u001b[0m(-0.030340291559696198)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_3654.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 18/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 17:59:17) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:59:34 -- STEP: 21/203 -- GLOBAL_STEP: 3675\u001b[0m\n",
      "     | > loss: 0.9068145751953125  (0.9336960486003331)\n",
      "     | > log_mle: 0.21778899431228638  (0.2237210330509004)\n",
      "     | > loss_dur: 0.6890255808830261  (0.7099750155494325)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9302, device='cuda:0')  (tensor(1.8865, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 0.7063  (0.614817426318214)\n",
      "     | > loader_time: 0.0044  (0.018362090701148623)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 17:59:54 -- STEP: 46/203 -- GLOBAL_STEP: 3700\u001b[0m\n",
      "     | > loss: 0.8891762495040894  (0.9262744952803073)\n",
      "     | > log_mle: 0.18989187479019165  (0.21491364810777747)\n",
      "     | > loss_dur: 0.6992843747138977  (0.71136084717253)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.7943, device='cuda:0')  (tensor(1.9575, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 1.1075  (0.6868491846582164)\n",
      "     | > loader_time: 0.0077  (0.016248340192048447)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:00:16 -- STEP: 71/203 -- GLOBAL_STEP: 3725\u001b[0m\n",
      "     | > loss: 0.9070561528205872  (0.9206048837849792)\n",
      "     | > log_mle: 0.18454498052597046  (0.2077682656301579)\n",
      "     | > loss_dur: 0.7225111722946167  (0.7128366181548214)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0001, device='cuda:0')  (tensor(1.9386, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 1.1222  (0.7508858089715662)\n",
      "     | > loader_time: 0.0113  (0.01768629315873267)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:00:49 -- STEP: 96/203 -- GLOBAL_STEP: 3750\u001b[0m\n",
      "     | > loss: 0.8942378163337708  (0.9169576403995355)\n",
      "     | > log_mle: 0.18575149774551392  (0.2026031855493784)\n",
      "     | > loss_dur: 0.7084863185882568  (0.7143544548501572)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8828, device='cuda:0')  (tensor(1.9161, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 1.2912  (0.8254931544264158)\n",
      "     | > loader_time: 0.1104  (0.0827950562040011)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:01:28 -- STEP: 121/203 -- GLOBAL_STEP: 3775\u001b[0m\n",
      "     | > loss: 0.9162464737892151  (0.9127534169796084)\n",
      "     | > log_mle: 0.164148211479187  (0.19819352991324812)\n",
      "     | > loss_dur: 0.7520982623100281  (0.7145598870663604)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9890, device='cuda:0')  (tensor(1.9128, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 0.8442  (0.8981235460801558)\n",
      "     | > loader_time: 0.1828  (0.14286077515152856)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:01:58 -- STEP: 146/203 -- GLOBAL_STEP: 3800\u001b[0m\n",
      "     | > loss: 0.9110256433486938  (0.9095205203311084)\n",
      "     | > log_mle: 0.17395102977752686  (0.19437627596397924)\n",
      "     | > loss_dur: 0.737074613571167  (0.7151442443671292)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9546, device='cuda:0')  (tensor(1.9333, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 1.485  (0.9172448197456256)\n",
      "     | > loader_time: 0.2924  (0.15456735597897878)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:02:29 -- STEP: 171/203 -- GLOBAL_STEP: 3825\u001b[0m\n",
      "     | > loss: 0.8938021659851074  (0.9070602271989076)\n",
      "     | > log_mle: 0.1601412296295166  (0.19095408010203943)\n",
      "     | > loss_dur: 0.7336609363555908  (0.7161061470968683)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.7906, device='cuda:0')  (tensor(1.9218, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 0.9897  (0.93662647057695)\n",
      "     | > loader_time: 0.2143  (0.15919876377484957)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:03:04 -- STEP: 196/203 -- GLOBAL_STEP: 3850\u001b[0m\n",
      "     | > loss: 0.8708580136299133  (0.9036747524324729)\n",
      "     | > log_mle: 0.16382527351379395  (0.18767660643373224)\n",
      "     | > loss_dur: 0.7070327401161194  (0.715998145998741)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5011, device='cuda:0')  (tensor(1.9403, device='cuda:0'))\n",
      "     | > current_lr: 4.5e-06 \n",
      "     | > step_time: 0.6024  (0.9677938891916859)\n",
      "     | > loader_time: 0.0174  (0.1612039901772323)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.09673216938972473 \u001b[0m(+0.03821057081222535)\n",
      "     | > avg_loss:\u001b[92m 0.8633309081196785 \u001b[0m(-0.052820488810539246)\n",
      "     | > avg_log_mle:\u001b[92m 0.1679912880063057 \u001b[0m(-0.027905672788619995)\n",
      "     | > avg_loss_dur:\u001b[92m 0.6953396201133728 \u001b[0m(-0.02491481602191925)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_3857.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 19/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:03:33) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:03:49 -- STEP: 18/203 -- GLOBAL_STEP: 3875\u001b[0m\n",
      "     | > loss: 0.8733658194541931  (0.8791878687010871)\n",
      "     | > log_mle: 0.20170360803604126  (0.19584357738494873)\n",
      "     | > loss_dur: 0.6716622114181519  (0.6833442913161384)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.7208, device='cuda:0')  (tensor(1.7422, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 0.5956  (0.7257429361343384)\n",
      "     | > loader_time: 0.0067  (0.022574901580810547)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:04:10 -- STEP: 43/203 -- GLOBAL_STEP: 3900\u001b[0m\n",
      "     | > loss: 0.8435646295547485  (0.8707773630009141)\n",
      "     | > log_mle: 0.17163264751434326  (0.1880573075871135)\n",
      "     | > loss_dur: 0.6719319820404053  (0.6827200554138007)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.7588, device='cuda:0')  (tensor(1.7604, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 1.0801  (0.7728368792422983)\n",
      "     | > loader_time: 0.0057  (0.02129775424336278)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:04:32 -- STEP: 68/203 -- GLOBAL_STEP: 3925\u001b[0m\n",
      "     | > loss: 0.8653387427330017  (0.8641703961526647)\n",
      "     | > log_mle: 0.16524547338485718  (0.18088301315027125)\n",
      "     | > loss_dur: 0.7000932693481445  (0.6832873830023933)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8386, device='cuda:0')  (tensor(1.7566, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 0.4894  (0.7939571317504434)\n",
      "     | > loader_time: 0.007  (0.021215326645795035)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:05:05 -- STEP: 93/203 -- GLOBAL_STEP: 3950\u001b[0m\n",
      "     | > loss: 0.8437703847885132  (0.8607891060972727)\n",
      "     | > log_mle: 0.1553390622138977  (0.17556019816347349)\n",
      "     | > loss_dur: 0.6884313225746155  (0.6852289079337991)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8026, device='cuda:0')  (tensor(1.7527, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 0.8984  (0.8691146168657529)\n",
      "     | > loader_time: 0.2368  (0.0784114432591264)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:05:35 -- STEP: 118/203 -- GLOBAL_STEP: 3975\u001b[0m\n",
      "     | > loss: 0.839637815952301  (0.8573176391043905)\n",
      "     | > log_mle: 0.15745782852172852  (0.17108736452409776)\n",
      "     | > loss_dur: 0.6821799874305725  (0.6862302745802927)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8021, device='cuda:0')  (tensor(1.7483, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 0.8295  (0.8934368238610736)\n",
      "     | > loader_time: 0.1724  (0.11173734220407777)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:06:08 -- STEP: 143/203 -- GLOBAL_STEP: 4000\u001b[0m\n",
      "     | > loss: 0.8543322086334229  (0.854282232431265)\n",
      "     | > log_mle: 0.14853638410568237  (0.16710224059911874)\n",
      "     | > loss_dur: 0.7057958245277405  (0.6871799918321463)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.6665, device='cuda:0')  (tensor(1.7642, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 1.0064  (0.9177506236763268)\n",
      "     | > loader_time: 0.1201  (0.13753507520769026)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_4000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:06:46 -- STEP: 168/203 -- GLOBAL_STEP: 4025\u001b[0m\n",
      "     | > loss: 0.8372253179550171  (0.8518788725847289)\n",
      "     | > log_mle: 0.1476643681526184  (0.16385666635774432)\n",
      "     | > loss_dur: 0.6895609498023987  (0.6880222062269846)\n",
      "     | > amp_scaler: 65536.0  (35693.71428571428)\n",
      "     | > grad_norm: tensor(1.5977, device='cuda:0')  (tensor(1.7522, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 1.3817  (0.9315937402702513)\n",
      "     | > loader_time: 0.473  (0.1501251601037525)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:07:20 -- STEP: 193/203 -- GLOBAL_STEP: 4050\u001b[0m\n",
      "     | > loss: 0.8281750679016113  (0.848943923112642)\n",
      "     | > log_mle: 0.14095336198806763  (0.16066345340847352)\n",
      "     | > loss_dur: 0.6872217059135437  (0.6882804697041685)\n",
      "     | > amp_scaler: 65536.0  (39559.29533678755)\n",
      "     | > grad_norm: tensor(1.9600, device='cuda:0')  (tensor(1.7770, device='cuda:0'))\n",
      "     | > current_lr: 4.749999999999999e-06 \n",
      "     | > step_time: 1.0844  (0.9645327409932033)\n",
      "     | > loader_time: 0.3026  (0.15357125855480455)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.003192692995071411 \u001b[0m(-0.09353947639465332)\n",
      "     | > avg_loss:\u001b[92m 0.8147818818688393 \u001b[0m(-0.04854902625083923)\n",
      "     | > avg_log_mle:\u001b[92m 0.14050306379795074 \u001b[0m(-0.02748822420835495)\n",
      "     | > avg_loss_dur:\u001b[92m 0.6742788180708885 \u001b[0m(-0.021060802042484283)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_4060.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 20/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:07:52) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:08:06 -- STEP: 15/203 -- GLOBAL_STEP: 4075\u001b[0m\n",
      "     | > loss: 0.8047653436660767  (0.8272322734196981)\n",
      "     | > log_mle: 0.1651645302772522  (0.16939296325047812)\n",
      "     | > loss_dur: 0.6396008133888245  (0.6578393101692199)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4635, device='cuda:0')  (tensor(1.6265, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.7917  (0.6150890668233235)\n",
      "     | > loader_time: 0.0059  (0.013094027837117514)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:08:27 -- STEP: 40/203 -- GLOBAL_STEP: 4100\u001b[0m\n",
      "     | > loss: 0.8083719611167908  (0.8204805761575699)\n",
      "     | > log_mle: 0.15535908937454224  (0.16269612610340117)\n",
      "     | > loss_dur: 0.6530128717422485  (0.6577844500541686)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6660, device='cuda:0')  (tensor(1.6423, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.4592  (0.7235648155212402)\n",
      "     | > loader_time: 0.0077  (0.015505975484848025)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:08:50 -- STEP: 65/203 -- GLOBAL_STEP: 4125\u001b[0m\n",
      "     | > loss: 0.7819610238075256  (0.8123221929256733)\n",
      "     | > log_mle: 0.14384472370147705  (0.15524940490722655)\n",
      "     | > loss_dur: 0.6381163001060486  (0.6570727880184467)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.9214, device='cuda:0')  (tensor(1.7428, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.6135  (0.7906761022714469)\n",
      "     | > loader_time: 0.0103  (0.01628577159001277)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:09:16 -- STEP: 90/203 -- GLOBAL_STEP: 4150\u001b[0m\n",
      "     | > loss: 0.7860775589942932  (0.8095335212018755)\n",
      "     | > log_mle: 0.13470536470413208  (0.15009131431579595)\n",
      "     | > loss_dur: 0.6513721942901611  (0.6594422068860796)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5240, device='cuda:0')  (tensor(1.7154, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.9681  (0.8195909473631117)\n",
      "     | > loader_time: 0.2128  (0.060258597797817656)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:09:44 -- STEP: 115/203 -- GLOBAL_STEP: 4175\u001b[0m\n",
      "     | > loss: 0.7939670085906982  (0.8057939094045888)\n",
      "     | > log_mle: 0.1251583695411682  (0.14558734582818086)\n",
      "     | > loss_dur: 0.66880863904953  (0.6602065635764081)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4915, device='cuda:0')  (tensor(1.7101, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.8236  (0.834707440500674)\n",
      "     | > loader_time: 0.1663  (0.09540268856546154)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:10:19 -- STEP: 140/203 -- GLOBAL_STEP: 4200\u001b[0m\n",
      "     | > loss: 0.7854288816452026  (0.8027209596974509)\n",
      "     | > log_mle: 0.11398756504058838  (0.14162625457559322)\n",
      "     | > loss_dur: 0.6714413166046143  (0.6610947051218578)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6761, device='cuda:0')  (tensor(1.7028, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.7162  (0.8720873509134565)\n",
      "     | > loader_time: 0.1202  (0.13844680786132812)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:10:53 -- STEP: 165/203 -- GLOBAL_STEP: 4225\u001b[0m\n",
      "     | > loss: 0.8002521395683289  (0.8003413297913291)\n",
      "     | > log_mle: 0.11202132701873779  (0.13846225666277343)\n",
      "     | > loss_dur: 0.6882308125495911  (0.6618790731285555)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5014, device='cuda:0')  (tensor(1.6887, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 0.7777  (0.8991718928019206)\n",
      "     | > loader_time: 0.5964  (0.16190470348704944)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:11:27 -- STEP: 190/203 -- GLOBAL_STEP: 4250\u001b[0m\n",
      "     | > loss: 0.7745595574378967  (0.7980103539793115)\n",
      "     | > log_mle: 0.11667615175247192  (0.13526784495303515)\n",
      "     | > loss_dur: 0.6578834056854248  (0.6627425090262761)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3710, device='cuda:0')  (tensor(1.7292, device='cuda:0'))\n",
      "     | > current_lr: 4.9999999999999996e-06 \n",
      "     | > step_time: 1.0235  (0.9309670987882113)\n",
      "     | > loader_time: 0.1861  (0.170071896753813)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.021355390548706055 \u001b[0m(+0.018162697553634644)\n",
      "     | > avg_loss:\u001b[92m 0.7687069624662399 \u001b[0m(-0.046074919402599335)\n",
      "     | > avg_log_mle:\u001b[92m 0.11641066521406174 \u001b[0m(-0.024092398583889008)\n",
      "     | > avg_loss_dur:\u001b[92m 0.6522962972521782 \u001b[0m(-0.021982520818710327)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_4263.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 21/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:12:04) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:12:17 -- STEP: 12/203 -- GLOBAL_STEP: 4275\u001b[0m\n",
      "     | > loss: 0.7931566834449768  (0.7816838224728903)\n",
      "     | > log_mle: 0.14359277486801147  (0.14573698242505392)\n",
      "     | > loss_dur: 0.6495639085769653  (0.6359468400478363)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8492, device='cuda:0')  (tensor(1.5634, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 0.3787  (0.6561604142189026)\n",
      "     | > loader_time: 0.0047  (0.017100056012471516)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:12:35 -- STEP: 37/203 -- GLOBAL_STEP: 4300\u001b[0m\n",
      "     | > loss: 0.7842200994491577  (0.7722983988555702)\n",
      "     | > log_mle: 0.11647313833236694  (0.13892458097354785)\n",
      "     | > loss_dur: 0.6677469611167908  (0.6333738178820223)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4928, device='cuda:0')  (tensor(1.5705, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 0.2609  (0.698639186652931)\n",
      "     | > loader_time: 0.0056  (0.016624856639552762)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:13:04 -- STEP: 62/203 -- GLOBAL_STEP: 4325\u001b[0m\n",
      "     | > loss: 0.742938756942749  (0.7648719693383863)\n",
      "     | > log_mle: 0.10952425003051758  (0.13175261885889114)\n",
      "     | > loss_dur: 0.6334145069122314  (0.6331193504794952)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3974, device='cuda:0')  (tensor(1.5911, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 0.456  (0.8595616932838194)\n",
      "     | > loader_time: 0.0063  (0.021629710351267168)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:13:35 -- STEP: 87/203 -- GLOBAL_STEP: 4350\u001b[0m\n",
      "     | > loss: 0.7527868747711182  (0.7618352407696604)\n",
      "     | > log_mle: 0.11301165819168091  (0.12648606300353996)\n",
      "     | > loss_dur: 0.6397752165794373  (0.6353491777661203)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3451, device='cuda:0')  (tensor(1.5785, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 2.5903  (0.917841911315918)\n",
      "     | > loader_time: 0.1121  (0.06431654952038293)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:14:03 -- STEP: 112/203 -- GLOBAL_STEP: 4375\u001b[0m\n",
      "     | > loss: 0.7444453239440918  (0.7581099112118993)\n",
      "     | > log_mle: 0.10701459646224976  (0.12197424631033618)\n",
      "     | > loss_dur: 0.637430727481842  (0.6361356649015633)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6216, device='cuda:0')  (tensor(1.5891, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 0.8834  (0.9220893489462989)\n",
      "     | > loader_time: 0.1247  (0.0958276242017746)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:14:32 -- STEP: 137/203 -- GLOBAL_STEP: 4400\u001b[0m\n",
      "     | > loss: 0.746954083442688  (0.7555662950460058)\n",
      "     | > log_mle: 0.10340350866317749  (0.11805192136416465)\n",
      "     | > loss_dur: 0.6435505747795105  (0.6375143736818412)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4728, device='cuda:0')  (tensor(1.6102, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 1.0949  (0.92439082591203)\n",
      "     | > loader_time: 0.1148  (0.11243003824331464)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:15:06 -- STEP: 162/203 -- GLOBAL_STEP: 4425\u001b[0m\n",
      "     | > loss: 0.7341980338096619  (0.7532257478178284)\n",
      "     | > log_mle: 0.09870702028274536  (0.11492366592089331)\n",
      "     | > loss_dur: 0.6354910135269165  (0.6383020818969352)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3282, device='cuda:0')  (tensor(1.5909, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 1.1015  (0.9475905115221753)\n",
      "     | > loader_time: 0.1661  (0.14108435312906908)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:15:42 -- STEP: 187/203 -- GLOBAL_STEP: 4450\u001b[0m\n",
      "     | > loss: 0.7295986413955688  (0.7511776421159346)\n",
      "     | > log_mle: 0.085726797580719  (0.11174502060375108)\n",
      "     | > loss_dur: 0.6438718438148499  (0.6394326215121839)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.8528, device='cuda:0')  (tensor(1.6161, device='cuda:0'))\n",
      "     | > current_lr: 5.25e-06 \n",
      "     | > step_time: 0.9699  (0.9808426834045247)\n",
      "     | > loader_time: 0.2001  (0.15107799468830946)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.04494151473045349 \u001b[0m(+0.023586124181747437)\n",
      "     | > avg_loss:\u001b[92m 0.7262382134795189 \u001b[0m(-0.04246874898672104)\n",
      "     | > avg_log_mle:\u001b[92m 0.09252315014600754 \u001b[0m(-0.0238875150680542)\n",
      "     | > avg_loss_dur:\u001b[92m 0.6337150633335114 \u001b[0m(-0.01858123391866684)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_4466.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 22/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:16:21) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:16:29 -- STEP: 9/203 -- GLOBAL_STEP: 4475\u001b[0m\n",
      "     | > loss: 0.7225934863090515  (0.7383821805318197)\n",
      "     | > log_mle: 0.1287539005279541  (0.12217613723542955)\n",
      "     | > loss_dur: 0.5938395857810974  (0.6162060432963901)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5185, device='cuda:0')  (tensor(1.3854, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 0.3943  (0.5776757929060194)\n",
      "     | > loader_time: 0.0051  (0.010461886723836264)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:16:45 -- STEP: 34/203 -- GLOBAL_STEP: 4500\u001b[0m\n",
      "     | > loss: 0.7363888025283813  (0.7290925085544586)\n",
      "     | > log_mle: 0.1061941385269165  (0.11718170783099006)\n",
      "     | > loss_dur: 0.6301946640014648  (0.6119108007234685)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3295, device='cuda:0')  (tensor(1.4064, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 0.6036  (0.5949579196817735)\n",
      "     | > loader_time: 0.0087  (0.013384678784538718)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:17:05 -- STEP: 59/203 -- GLOBAL_STEP: 4525\u001b[0m\n",
      "     | > loss: 0.7048741579055786  (0.7219588110002421)\n",
      "     | > log_mle: 0.09033447504043579  (0.1096244626126047)\n",
      "     | > loss_dur: 0.6145396828651428  (0.6123343483876373)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2750, device='cuda:0')  (tensor(1.4530, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 0.8797  (0.6803225945618194)\n",
      "     | > loader_time: 0.0084  (0.015352839130466263)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:17:33 -- STEP: 84/203 -- GLOBAL_STEP: 4550\u001b[0m\n",
      "     | > loss: 0.7152262926101685  (0.7187214976265317)\n",
      "     | > log_mle: 0.09855043888092041  (0.10426910860197884)\n",
      "     | > loss_dur: 0.616675853729248  (0.614452389024553)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2103, device='cuda:0')  (tensor(1.4644, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 0.5985  (0.7687479058901472)\n",
      "     | > loader_time: 0.1705  (0.0475994007928031)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:18:03 -- STEP: 109/203 -- GLOBAL_STEP: 4575\u001b[0m\n",
      "     | > loss: 0.693253219127655  (0.715609137071382)\n",
      "     | > log_mle: 0.08613818883895874  (0.09996896654094031)\n",
      "     | > loss_dur: 0.6071150302886963  (0.6156401705304418)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3888, device='cuda:0')  (tensor(1.4640, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 0.9991  (0.7983617979452153)\n",
      "     | > loader_time: 0.8105  (0.10326256227055824)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:18:31 -- STEP: 134/203 -- GLOBAL_STEP: 4600\u001b[0m\n",
      "     | > loss: 0.7041450142860413  (0.7131122066013849)\n",
      "     | > log_mle: 0.07534998655319214  (0.09613426318809168)\n",
      "     | > loss_dur: 0.6287950277328491  (0.6169779434132933)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5503, device='cuda:0')  (tensor(1.4593, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 1.0021  (0.8196880461564706)\n",
      "     | > loader_time: 0.1171  (0.12344269610162995)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:19:03 -- STEP: 159/203 -- GLOBAL_STEP: 4625\u001b[0m\n",
      "     | > loss: 0.6903306245803833  (0.7109407737569989)\n",
      "     | > log_mle: 0.07447147369384766  (0.0929806427385822)\n",
      "     | > loss_dur: 0.6158591508865356  (0.6179601310184167)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1449, device='cuda:0')  (tensor(1.4617, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 0.8305  (0.8576546495065752)\n",
      "     | > loader_time: 0.2171  (0.13572867861333884)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:19:39 -- STEP: 184/203 -- GLOBAL_STEP: 4650\u001b[0m\n",
      "     | > loss: 0.6897842288017273  (0.7091201407753908)\n",
      "     | > log_mle: 0.06859058141708374  (0.09001072496175766)\n",
      "     | > loss_dur: 0.6211936473846436  (0.6191094158136324)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.9273, device='cuda:0')  (tensor(1.4467, device='cuda:0'))\n",
      "     | > current_lr: 5.5e-06 \n",
      "     | > step_time: 1.1021  (0.8958755666794987)\n",
      "     | > loader_time: 0.115  (0.15642836301223095)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.010160565376281738 \u001b[0m(-0.03478094935417175)\n",
      "     | > avg_loss:\u001b[92m 0.6894413754343987 \u001b[0m(-0.03679683804512024)\n",
      "     | > avg_log_mle:\u001b[92m 0.07091696560382843 \u001b[0m(-0.021606184542179108)\n",
      "     | > avg_loss_dur:\u001b[92m 0.6185244098305702 \u001b[0m(-0.015190653502941132)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_4669.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 23/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:20:22) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:20:29 -- STEP: 6/203 -- GLOBAL_STEP: 4675\u001b[0m\n",
      "     | > loss: 0.6653659343719482  (0.698774665594101)\n",
      "     | > log_mle: 0.0927499532699585  (0.1021890640258789)\n",
      "     | > loss_dur: 0.5726159811019897  (0.596585601568222)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2539, device='cuda:0')  (tensor(1.3408, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 0.5883  (0.86170494556427)\n",
      "     | > loader_time: 0.0049  (0.03017258644104004)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:20:49 -- STEP: 31/203 -- GLOBAL_STEP: 4700\u001b[0m\n",
      "     | > loss: 0.6737747192382812  (0.689587858415419)\n",
      "     | > log_mle: 0.09468048810958862  (0.09695730286259804)\n",
      "     | > loss_dur: 0.5790942311286926  (0.5926305555528212)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2164, device='cuda:0')  (tensor(1.2970, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 0.5263  (0.7959292165694698)\n",
      "     | > loader_time: 0.0591  (0.01631575246011057)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:21:12 -- STEP: 56/203 -- GLOBAL_STEP: 4725\u001b[0m\n",
      "     | > loss: 0.6756494641304016  (0.6827796697616577)\n",
      "     | > log_mle: 0.07221466302871704  (0.08933901467493602)\n",
      "     | > loss_dur: 0.6034348011016846  (0.593440655086722)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.7800, device='cuda:0')  (tensor(1.3087, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 1.2074  (0.8363165259361267)\n",
      "     | > loader_time: 0.0845  (0.020001121929713657)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:21:47 -- STEP: 81/203 -- GLOBAL_STEP: 4750\u001b[0m\n",
      "     | > loss: 0.6811428070068359  (0.679816170975014)\n",
      "     | > log_mle: 0.05750662088394165  (0.08378985266626615)\n",
      "     | > loss_dur: 0.6236361861228943  (0.5960263183087482)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6291, device='cuda:0')  (tensor(1.3436, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 1.4861  (0.8847744406005482)\n",
      "     | > loader_time: 1.3253  (0.13193026884102527)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:22:22 -- STEP: 106/203 -- GLOBAL_STEP: 4775\u001b[0m\n",
      "     | > loss: 0.6638556718826294  (0.6772858809750035)\n",
      "     | > log_mle: 0.05816525220870972  (0.07950457649410894)\n",
      "     | > loss_dur: 0.6056904196739197  (0.5977813044808951)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5246, device='cuda:0')  (tensor(1.3729, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 1.6012  (0.917159136736168)\n",
      "     | > loader_time: 0.2901  (0.18735597268590387)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:22:57 -- STEP: 131/203 -- GLOBAL_STEP: 4800\u001b[0m\n",
      "     | > loss: 0.6622788906097412  (0.6746020544576281)\n",
      "     | > log_mle: 0.06297218799591064  (0.07569778238544025)\n",
      "     | > loss_dur: 0.5993067026138306  (0.5989042720721882)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2025, device='cuda:0')  (tensor(1.4046, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 0.8407  (0.9569270046612689)\n",
      "     | > loader_time: 0.1808  (0.20296578734885645)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:23:28 -- STEP: 156/203 -- GLOBAL_STEP: 4825\u001b[0m\n",
      "     | > loss: 0.6747652888298035  (0.6728128848167568)\n",
      "     | > log_mle: 0.05777221918106079  (0.07265982948816735)\n",
      "     | > loss_dur: 0.6169930696487427  (0.6001530553285893)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.7528, device='cuda:0')  (tensor(1.3953, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 0.7664  (0.9663840364187192)\n",
      "     | > loader_time: 0.0117  (0.2068431835908156)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:23:59 -- STEP: 181/203 -- GLOBAL_STEP: 4850\u001b[0m\n",
      "     | > loss: 0.6482990384101868  (0.6711969981536026)\n",
      "     | > log_mle: 0.04328429698944092  (0.06977147884790408)\n",
      "     | > loss_dur: 0.6050147414207458  (0.601425519305698)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1848, device='cuda:0')  (tensor(1.3997, device='cuda:0'))\n",
      "     | > current_lr: 5.75e-06 \n",
      "     | > step_time: 0.9937  (0.9800062403494482)\n",
      "     | > loader_time: 0.1886  (0.20340271285884287)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.0030634403228759766 \u001b[0m(-0.007097125053405762)\n",
      "     | > avg_loss:\u001b[92m 0.656677708029747 \u001b[0m(-0.03276366740465164)\n",
      "     | > avg_log_mle:\u001b[92m 0.0516873300075531 \u001b[0m(-0.01922963559627533)\n",
      "     | > avg_loss_dur:\u001b[92m 0.6049903780221939 \u001b[0m(-0.013534031808376312)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_4872.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 24/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:24:49) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:24:56 -- STEP: 3/203 -- GLOBAL_STEP: 4875\u001b[0m\n",
      "     | > loss: 0.6549938321113586  (0.674060583114624)\n",
      "     | > log_mle: 0.08593928813934326  (0.087778906027476)\n",
      "     | > loss_dur: 0.5690545439720154  (0.5862816770871481)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(0.9425, device='cuda:0')  (tensor(1.1354, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 0.4975  (0.9304990768432617)\n",
      "     | > loader_time: 0.004  (0.03175743420918783)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:25:13 -- STEP: 28/203 -- GLOBAL_STEP: 4900\u001b[0m\n",
      "     | > loss: 0.6449980139732361  (0.6536344417503902)\n",
      "     | > log_mle: 0.07194316387176514  (0.07827376467841014)\n",
      "     | > loss_dur: 0.573054850101471  (0.57536067707198)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3441, device='cuda:0')  (tensor(1.2354, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 0.7917  (0.6950400471687317)\n",
      "     | > loader_time: 0.0071  (0.017091572284698483)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:25:34 -- STEP: 53/203 -- GLOBAL_STEP: 4925\u001b[0m\n",
      "     | > loss: 0.6293375492095947  (0.6482538839556137)\n",
      "     | > log_mle: 0.06478619575500488  (0.0712128191624048)\n",
      "     | > loss_dur: 0.5645513534545898  (0.577041064793209)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.2695, device='cuda:0')  (tensor(1.3659, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 1.0018  (0.7486428899585077)\n",
      "     | > loader_time: 0.0784  (0.01734903173626594)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:25:57 -- STEP: 78/203 -- GLOBAL_STEP: 4950\u001b[0m\n",
      "     | > loss: 0.6391908526420593  (0.6448952846038037)\n",
      "     | > log_mle: 0.050506591796875  (0.06537824104993772)\n",
      "     | > loss_dur: 0.5886842608451843  (0.579517043553866)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0162, device='cuda:0')  (tensor(1.3802, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 0.4309  (0.7935159359222806)\n",
      "     | > loader_time: 0.1179  (0.027795253655849363)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:26:34 -- STEP: 103/203 -- GLOBAL_STEP: 4975\u001b[0m\n",
      "     | > loss: 0.6370440125465393  (0.6431112590345367)\n",
      "     | > log_mle: 0.05050337314605713  (0.061115619048331585)\n",
      "     | > loss_dur: 0.5865406394004822  (0.5819956399862046)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2076, device='cuda:0')  (tensor(1.3478, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 1.7027  (0.8791200698000716)\n",
      "     | > loader_time: 0.7968  (0.09830106577826933)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:27:10 -- STEP: 128/203 -- GLOBAL_STEP: 5000\u001b[0m\n",
      "     | > loss: 0.6207293272018433  (0.6405599364079537)\n",
      "     | > log_mle: 0.039542555809020996  (0.05720273032784465)\n",
      "     | > loss_dur: 0.5811867713928223  (0.5833572060801088)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3605, device='cuda:0')  (tensor(1.3564, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 1.4323  (0.9268891997635367)\n",
      "     | > loader_time: 0.1609  (0.14038945734500885)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_5000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:27:50 -- STEP: 153/203 -- GLOBAL_STEP: 5025\u001b[0m\n",
      "     | > loss: 0.6392521858215332  (0.6388005213020675)\n",
      "     | > log_mle: 0.03982746601104736  (0.05409499827553246)\n",
      "     | > loss_dur: 0.5994247198104858  (0.5847055230265349)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2586, device='cuda:0')  (tensor(1.3550, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 1.5876  (0.9565968840729958)\n",
      "     | > loader_time: 0.289  (0.14770560015260778)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:28:21 -- STEP: 178/203 -- GLOBAL_STEP: 5050\u001b[0m\n",
      "     | > loss: 0.6212095618247986  (0.6375166971362038)\n",
      "     | > log_mle: 0.03511321544647217  (0.0513175190164802)\n",
      "     | > loss_dur: 0.5860963463783264  (0.5861991781197237)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1653, device='cuda:0')  (tensor(1.3280, device='cuda:0'))\n",
      "     | > current_lr: 6e-06 \n",
      "     | > step_time: 2.001  (0.9688555363858685)\n",
      "     | > loader_time: 0.5735  (0.15833656975392546)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.036106258630752563 \u001b[0m(+0.03304281830787659)\n",
      "     | > avg_loss:\u001b[92m 0.6292060390114784 \u001b[0m(-0.027471669018268585)\n",
      "     | > avg_log_mle:\u001b[92m 0.033779971301555634 \u001b[0m(-0.017907358705997467)\n",
      "     | > avg_loss_dur:\u001b[92m 0.5954260677099228 \u001b[0m(-0.009564310312271118)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_5075.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 25/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:29:14) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:29:17 -- STEP: 0/203 -- GLOBAL_STEP: 5075\u001b[0m\n",
      "     | > loss: 0.6679320335388184  (0.6679320335388184)\n",
      "     | > log_mle: 0.07036316394805908  (0.07036316394805908)\n",
      "     | > loss_dur: 0.5975688695907593  (0.5975688695907593)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1842, device='cuda:0')  (tensor(1.1842, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 1.0078  (1.007826566696167)\n",
      "     | > loader_time: 1.8909  (1.8908727169036865)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:29:33 -- STEP: 25/203 -- GLOBAL_STEP: 5100\u001b[0m\n",
      "     | > loss: 0.6267611384391785  (0.6236770915985107)\n",
      "     | > log_mle: 0.03726518154144287  (0.06056777715682984)\n",
      "     | > loss_dur: 0.5894959568977356  (0.563109314441681)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8999, device='cuda:0')  (tensor(1.1287, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 0.6129  (0.6272877025604249)\n",
      "     | > loader_time: 0.0074  (0.007603931427001953)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:29:57 -- STEP: 50/203 -- GLOBAL_STEP: 5125\u001b[0m\n",
      "     | > loss: 0.6104161143302917  (0.6177598750591277)\n",
      "     | > log_mle: 0.04490852355957031  (0.05354451656341553)\n",
      "     | > loss_dur: 0.5655075907707214  (0.5642153584957121)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0566, device='cuda:0')  (tensor(1.1883, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 1.5122  (0.7827504348754881)\n",
      "     | > loader_time: 0.0084  (0.00934939384460449)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:30:24 -- STEP: 75/203 -- GLOBAL_STEP: 5150\u001b[0m\n",
      "     | > loss: 0.6046691536903381  (0.6137684051195779)\n",
      "     | > log_mle: 0.03865468502044678  (0.04752485831578573)\n",
      "     | > loss_dur: 0.5660144686698914  (0.5662435468037924)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1614, device='cuda:0')  (tensor(1.2327, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 1.0694  (0.8419656880696613)\n",
      "     | > loader_time: 1.5121  (0.03851644515991211)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:31:02 -- STEP: 100/203 -- GLOBAL_STEP: 5175\u001b[0m\n",
      "     | > loss: 0.5948977470397949  (0.611956224441528)\n",
      "     | > log_mle: 0.03243708610534668  (0.04336167812347412)\n",
      "     | > loss_dur: 0.5624606609344482  (0.5685945463180545)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0583, device='cuda:0')  (tensor(1.2474, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 0.6075  (0.9351079607009888)\n",
      "     | > loader_time: 0.0085  (0.10561257123947143)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:31:42 -- STEP: 125/203 -- GLOBAL_STEP: 5200\u001b[0m\n",
      "     | > loss: 0.6227700114250183  (0.6097172708511349)\n",
      "     | > log_mle: 0.018122851848602295  (0.039723301887512205)\n",
      "     | > loss_dur: 0.604647159576416  (0.5699939689636233)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.5065, device='cuda:0')  (tensor(1.2687, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 1.817  (1.0044446334838868)\n",
      "     | > loader_time: 0.1912  (0.14906516838073736)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:32:16 -- STEP: 150/203 -- GLOBAL_STEP: 5225\u001b[0m\n",
      "     | > loss: 0.6157681941986084  (0.6078517369429269)\n",
      "     | > log_mle: 0.021415352821350098  (0.03669922312100729)\n",
      "     | > loss_dur: 0.5943528413772583  (0.5711525138219199)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.2299, device='cuda:0')  (tensor(1.3340, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 1.0658  (1.0211956373850506)\n",
      "     | > loader_time: 0.2764  (0.1684237941106161)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:32:48 -- STEP: 175/203 -- GLOBAL_STEP: 5250\u001b[0m\n",
      "     | > loss: 0.5854130983352661  (0.6066526831899369)\n",
      "     | > log_mle: 0.016727447509765625  (0.03407419954027448)\n",
      "     | > loss_dur: 0.5686856508255005  (0.5725784836496627)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3102, device='cuda:0')  (tensor(1.3343, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 1.8403  (1.032008612496513)\n",
      "     | > loader_time: 0.0124  (0.1678337669372559)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:33:19 -- STEP: 200/203 -- GLOBAL_STEP: 5275\u001b[0m\n",
      "     | > loss: 0.5854584574699402  (0.6045096653699877)\n",
      "     | > log_mle: 0.009942471981048584  (0.03145378321409225)\n",
      "     | > loss_dur: 0.5755159854888916  (0.5730558821558954)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(3.2871, device='cuda:0')  (tensor(1.4193, device='cuda:0'))\n",
      "     | > current_lr: 6.2499999999999995e-06 \n",
      "     | > step_time: 0.6257  (1.0328601002693185)\n",
      "     | > loader_time: 0.0135  (0.16910396695137028)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.07506230473518372 \u001b[0m(+0.03895604610443115)\n",
      "     | > avg_loss:\u001b[92m 0.5988088175654411 \u001b[0m(-0.030397221446037292)\n",
      "     | > avg_log_mle:\u001b[92m 0.016362465918064117 \u001b[0m(-0.017417505383491516)\n",
      "     | > avg_loss_dur:\u001b[92m 0.582446351647377 \u001b[0m(-0.012979716062545776)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_5278.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 26/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:33:49) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:34:07 -- STEP: 22/203 -- GLOBAL_STEP: 5300\u001b[0m\n",
      "     | > loss: 0.5846452713012695  (0.5937274884093892)\n",
      "     | > log_mle: 0.042990148067474365  (0.045505266297947274)\n",
      "     | > loss_dur: 0.5416551232337952  (0.5482222221114419)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(0.9411, device='cuda:0')  (tensor(1.0658, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 0.5807  (0.5812027562748302)\n",
      "     | > loader_time: 0.006  (0.005359107797796076)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:34:29 -- STEP: 47/203 -- GLOBAL_STEP: 5325\u001b[0m\n",
      "     | > loss: 0.5697319507598877  (0.589073914162656)\n",
      "     | > log_mle: 0.02573549747467041  (0.037541187824086934)\n",
      "     | > loss_dur: 0.5439964532852173  (0.5515327263385693)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0615, device='cuda:0')  (tensor(1.1993, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 0.3068  (0.7395815595667413)\n",
      "     | > loader_time: 0.0071  (0.017255803371997586)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:34:54 -- STEP: 72/203 -- GLOBAL_STEP: 5350\u001b[0m\n",
      "     | > loss: 0.5711784958839417  (0.585410212477048)\n",
      "     | > log_mle: 0.023118674755096436  (0.031614041990703994)\n",
      "     | > loss_dur: 0.5480598211288452  (0.5537961704863446)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(0.9697, device='cuda:0')  (tensor(1.1527, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 0.5034  (0.7892444233099619)\n",
      "     | > loader_time: 0.008  (0.05028928981886968)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:35:35 -- STEP: 97/203 -- GLOBAL_STEP: 5375\u001b[0m\n",
      "     | > loss: 0.5686327815055847  (0.583500636607101)\n",
      "     | > log_mle: 0.020527958869934082  (0.027453134354856815)\n",
      "     | > loss_dur: 0.5481048226356506  (0.5560475022522446)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3059, device='cuda:0')  (tensor(1.1958, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 1.5912  (0.8790500803092092)\n",
      "     | > loader_time: 0.5171  (0.15504973696679183)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:36:20 -- STEP: 122/203 -- GLOBAL_STEP: 5400\u001b[0m\n",
      "     | > loss: 0.563284158706665  (0.580779580796351)\n",
      "     | > log_mle: 0.011117100715637207  (0.023777977364962212)\n",
      "     | > loss_dur: 0.5521670579910278  (0.5570016034313894)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(0.8209, device='cuda:0')  (tensor(1.2632, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 0.5994  (0.9748064377268807)\n",
      "     | > loader_time: 0.2016  (0.2133839931644377)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:37:09 -- STEP: 147/203 -- GLOBAL_STEP: 5425\u001b[0m\n",
      "     | > loss: 0.5621260404586792  (0.5791787115895016)\n",
      "     | > log_mle: 0.002847015857696533  (0.020795343684501384)\n",
      "     | > loss_dur: 0.5592790246009827  (0.5583833679050006)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4807, device='cuda:0')  (tensor(1.2954, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 1.1285  (1.0601890898075232)\n",
      "     | > loader_time: 0.7761  (0.2603176483491651)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:37:43 -- STEP: 172/203 -- GLOBAL_STEP: 5450\u001b[0m\n",
      "     | > loss: 0.5657651424407959  (0.5776351461576859)\n",
      "     | > log_mle: -0.001104593276977539  (0.018215262612631157)\n",
      "     | > loss_dur: 0.5668697357177734  (0.5594198835450549)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4423, device='cuda:0')  (tensor(1.2791, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 1.4817  (1.0714433248652966)\n",
      "     | > loader_time: 0.1167  (0.25545829534530645)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:38:22 -- STEP: 197/203 -- GLOBAL_STEP: 5475\u001b[0m\n",
      "     | > loss: 0.560706615447998  (0.575787768146108)\n",
      "     | > log_mle: -0.0044384002685546875  (0.015725003281220562)\n",
      "     | > loss_dur: 0.5651450157165527  (0.5600627648648878)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5448, device='cuda:0')  (tensor(1.2605, device='cuda:0'))\n",
      "     | > current_lr: 6.5e-06 \n",
      "     | > step_time: 0.6356  (1.0958317657412604)\n",
      "     | > loader_time: 0.1952  (0.2571233395997645)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.1473155915737152 \u001b[0m(+0.0722532868385315)\n",
      "     | > avg_loss:\u001b[92m 0.5730816349387169 \u001b[0m(-0.025727182626724243)\n",
      "     | > avg_log_mle:\u001b[92m 0.0008618608117103577 \u001b[0m(-0.01550060510635376)\n",
      "     | > avg_loss_dur:\u001b[92m 0.5722197741270065 \u001b[0m(-0.010226577520370483)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_5481.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 27/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:38:52) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:39:09 -- STEP: 19/203 -- GLOBAL_STEP: 5500\u001b[0m\n",
      "     | > loss: 0.5564190745353699  (0.5636626638864217)\n",
      "     | > log_mle: 0.024614274501800537  (0.02951963638004504)\n",
      "     | > loss_dur: 0.5318048000335693  (0.5341430275063768)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0726, device='cuda:0')  (tensor(1.0907, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 0.5973  (0.6561678334286338)\n",
      "     | > loader_time: 0.0053  (0.021093544207121197)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:39:30 -- STEP: 44/203 -- GLOBAL_STEP: 5525\u001b[0m\n",
      "     | > loss: 0.5694788694381714  (0.5600300864739851)\n",
      "     | > log_mle: 0.022151410579681396  (0.022891842506148598)\n",
      "     | > loss_dur: 0.54732745885849  (0.5371382439678367)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3114, device='cuda:0')  (tensor(1.2092, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 1.3991  (0.7562170191244649)\n",
      "     | > loader_time: 0.0805  (0.02430062402378429)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:39:55 -- STEP: 69/203 -- GLOBAL_STEP: 5550\u001b[0m\n",
      "     | > loss: 0.5508087873458862  (0.5558030510294265)\n",
      "     | > log_mle: -0.00019550323486328125  (0.01661186287368553)\n",
      "     | > loss_dur: 0.5510042905807495  (0.5391911881557412)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5117, device='cuda:0')  (tensor(1.2304, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 0.5194  (0.8020506734433384)\n",
      "     | > loader_time: 0.0065  (0.04300478921420332)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:40:29 -- STEP: 94/203 -- GLOBAL_STEP: 5575\u001b[0m\n",
      "     | > loss: 0.5693280100822449  (0.5540220287251982)\n",
      "     | > log_mle: -0.011551141738891602  (0.01225754238189535)\n",
      "     | > loss_dur: 0.5808791518211365  (0.5417644863433027)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0929, device='cuda:0')  (tensor(1.2087, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 1.2289  (0.8444228654212141)\n",
      "     | > loader_time: 0.396  (0.13641491088461372)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:41:10 -- STEP: 119/203 -- GLOBAL_STEP: 5600\u001b[0m\n",
      "     | > loss: 0.5425926446914673  (0.5513460801429109)\n",
      "     | > log_mle: -0.012104570865631104  (0.008660291423316762)\n",
      "     | > loss_dur: 0.5546972155570984  (0.5426857887195939)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.0590, device='cuda:0')  (tensor(1.4834, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 0.7824  (0.8962416588759223)\n",
      "     | > loader_time: 0.1943  (0.22809346383359255)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:41:50 -- STEP: 144/203 -- GLOBAL_STEP: 5625\u001b[0m\n",
      "     | > loss: 0.5299931168556213  (0.5494571117063364)\n",
      "     | > log_mle: -0.00531458854675293  (0.005602247599098418)\n",
      "     | > loss_dur: 0.5353077054023743  (0.5438548641072378)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.9589, device='cuda:0')  (tensor(1.5314, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 1.8819  (0.9557310011651781)\n",
      "     | > loader_time: 0.8073  (0.24872714446650612)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:42:24 -- STEP: 169/203 -- GLOBAL_STEP: 5650\u001b[0m\n",
      "     | > loss: 0.5429597496986389  (0.5481223703841485)\n",
      "     | > log_mle: -0.01345759630203247  (0.003135244169178799)\n",
      "     | > loss_dur: 0.5564173460006714  (0.5449871262149698)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(0.9431, device='cuda:0')  (tensor(1.5048, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 0.8996  (0.9792767177672076)\n",
      "     | > loader_time: 0.1767  (0.24556230652261768)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:42:59 -- STEP: 194/203 -- GLOBAL_STEP: 5675\u001b[0m\n",
      "     | > loss: 0.522994875907898  (0.5463184720462124)\n",
      "     | > log_mle: -0.021177709102630615  (0.0006644566034533315)\n",
      "     | > loss_dur: 0.5441725850105286  (0.5456540154427595)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.7652, device='cuda:0')  (tensor(1.5139, device='cuda:0'))\n",
      "     | > current_lr: 6.75e-06 \n",
      "     | > step_time: 1.3657  (1.0031171705304962)\n",
      "     | > loader_time: 0.015  (0.24345314994300762)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.019773662090301514 \u001b[0m(-0.1275419294834137)\n",
      "     | > avg_loss:\u001b[92m 0.5450829118490219 \u001b[0m(-0.027998723089694977)\n",
      "     | > avg_log_mle:\u001b[92m -0.01463966816663742 \u001b[0m(-0.015501528978347778)\n",
      "     | > avg_loss_dur:\u001b[92m 0.5597225800156593 \u001b[0m(-0.012497194111347198)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_5684.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 28/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:43:32) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:43:49 -- STEP: 16/203 -- GLOBAL_STEP: 5700\u001b[0m\n",
      "     | > loss: 0.5447678565979004  (0.5337864980101584)\n",
      "     | > log_mle: 0.013743937015533447  (0.015134621411561966)\n",
      "     | > loss_dur: 0.5310239195823669  (0.5186518803238869)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3063, device='cuda:0')  (tensor(1.2841, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 0.6004  (0.7784572690725327)\n",
      "     | > loader_time: 0.0073  (0.025875002145767212)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:44:10 -- STEP: 41/203 -- GLOBAL_STEP: 5725\u001b[0m\n",
      "     | > loss: 0.5228431820869446  (0.5302026111905167)\n",
      "     | > log_mle: -0.003129720687866211  (0.009225547313690187)\n",
      "     | > loss_dur: 0.5259729027748108  (0.5209770653305983)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.7130, device='cuda:0')  (tensor(1.4030, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 0.8552  (0.7927216436804795)\n",
      "     | > loader_time: 0.0065  (0.03326404966959138)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:44:33 -- STEP: 66/203 -- GLOBAL_STEP: 5750\u001b[0m\n",
      "     | > loss: 0.5264337062835693  (0.525579114754995)\n",
      "     | > log_mle: -0.016764402389526367  (0.002706492489034481)\n",
      "     | > loss_dur: 0.5431981086730957  (0.5228726231690607)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0574, device='cuda:0')  (tensor(1.3840, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 0.4056  (0.8091794974876173)\n",
      "     | > loader_time: 0.0062  (0.057211467714020685)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:45:04 -- STEP: 91/203 -- GLOBAL_STEP: 5775\u001b[0m\n",
      "     | > loss: 0.5138370990753174  (0.5237235007705272)\n",
      "     | > log_mle: -0.01909691095352173  (-0.0015547707840636524)\n",
      "     | > loss_dur: 0.5329340100288391  (0.5252782722095864)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1489, device='cuda:0')  (tensor(1.3461, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 0.4855  (0.8436163100567493)\n",
      "     | > loader_time: 0.007  (0.11827314030993114)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:45:43 -- STEP: 116/203 -- GLOBAL_STEP: 5800\u001b[0m\n",
      "     | > loss: 0.5071593523025513  (0.5216779164199175)\n",
      "     | > log_mle: -0.024826884269714355  (-0.005244328030224504)\n",
      "     | > loss_dur: 0.5319862365722656  (0.526922244963975)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3841, device='cuda:0')  (tensor(1.3682, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 1.9148  (0.9148642256342131)\n",
      "     | > loader_time: 0.9085  (0.1755691067925815)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:46:20 -- STEP: 141/203 -- GLOBAL_STEP: 5825\u001b[0m\n",
      "     | > loss: 0.5098460912704468  (0.519984103686421)\n",
      "     | > log_mle: -0.025159060955047607  (-0.008343368980056005)\n",
      "     | > loss_dur: 0.5350051522254944  (0.5283274730892048)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6044, device='cuda:0')  (tensor(1.3755, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 0.6353  (0.9586343460894645)\n",
      "     | > loader_time: 0.1828  (0.20211208965761443)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:46:54 -- STEP: 166/203 -- GLOBAL_STEP: 5850\u001b[0m\n",
      "     | > loss: 0.5167502760887146  (0.5187479042145144)\n",
      "     | > log_mle: -0.02637934684753418  (-0.010711302958339095)\n",
      "     | > loss_dur: 0.5431296229362488  (0.5294592075319179)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2559, device='cuda:0')  (tensor(1.3586, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 1.0067  (0.979406507618456)\n",
      "     | > loader_time: 0.2963  (0.20819639585104333)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:47:30 -- STEP: 191/203 -- GLOBAL_STEP: 5875\u001b[0m\n",
      "     | > loss: 0.5090614557266235  (0.5172136578884425)\n",
      "     | > log_mle: -0.028834640979766846  (-0.01318129101348797)\n",
      "     | > loss_dur: 0.5378960967063904  (0.5303949492139968)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6824, device='cuda:0')  (tensor(1.4071, device='cuda:0'))\n",
      "     | > current_lr: 7e-06 \n",
      "     | > step_time: 1.7011  (1.0110946375662118)\n",
      "     | > loader_time: 0.1752  (0.20831386331488325)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.07452329993247986 \u001b[0m(+0.054749637842178345)\n",
      "     | > avg_loss:\u001b[92m 0.513682596385479 \u001b[0m(-0.03140031546354294)\n",
      "     | > avg_log_mle:\u001b[92m -0.02910749614238739 \u001b[0m(-0.01446782797574997)\n",
      "     | > avg_loss_dur:\u001b[92m 0.5427900925278664 \u001b[0m(-0.01693248748779297)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_5887.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 29/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:48:08) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:48:20 -- STEP: 13/203 -- GLOBAL_STEP: 5900\u001b[0m\n",
      "     | > loss: 0.49155235290527344  (0.5028978036000178)\n",
      "     | > log_mle: -0.002266526222229004  (0.0016844593561612642)\n",
      "     | > loss_dur: 0.49381887912750244  (0.5012133442438566)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0364, device='cuda:0')  (tensor(1.0924, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 0.7089  (0.6317055408771222)\n",
      "     | > loader_time: 0.0069  (0.03239681170536922)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:48:42 -- STEP: 38/203 -- GLOBAL_STEP: 5925\u001b[0m\n",
      "     | > loss: 0.5034505128860474  (0.5016493052244186)\n",
      "     | > log_mle: -0.004609942436218262  (-0.0037853435466164037)\n",
      "     | > loss_dur: 0.5080604553222656  (0.505434648771035)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5511, device='cuda:0')  (tensor(1.2482, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 0.1955  (0.751018254380477)\n",
      "     | > loader_time: 0.0061  (0.02366824526535837)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:49:15 -- STEP: 63/203 -- GLOBAL_STEP: 5950\u001b[0m\n",
      "     | > loss: 0.48797786235809326  (0.4967249683917514)\n",
      "     | > log_mle: -0.01891767978668213  (-0.010282224132901148)\n",
      "     | > loss_dur: 0.5068955421447754  (0.5070071925246525)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(0.8320, device='cuda:0')  (tensor(1.3358, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 1.0993  (0.830425360846141)\n",
      "     | > loader_time: 0.0091  (0.17179431234087264)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:49:48 -- STEP: 88/203 -- GLOBAL_STEP: 5975\u001b[0m\n",
      "     | > loss: 0.4976103901863098  (0.4947808300229635)\n",
      "     | > log_mle: -0.03067302703857422  (-0.014766215600750664)\n",
      "     | > loss_dur: 0.528283417224884  (0.5095470456237141)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2828, device='cuda:0')  (tensor(1.4209, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 0.7833  (0.8714049756526947)\n",
      "     | > loader_time: 0.072  (0.20962741700085727)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:50:27 -- STEP: 113/203 -- GLOBAL_STEP: 6000\u001b[0m\n",
      "     | > loss: 0.48426496982574463  (0.49257166986971823)\n",
      "     | > log_mle: -0.030981898307800293  (-0.01840218928007953)\n",
      "     | > loss_dur: 0.5152468681335449  (0.5109738591497974)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.2637, device='cuda:0')  (tensor(1.5262, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 0.524  (0.9243258417180155)\n",
      "     | > loader_time: 0.2864  (0.26201615502349046)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_6000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:51:03 -- STEP: 138/203 -- GLOBAL_STEP: 6025\u001b[0m\n",
      "     | > loss: 0.47348999977111816  (0.4906400759582934)\n",
      "     | > log_mle: -0.03574025630950928  (-0.021606187025705974)\n",
      "     | > loss_dur: 0.5092302560806274  (0.512246262983999)\n",
      "     | > amp_scaler: 131072.0  (72659.47826086957)\n",
      "     | > grad_norm: tensor(1.9247, device='cuda:0')  (tensor(1.5949, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 1.1127  (0.9187244194141333)\n",
      "     | > loader_time: 0.1161  (0.25373691925104125)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:51:37 -- STEP: 163/203 -- GLOBAL_STEP: 6050\u001b[0m\n",
      "     | > loss: 0.4790976643562317  (0.4890838099037943)\n",
      "     | > log_mle: -0.03732717037200928  (-0.02392888581094567)\n",
      "     | > loss_dur: 0.516424834728241  (0.5130126957147395)\n",
      "     | > amp_scaler: 131072.0  (81618.45398773006)\n",
      "     | > grad_norm: tensor(1.2280, device='cuda:0')  (tensor(1.5610, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 0.8786  (0.9508487227504239)\n",
      "     | > loader_time: 0.0142  (0.2468432473258739)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:52:13 -- STEP: 188/203 -- GLOBAL_STEP: 6075\u001b[0m\n",
      "     | > loss: 0.48533308506011963  (0.4874188961817863)\n",
      "     | > log_mle: -0.03946787118911743  (-0.026363434309655053)\n",
      "     | > loss_dur: 0.5248009562492371  (0.5137823304914407)\n",
      "     | > amp_scaler: 131072.0  (88194.72340425529)\n",
      "     | > grad_norm: tensor(1.3127, device='cuda:0')  (tensor(1.5796, device='cuda:0'))\n",
      "     | > current_lr: 7.25e-06 \n",
      "     | > step_time: 1.4202  (0.9806403454313887)\n",
      "     | > loader_time: 0.2775  (0.24712439800830602)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.04263731837272644 \u001b[0m(-0.03188598155975342)\n",
      "     | > avg_loss:\u001b[92m 0.47816458716988564 \u001b[0m(-0.03551800921559334)\n",
      "     | > avg_log_mle:\u001b[92m -0.0419943705201149 \u001b[0m(-0.012886874377727509)\n",
      "     | > avg_loss_dur:\u001b[92m 0.5201589576900005 \u001b[0m(-0.02263113483786583)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_6090.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 30/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:52:54) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:53:04 -- STEP: 10/203 -- GLOBAL_STEP: 6100\u001b[0m\n",
      "     | > loss: 0.46252205967903137  (0.4694185435771942)\n",
      "     | > log_mle: -0.006505787372589111  (-0.010896188020706177)\n",
      "     | > loss_dur: 0.4690278470516205  (0.4803147315979004)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.5744, device='cuda:0')  (tensor(1.2554, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 0.5997  (0.5641072750091553)\n",
      "     | > loader_time: 0.0045  (0.009718227386474609)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:53:23 -- STEP: 35/203 -- GLOBAL_STEP: 6125\u001b[0m\n",
      "     | > loss: 0.46497201919555664  (0.47072871838297164)\n",
      "     | > log_mle: -0.02847808599472046  (-0.01569305317742484)\n",
      "     | > loss_dur: 0.4934501051902771  (0.48642177156039645)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.0181, device='cuda:0')  (tensor(1.3258, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 0.6444  (0.6894667216709682)\n",
      "     | > loader_time: 0.0108  (0.008661011287144253)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:53:45 -- STEP: 60/203 -- GLOBAL_STEP: 6150\u001b[0m\n",
      "     | > loss: 0.46849703788757324  (0.4671286294857661)\n",
      "     | > log_mle: -0.035556674003601074  (-0.022200859586397805)\n",
      "     | > loss_dur: 0.5040537118911743  (0.4893294890721639)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.6176, device='cuda:0')  (tensor(1.3601, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 0.4116  (0.7689695318539937)\n",
      "     | > loader_time: 0.0072  (0.010457813739776611)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:54:13 -- STEP: 85/203 -- GLOBAL_STEP: 6175\u001b[0m\n",
      "     | > loss: 0.4517478346824646  (0.46445437213953805)\n",
      "     | > log_mle: -0.04079389572143555  (-0.026740860237794765)\n",
      "     | > loss_dur: 0.49254173040390015  (0.4911952323773328)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.3553, device='cuda:0')  (tensor(1.3405, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 0.8226  (0.8188874693477854)\n",
      "     | > loader_time: 0.285  (0.05331755245433134)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:54:49 -- STEP: 110/203 -- GLOBAL_STEP: 6200\u001b[0m\n",
      "     | > loss: 0.4576922655105591  (0.4623338347131556)\n",
      "     | > log_mle: -0.04897606372833252  (-0.030339104479009456)\n",
      "     | > loss_dur: 0.5066683292388916  (0.49267293919216504)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.0429, device='cuda:0')  (tensor(1.3363, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 0.6865  (0.880631329796531)\n",
      "     | > loader_time: 0.2056  (0.11633957516063344)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:55:22 -- STEP: 135/203 -- GLOBAL_STEP: 6225\u001b[0m\n",
      "     | > loss: 0.44719815254211426  (0.46073155403137206)\n",
      "     | > log_mle: -0.05491667985916138  (-0.033445214783703846)\n",
      "     | > loss_dur: 0.5021148324012756  (0.4941767688150759)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(2.7389, device='cuda:0')  (tensor(1.3745, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 0.6987  (0.9103985344922101)\n",
      "     | > loader_time: 0.1986  (0.1477412082530834)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:55:58 -- STEP: 160/203 -- GLOBAL_STEP: 6250\u001b[0m\n",
      "     | > loss: 0.4506577253341675  (0.4590741438791156)\n",
      "     | > log_mle: -0.04373764991760254  (-0.03583223074674608)\n",
      "     | > loss_dur: 0.49439537525177  (0.4949063746258616)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.4928, device='cuda:0')  (tensor(1.4382, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 1.0783  (0.9500010401010514)\n",
      "     | > loader_time: 0.2182  (0.16451423019170758)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:56:34 -- STEP: 185/203 -- GLOBAL_STEP: 6275\u001b[0m\n",
      "     | > loss: 0.43678179383277893  (0.457469892985112)\n",
      "     | > log_mle: -0.06290894746780396  (-0.03821601706582146)\n",
      "     | > loss_dur: 0.4996907413005829  (0.49568591005093343)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(2.2044, device='cuda:0')  (tensor(1.6179, device='cuda:0'))\n",
      "     | > current_lr: 7.499999999999999e-06 \n",
      "     | > step_time: 1.4078  (0.9857901147893957)\n",
      "     | > loader_time: 0.1678  (0.17668905515928518)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.06179264187812806 \u001b[0m(+0.01915532350540162)\n",
      "     | > avg_loss:\u001b[92m 0.4465176798403263 \u001b[0m(-0.031646907329559326)\n",
      "     | > avg_log_mle:\u001b[92m -0.05450056493282318 \u001b[0m(-0.012506194412708282)\n",
      "     | > avg_loss_dur:\u001b[92m 0.5010182447731495 \u001b[0m(-0.019140712916851044)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_6293.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 31/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 18:57:18) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:57:28 -- STEP: 7/203 -- GLOBAL_STEP: 6300\u001b[0m\n",
      "     | > loss: 0.4473209083080292  (0.4449889234134129)\n",
      "     | > log_mle: -0.03334999084472656  (-0.024374570165361677)\n",
      "     | > loss_dur: 0.48067089915275574  (0.4693634935787746)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.5577, device='cuda:0')  (tensor(1.3101, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 0.5529  (0.8427668980189732)\n",
      "     | > loader_time: 0.0037  (0.015850986753191267)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:57:49 -- STEP: 32/203 -- GLOBAL_STEP: 6325\u001b[0m\n",
      "     | > loss: 0.4163685441017151  (0.44010346103459597)\n",
      "     | > log_mle: -0.03097224235534668  (-0.02682691253721714)\n",
      "     | > loss_dur: 0.44734078645706177  (0.4669303735718131)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.5862, device='cuda:0')  (tensor(1.3191, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 0.9271  (0.8282448276877403)\n",
      "     | > loader_time: 0.0888  (0.01813904196023941)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:58:11 -- STEP: 57/203 -- GLOBAL_STEP: 6350\u001b[0m\n",
      "     | > loss: 0.43524110317230225  (0.4364551917502755)\n",
      "     | > log_mle: -0.0457913875579834  (-0.03357719852213275)\n",
      "     | > loss_dur: 0.48103249073028564  (0.4700323902724082)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.6758, device='cuda:0')  (tensor(1.4011, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 0.8019  (0.8089190951564854)\n",
      "     | > loader_time: 0.4889  (0.04475847461767364)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:58:42 -- STEP: 82/203 -- GLOBAL_STEP: 6375\u001b[0m\n",
      "     | > loss: 0.43422600626945496  (0.4339418756525691)\n",
      "     | > log_mle: -0.050335586071014404  (-0.03821145179795055)\n",
      "     | > loss_dur: 0.48456159234046936  (0.47215332745051963)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.9173, device='cuda:0')  (tensor(1.5006, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 0.8834  (0.8763630768147911)\n",
      "     | > loader_time: 0.1299  (0.09011374450311427)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:59:17 -- STEP: 107/203 -- GLOBAL_STEP: 6400\u001b[0m\n",
      "     | > loss: 0.4117436408996582  (0.4317707859467123)\n",
      "     | > log_mle: -0.057558298110961914  (-0.04188413040660252)\n",
      "     | > loss_dur: 0.4693019390106201  (0.47365491635331486)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.3039, device='cuda:0')  (tensor(1.5390, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 1.2957  (0.9234252154270065)\n",
      "     | > loader_time: 1.6021  (0.14417431733318578)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 18:59:51 -- STEP: 132/203 -- GLOBAL_STEP: 6425\u001b[0m\n",
      "     | > loss: 0.4236268103122711  (0.42982930780360196)\n",
      "     | > log_mle: -0.05911564826965332  (-0.04499529798825582)\n",
      "     | > loss_dur: 0.48274245858192444  (0.47482460579185776)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.2125, device='cuda:0')  (tensor(1.5978, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 0.7963  (0.956281992522153)\n",
      "     | > loader_time: 0.1777  (0.16754300666577887)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:00:24 -- STEP: 157/203 -- GLOBAL_STEP: 6450\u001b[0m\n",
      "     | > loss: 0.40119120478630066  (0.42784700689801747)\n",
      "     | > log_mle: -0.06957894563674927  (-0.0474577405650145)\n",
      "     | > loss_dur: 0.4707701504230499  (0.4753047474630319)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.9448, device='cuda:0')  (tensor(1.6412, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 1.2843  (0.9787097842830002)\n",
      "     | > loader_time: 0.4377  (0.1767569970173441)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:00:57 -- STEP: 182/203 -- GLOBAL_STEP: 6475\u001b[0m\n",
      "     | > loss: 0.41262808442115784  (0.42641663518580764)\n",
      "     | > log_mle: -0.06441634893417358  (-0.049668834104642755)\n",
      "     | > loss_dur: 0.4770444333553314  (0.47608546929045037)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(2.3761, device='cuda:0')  (tensor(1.6760, device='cuda:0'))\n",
      "     | > current_lr: 7.75e-06 \n",
      "     | > step_time: 1.2882  (0.99463203058138)\n",
      "     | > loader_time: 0.226  (0.18295204115437938)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.11155712604522706 \u001b[0m(+0.049764484167099006)\n",
      "     | > avg_loss:\u001b[92m 0.4061789698898792 \u001b[0m(-0.04033870995044708)\n",
      "     | > avg_log_mle:\u001b[92m -0.06740027666091919 \u001b[0m(-0.012899711728096008)\n",
      "     | > avg_loss_dur:\u001b[92m 0.4735792465507984 \u001b[0m(-0.027438998222351074)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_6496.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 32/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:01:45) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:01:51 -- STEP: 4/203 -- GLOBAL_STEP: 6500\u001b[0m\n",
      "     | > loss: 0.4197515845298767  (0.41379911452531815)\n",
      "     | > log_mle: -0.03827989101409912  (-0.03164385259151459)\n",
      "     | > loss_dur: 0.45803147554397583  (0.44544296711683273)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.2683, device='cuda:0')  (tensor(1.2535, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 0.8134  (0.6439785361289978)\n",
      "     | > loader_time: 0.006  (0.0043517351150512695)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:02:09 -- STEP: 29/203 -- GLOBAL_STEP: 6525\u001b[0m\n",
      "     | > loss: 0.4107130467891693  (0.4099247969430068)\n",
      "     | > log_mle: -0.0462380051612854  (-0.03742290776351403)\n",
      "     | > loss_dur: 0.4569510519504547  (0.44734770470652085)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.4458, device='cuda:0')  (tensor(1.3443, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 0.5871  (0.6690235137939452)\n",
      "     | > loader_time: 0.0073  (0.012572033651943865)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:02:31 -- STEP: 54/203 -- GLOBAL_STEP: 6550\u001b[0m\n",
      "     | > loss: 0.39409059286117554  (0.4046823051240709)\n",
      "     | > log_mle: -0.05929368734359741  (-0.04382226202223035)\n",
      "     | > loss_dur: 0.45338428020477295  (0.44850456714630127)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.2566, device='cuda:0')  (tensor(1.5556, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 0.4183  (0.7405706158390751)\n",
      "     | > loader_time: 0.0061  (0.03806678895597105)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:02:58 -- STEP: 79/203 -- GLOBAL_STEP: 6575\u001b[0m\n",
      "     | > loss: 0.39541229605674744  (0.4014536417737792)\n",
      "     | > log_mle: -0.06385350227355957  (-0.04887828872173646)\n",
      "     | > loss_dur: 0.459265798330307  (0.4503319304955157)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(2.7169, device='cuda:0')  (tensor(1.7685, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 1.3805  (0.7939820259432252)\n",
      "     | > loader_time: 0.512  (0.08000802691978742)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:03:38 -- STEP: 104/203 -- GLOBAL_STEP: 6600\u001b[0m\n",
      "     | > loss: 0.3881504237651825  (0.399451633485464)\n",
      "     | > log_mle: -0.07369911670684814  (-0.052508879739504584)\n",
      "     | > loss_dur: 0.46184954047203064  (0.45196051322496855)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(2.0392, device='cuda:0')  (tensor(1.8311, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 1.6623  (0.8894814252853395)\n",
      "     | > loader_time: 0.4028  (0.15697957002199606)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:04:08 -- STEP: 129/203 -- GLOBAL_STEP: 6625\u001b[0m\n",
      "     | > loss: 0.38951563835144043  (0.39717410493266675)\n",
      "     | > log_mle: -0.075286865234375  (-0.055790108303691056)\n",
      "     | > loss_dur: 0.46480250358581543  (0.4529642132363578)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.5501, device='cuda:0')  (tensor(1.8194, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 0.8322  (0.9139041845188586)\n",
      "     | > loader_time: 0.1677  (0.16140294259832805)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:04:42 -- STEP: 154/203 -- GLOBAL_STEP: 6650\u001b[0m\n",
      "     | > loss: 0.3761749863624573  (0.39512978416758704)\n",
      "     | > log_mle: -0.07285654544830322  (-0.05827609981809344)\n",
      "     | > loss_dur: 0.4490315318107605  (0.45340588398568044)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.4472, device='cuda:0')  (tensor(1.8099, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 1.2324  (0.9371406397262179)\n",
      "     | > loader_time: 0.1978  (0.18163965894030282)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:05:15 -- STEP: 179/203 -- GLOBAL_STEP: 6675\u001b[0m\n",
      "     | > loss: 0.37110456824302673  (0.39359863194007455)\n",
      "     | > log_mle: -0.08081787824630737  (-0.06044626269260598)\n",
      "     | > loss_dur: 0.4519224464893341  (0.45404489463268044)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.7028, device='cuda:0')  (tensor(1.8158, device='cuda:0'))\n",
      "     | > current_lr: 8e-06 \n",
      "     | > step_time: 1.3169  (0.959678466093607)\n",
      "     | > loader_time: 0.1725  (0.1832420919194567)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.06136435270309449 \u001b[0m(-0.050192773342132575)\n",
      "     | > avg_loss:\u001b[92m 0.3717752620577812 \u001b[0m(-0.03440370783209801)\n",
      "     | > avg_log_mle:\u001b[92m -0.07746132463216782 \u001b[0m(-0.010061047971248627)\n",
      "     | > avg_loss_dur:\u001b[92m 0.44923658668994904 \u001b[0m(-0.02434265986084938)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_6699.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 33/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:06:07) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:06:11 -- STEP: 1/203 -- GLOBAL_STEP: 6700\u001b[0m\n",
      "     | > loss: 0.3873642683029175  (0.3873642683029175)\n",
      "     | > log_mle: -0.05108439922332764  (-0.05108439922332764)\n",
      "     | > loss_dur: 0.4384486675262451  (0.4384486675262451)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.6063, device='cuda:0')  (tensor(1.6063, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 0.8035  (0.8035109043121338)\n",
      "     | > loader_time: 0.0041  (0.004072666168212891)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:06:32 -- STEP: 26/203 -- GLOBAL_STEP: 6725\u001b[0m\n",
      "     | > loss: 0.3641282916069031  (0.3777588365169672)\n",
      "     | > log_mle: -0.04910385608673096  (-0.04766416778931251)\n",
      "     | > loss_dur: 0.41323214769363403  (0.4254230043062797)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(1.6182, device='cuda:0')  (tensor(1.4586, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 0.9634  (0.7986682194929856)\n",
      "     | > loader_time: 0.0045  (0.023716247998751126)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:06:54 -- STEP: 51/203 -- GLOBAL_STEP: 6750\u001b[0m\n",
      "     | > loss: 0.35400086641311646  (0.37299545430669595)\n",
      "     | > log_mle: -0.07974898815155029  (-0.054236921609616746)\n",
      "     | > loss_dur: 0.43374985456466675  (0.42723237591631275)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(3.0974, device='cuda:0')  (tensor(1.5851, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 0.4323  (0.8090583670373056)\n",
      "     | > loader_time: 0.0085  (0.04055372406454648)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:07:22 -- STEP: 76/203 -- GLOBAL_STEP: 6775\u001b[0m\n",
      "     | > loss: 0.36631497740745544  (0.3698658880434538)\n",
      "     | > log_mle: -0.06644958257675171  (-0.058952497808556804)\n",
      "     | > loss_dur: 0.43276455998420715  (0.42881838585201065)\n",
      "     | > amp_scaler: 131072.0  (131072.0)\n",
      "     | > grad_norm: tensor(2.8189, device='cuda:0')  (tensor(1.7987, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 1.6662  (0.8462399558017129)\n",
      "     | > loader_time: 0.6212  (0.0869009118331106)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:08:00 -- STEP: 101/203 -- GLOBAL_STEP: 6800\u001b[0m\n",
      "     | > loss: 0.3690909445285797  (0.36804118663957797)\n",
      "     | > log_mle: -0.06612282991409302  (-0.06241622773727568)\n",
      "     | > loss_dur: 0.43521377444267273  (0.4304574143768537)\n",
      "     | > amp_scaler: 65536.0  (120041.18811881189)\n",
      "     | > grad_norm: tensor(1.6264, device='cuda:0')  (tensor(1.9257, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 1.0296  (0.9029128999993352)\n",
      "     | > loader_time: 0.3977  (0.17130394265203192)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:08:37 -- STEP: 126/203 -- GLOBAL_STEP: 6825\u001b[0m\n",
      "     | > loss: 0.35531699657440186  (0.3657303930275023)\n",
      "     | > log_mle: -0.08725547790527344  (-0.06578423579533892)\n",
      "     | > loss_dur: 0.4425724744796753  (0.43151462882284136)\n",
      "     | > amp_scaler: 65536.0  (109226.66666666667)\n",
      "     | > grad_norm: tensor(1.5275, device='cuda:0')  (tensor(1.9656, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 2.0819  (0.9424635890930418)\n",
      "     | > loader_time: 1.6039  (0.2076889087283422)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:09:11 -- STEP: 151/203 -- GLOBAL_STEP: 6850\u001b[0m\n",
      "     | > loss: 0.3586484491825104  (0.36359249716562936)\n",
      "     | > log_mle: -0.08169806003570557  (-0.06824249621258664)\n",
      "     | > loss_dur: 0.44034650921821594  (0.43183499337821607)\n",
      "     | > amp_scaler: 65536.0  (101993.11258278145)\n",
      "     | > grad_norm: tensor(2.1311, device='cuda:0')  (tensor(1.9198, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 1.1959  (0.9586004800354408)\n",
      "     | > loader_time: 0.1256  (0.22934978845103687)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:09:48 -- STEP: 176/203 -- GLOBAL_STEP: 6875\u001b[0m\n",
      "     | > loss: 0.3468179702758789  (0.36218073185194616)\n",
      "     | > log_mle: -0.08690178394317627  (-0.07034298879179088)\n",
      "     | > loss_dur: 0.4337197542190552  (0.4325237206437371)\n",
      "     | > amp_scaler: 65536.0  (96814.54545454546)\n",
      "     | > grad_norm: tensor(1.8211, device='cuda:0')  (tensor(1.8931, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 0.9924  (0.9959877593950792)\n",
      "     | > loader_time: 0.1942  (0.23131056129932404)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:10:20 -- STEP: 201/203 -- GLOBAL_STEP: 6900\u001b[0m\n",
      "     | > loss: 0.3424469828605652  (0.36013167414499153)\n",
      "     | > log_mle: -0.0882793664932251  (-0.07247870270885635)\n",
      "     | > loss_dur: 0.4307263493537903  (0.43261037685384796)\n",
      "     | > amp_scaler: 65536.0  (92924.17910447762)\n",
      "     | > grad_norm: tensor(1.8835, device='cuda:0')  (tensor(1.9089, device='cuda:0'))\n",
      "     | > current_lr: 8.25e-06 \n",
      "     | > step_time: 0.5401  (1.0068722316874794)\n",
      "     | > loader_time: 0.0111  (0.2278016040574259)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.031494468450546265 \u001b[0m(-0.029869884252548225)\n",
      "     | > avg_loss:\u001b[92m 0.33849093317985535 \u001b[0m(-0.03328432887792587)\n",
      "     | > avg_log_mle:\u001b[92m -0.08800674974918365 \u001b[0m(-0.010545425117015839)\n",
      "     | > avg_loss_dur:\u001b[92m 0.426497682929039 \u001b[0m(-0.022738903760910034)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_6902.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 34/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:10:47) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:11:06 -- STEP: 23/203 -- GLOBAL_STEP: 6925\u001b[0m\n",
      "     | > loss: 0.34672150015830994  (0.3442834784155307)\n",
      "     | > log_mle: -0.06449025869369507  (-0.05622196456660395)\n",
      "     | > loss_dur: 0.411211758852005  (0.4005054429821346)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3605, device='cuda:0')  (tensor(1.4847, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 1.1031  (0.656683755957562)\n",
      "     | > loader_time: 0.0077  (0.013568359872569208)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:11:26 -- STEP: 48/203 -- GLOBAL_STEP: 6950\u001b[0m\n",
      "     | > loss: 0.34028878808021545  (0.34238840639591217)\n",
      "     | > log_mle: -0.07200145721435547  (-0.0632892685631911)\n",
      "     | > loss_dur: 0.4122902452945709  (0.4056776749591033)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4654, device='cuda:0')  (tensor(1.8079, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 1.1916  (0.7136613627274832)\n",
      "     | > loader_time: 0.0152  (0.015955333908398945)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:11:49 -- STEP: 73/203 -- GLOBAL_STEP: 6975\u001b[0m\n",
      "     | > loss: 0.3290022611618042  (0.3393574557075762)\n",
      "     | > log_mle: -0.07477414608001709  (-0.06844660190686785)\n",
      "     | > loss_dur: 0.4037764072418213  (0.40780405761444405)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.7065, device='cuda:0')  (tensor(1.8456, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 0.5082  (0.7748539121183631)\n",
      "     | > loader_time: 0.0069  (0.01904556196029872)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:12:24 -- STEP: 98/203 -- GLOBAL_STEP: 7000\u001b[0m\n",
      "     | > loss: 0.32633906602859497  (0.33797192634368445)\n",
      "     | > log_mle: -0.08436357975006104  (-0.07204070504830806)\n",
      "     | > loss_dur: 0.410702645778656  (0.4100126313919924)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.3260, device='cuda:0')  (tensor(1.8937, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 1.3777  (0.843152430592751)\n",
      "     | > loader_time: 0.2162  (0.10309326891996423)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_7000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:12:58 -- STEP: 123/203 -- GLOBAL_STEP: 7025\u001b[0m\n",
      "     | > loss: 0.31591618061065674  (0.33587752488570494)\n",
      "     | > log_mle: -0.08006024360656738  (-0.07512605917163008)\n",
      "     | > loss_dur: 0.3959764242172241  (0.41100358405733495)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.7681, device='cuda:0')  (tensor(1.8593, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 0.9162  (0.853055705869101)\n",
      "     | > loader_time: 0.222  (0.12814932140877575)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:13:32 -- STEP: 148/203 -- GLOBAL_STEP: 7050\u001b[0m\n",
      "     | > loss: 0.32338428497314453  (0.3339515966740816)\n",
      "     | > log_mle: -0.08829879760742188  (-0.07762438864321317)\n",
      "     | > loss_dur: 0.4116830825805664  (0.41157598531729467)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.1605, device='cuda:0')  (tensor(1.8880, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 1.0194  (0.8927699633546777)\n",
      "     | > loader_time: 0.0094  (0.1539835414370975)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:14:06 -- STEP: 173/203 -- GLOBAL_STEP: 7075\u001b[0m\n",
      "     | > loss: 0.3265792727470398  (0.33284124645883645)\n",
      "     | > log_mle: -0.09528684616088867  (-0.07970168480294279)\n",
      "     | > loss_dur: 0.42186611890792847  (0.41254293126177927)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(3.2739, device='cuda:0')  (tensor(1.9427, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 0.9006  (0.9280423759725053)\n",
      "     | > loader_time: 0.1824  (0.16629009164137654)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:14:40 -- STEP: 198/203 -- GLOBAL_STEP: 7100\u001b[0m\n",
      "     | > loss: 0.3170410096645355  (0.3312144496224145)\n",
      "     | > log_mle: -0.10120725631713867  (-0.08167087790941942)\n",
      "     | > loss_dur: 0.4182482659816742  (0.4128853275318338)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3449, device='cuda:0')  (tensor(1.9647, device='cuda:0'))\n",
      "     | > current_lr: 8.5e-06 \n",
      "     | > step_time: 0.6052  (0.9575015137893985)\n",
      "     | > loader_time: 0.0122  (0.16791085040930556)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.1414254903793335 \u001b[0m(+0.10993102192878723)\n",
      "     | > avg_loss:\u001b[92m 0.30658839643001556 \u001b[0m(-0.03190253674983978)\n",
      "     | > avg_log_mle:\u001b[92m -0.09697341173887253 \u001b[0m(-0.008966661989688873)\n",
      "     | > avg_loss_dur:\u001b[92m 0.4035618081688881 \u001b[0m(-0.02293587476015091)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_7105.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 35/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:15:09) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:15:25 -- STEP: 20/203 -- GLOBAL_STEP: 7125\u001b[0m\n",
      "     | > loss: 0.3130914568901062  (0.3187669590115547)\n",
      "     | > log_mle: -0.0578608512878418  (-0.0643995851278305)\n",
      "     | > loss_dur: 0.370952308177948  (0.3831665441393853)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3354, device='cuda:0')  (tensor(1.4713, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 0.5067  (0.5751839280128479)\n",
      "     | > loader_time: 0.0052  (0.01834074258804321)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:15:47 -- STEP: 45/203 -- GLOBAL_STEP: 7150\u001b[0m\n",
      "     | > loss: 0.3093808889389038  (0.3169044044282701)\n",
      "     | > log_mle: -0.0881187915802002  (-0.07113591167661878)\n",
      "     | > loss_dur: 0.397499680519104  (0.3880403161048889)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8923, device='cuda:0')  (tensor(1.6010, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 1.3778  (0.7350431760152181)\n",
      "     | > loader_time: 0.0112  (0.015344280666775172)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:16:13 -- STEP: 70/203 -- GLOBAL_STEP: 7175\u001b[0m\n",
      "     | > loss: 0.305056631565094  (0.3138470364468438)\n",
      "     | > log_mle: -0.09158432483673096  (-0.07675803899765014)\n",
      "     | > loss_dur: 0.39664095640182495  (0.39060507544449397)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.9879, device='cuda:0')  (tensor(1.7415, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 0.6699  (0.7852046966552735)\n",
      "     | > loader_time: 0.1362  (0.057728637967790875)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:16:47 -- STEP: 95/203 -- GLOBAL_STEP: 7200\u001b[0m\n",
      "     | > loss: 0.3035610020160675  (0.3125387527440722)\n",
      "     | > log_mle: -0.09569132328033447  (-0.08041302028455231)\n",
      "     | > loss_dur: 0.399252325296402  (0.39295177302862466)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(3.6034, device='cuda:0')  (tensor(1.7920, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 1.0034  (0.8376504496524209)\n",
      "     | > loader_time: 0.1843  (0.1330327084189967)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:17:20 -- STEP: 120/203 -- GLOBAL_STEP: 7225\u001b[0m\n",
      "     | > loss: 0.3056100308895111  (0.31117632165551173)\n",
      "     | > log_mle: -0.09595906734466553  (-0.08341712554295858)\n",
      "     | > loss_dur: 0.40156909823417664  (0.39459344719847045)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.6360, device='cuda:0')  (tensor(1.9212, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 0.5945  (0.8729486286640167)\n",
      "     | > loader_time: 0.1769  (0.17376790245374044)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:17:56 -- STEP: 145/203 -- GLOBAL_STEP: 7250\u001b[0m\n",
      "     | > loss: 0.3036108911037445  (0.3090650560527011)\n",
      "     | > log_mle: -0.09731924533843994  (-0.08598280364069445)\n",
      "     | > loss_dur: 0.40093013644218445  (0.3950478596933957)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.1437, device='cuda:0')  (tensor(1.9317, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 1.4769  (0.9234296091671648)\n",
      "     | > loader_time: 0.1215  (0.18857973855117274)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:18:32 -- STEP: 170/203 -- GLOBAL_STEP: 7275\u001b[0m\n",
      "     | > loss: 0.29994261264801025  (0.3081986451850217)\n",
      "     | > log_mle: -0.10356903076171875  (-0.08802368009791654)\n",
      "     | > loss_dur: 0.403511643409729  (0.3962223252829383)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.0633, device='cuda:0')  (tensor(1.8711, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 1.4804  (0.9544087984982659)\n",
      "     | > loader_time: 0.113  (0.20557688544778263)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:19:09 -- STEP: 195/203 -- GLOBAL_STEP: 7300\u001b[0m\n",
      "     | > loss: 0.2993825376033783  (0.30672345589368755)\n",
      "     | > log_mle: -0.11205399036407471  (-0.09006055624057085)\n",
      "     | > loss_dur: 0.411436527967453  (0.39678401213425857)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.0240, device='cuda:0')  (tensor(1.9464, device='cuda:0'))\n",
      "     | > current_lr: 8.750000000000001e-06 \n",
      "     | > step_time: 1.58  (0.9843122225541335)\n",
      "     | > loader_time: 0.5186  (0.21520046698741424)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.07166752219200134 \u001b[0m(-0.06975796818733215)\n",
      "     | > avg_loss:\u001b[92m 0.2766258828341961 \u001b[0m(-0.029962513595819473)\n",
      "     | > avg_log_mle:\u001b[92m -0.10618926584720612 \u001b[0m(-0.009215854108333588)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3828151486814022 \u001b[0m(-0.020746659487485886)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_7308.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 36/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:19:38) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:19:54 -- STEP: 17/203 -- GLOBAL_STEP: 7325\u001b[0m\n",
      "     | > loss: 0.3045748472213745  (0.2961044854977552)\n",
      "     | > log_mle: -0.0778459906578064  (-0.07359717873965993)\n",
      "     | > loss_dur: 0.3824208378791809  (0.3697016642374151)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.9390, device='cuda:0')  (tensor(1.7501, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 0.4081  (0.7218650088590735)\n",
      "     | > loader_time: 0.0846  (0.02335266505970674)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:20:24 -- STEP: 42/203 -- GLOBAL_STEP: 7350\u001b[0m\n",
      "     | > loss: 0.2876075506210327  (0.2946715354919433)\n",
      "     | > log_mle: -0.0966482162475586  (-0.07905859748522441)\n",
      "     | > loss_dur: 0.3842557668685913  (0.3737301329771677)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.0129, device='cuda:0')  (tensor(1.8546, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 0.6894  (0.7470603159495762)\n",
      "     | > loader_time: 0.0051  (0.26197587876092815)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:20:48 -- STEP: 67/203 -- GLOBAL_STEP: 7375\u001b[0m\n",
      "     | > loss: 0.30701956152915955  (0.2923080916724987)\n",
      "     | > log_mle: -0.09563207626342773  (-0.08460033117835201)\n",
      "     | > loss_dur: 0.4026516377925873  (0.37690842285085074)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.9520, device='cuda:0')  (tensor(1.8552, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 0.8928  (0.7950305654041803)\n",
      "     | > loader_time: 0.008  (0.2017263440943476)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:21:22 -- STEP: 92/203 -- GLOBAL_STEP: 7400\u001b[0m\n",
      "     | > loss: 0.2860606610774994  (0.29092310822528333)\n",
      "     | > log_mle: -0.1056743860244751  (-0.08822262935016466)\n",
      "     | > loss_dur: 0.3917350471019745  (0.379145737575448)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8394, device='cuda:0')  (tensor(1.8946, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 1.3016  (0.8433626397796299)\n",
      "     | > loader_time: 0.2077  (0.24374914169311523)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:21:57 -- STEP: 117/203 -- GLOBAL_STEP: 7425\u001b[0m\n",
      "     | > loss: 0.2779465913772583  (0.28963113849998534)\n",
      "     | > log_mle: -0.10807061195373535  (-0.091408922122075)\n",
      "     | > loss_dur: 0.38601720333099365  (0.38104006062206036)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.7249, device='cuda:0')  (tensor(2.0152, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 0.603  (0.8983298366905278)\n",
      "     | > loader_time: 0.1921  (0.2542593866331964)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:22:32 -- STEP: 142/203 -- GLOBAL_STEP: 7450\u001b[0m\n",
      "     | > loss: 0.2763859033584595  (0.28783672754193684)\n",
      "     | > log_mle: -0.10779595375061035  (-0.09407615241870074)\n",
      "     | > loss_dur: 0.3841818571090698  (0.38191287996063766)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.4647, device='cuda:0')  (tensor(2.0055, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 1.4936  (0.9366040699918505)\n",
      "     | > loader_time: 0.117  (0.2581518854893429)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:23:06 -- STEP: 167/203 -- GLOBAL_STEP: 7475\u001b[0m\n",
      "     | > loss: 0.28816381096839905  (0.286845943349564)\n",
      "     | > log_mle: -0.11479008197784424  (-0.09608891338645344)\n",
      "     | > loss_dur: 0.4029538929462433  (0.38293485673601746)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.6821, device='cuda:0')  (tensor(2.0050, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 1.2055  (0.9573558290561516)\n",
      "     | > loader_time: 0.2679  (0.2615829841819352)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:23:37 -- STEP: 192/203 -- GLOBAL_STEP: 7500\u001b[0m\n",
      "     | > loss: 0.27501967549324036  (0.2858142351421215)\n",
      "     | > log_mle: -0.10757279396057129  (-0.09808138882120453)\n",
      "     | > loss_dur: 0.38259246945381165  (0.38389562396332616)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4594, device='cuda:0')  (tensor(2.0415, device='cuda:0'))\n",
      "     | > current_lr: 9e-06 \n",
      "     | > step_time: 1.1177  (0.9707166080673536)\n",
      "     | > loader_time: 0.3215  (0.2527770126859345)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.0615694522857666 \u001b[0m(-0.010098069906234741)\n",
      "     | > avg_loss:\u001b[92m 0.25887269526720047 \u001b[0m(-0.01775318756699562)\n",
      "     | > avg_log_mle:\u001b[92m -0.11400635540485382 \u001b[0m(-0.007817089557647705)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3728790506720543 \u001b[0m(-0.009936098009347916)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_7511.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 37/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:24:15) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:24:30 -- STEP: 14/203 -- GLOBAL_STEP: 7525\u001b[0m\n",
      "     | > loss: 0.274350643157959  (0.2753272971936635)\n",
      "     | > log_mle: -0.08102685213088989  (-0.08144392711775643)\n",
      "     | > loss_dur: 0.3553774952888489  (0.3567712243114199)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4290, device='cuda:0')  (tensor(1.4628, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 0.5077  (0.7656199080603463)\n",
      "     | > loader_time: 0.0813  (0.010930725506373815)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:24:50 -- STEP: 39/203 -- GLOBAL_STEP: 7550\u001b[0m\n",
      "     | > loss: 0.2823726534843445  (0.2770068477361631)\n",
      "     | > log_mle: -0.09399867057800293  (-0.08631854790907639)\n",
      "     | > loss_dur: 0.3763713240623474  (0.3633253956452394)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(3.6562, device='cuda:0')  (tensor(1.8203, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 0.8793  (0.78593888038244)\n",
      "     | > loader_time: 0.0064  (0.01200567759000338)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:25:14 -- STEP: 64/203 -- GLOBAL_STEP: 7575\u001b[0m\n",
      "     | > loss: 0.26840612292289734  (0.27353534335270524)\n",
      "     | > log_mle: -0.10256350040435791  (-0.09218011610209942)\n",
      "     | > loss_dur: 0.37096962332725525  (0.3657154594548046)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.4415, device='cuda:0')  (tensor(2.2204, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 0.8426  (0.8087252825498581)\n",
      "     | > loader_time: 0.0067  (0.049081843346357346)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:25:44 -- STEP: 89/203 -- GLOBAL_STEP: 7600\u001b[0m\n",
      "     | > loss: 0.2699374556541443  (0.27186737736959127)\n",
      "     | > log_mle: -0.10283470153808594  (-0.0959815965609604)\n",
      "     | > loss_dur: 0.3727721571922302  (0.3678489739305516)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5553, device='cuda:0')  (tensor(2.2251, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 1.2211  (0.8428316946779746)\n",
      "     | > loader_time: 0.6053  (0.1049981786963645)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:26:18 -- STEP: 114/203 -- GLOBAL_STEP: 7625\u001b[0m\n",
      "     | > loss: 0.2612386643886566  (0.2704531971299857)\n",
      "     | > log_mle: -0.11267077922821045  (-0.09922092107304356)\n",
      "     | > loss_dur: 0.37390944361686707  (0.3696741182030292)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.5677, device='cuda:0')  (tensor(2.3279, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 0.9856  (0.9025989206213701)\n",
      "     | > loader_time: 0.2119  (0.13912163073556466)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:26:52 -- STEP: 139/203 -- GLOBAL_STEP: 7650\u001b[0m\n",
      "     | > loss: 0.2623712420463562  (0.2686451932937979)\n",
      "     | > log_mle: -0.10413837432861328  (-0.10189914360320826)\n",
      "     | > loss_dur: 0.3665096163749695  (0.37054433689700605)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.3270, device='cuda:0')  (tensor(2.3108, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 1.4005  (0.9300916212068188)\n",
      "     | > loader_time: 0.1712  (0.16795790967323798)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:27:30 -- STEP: 164/203 -- GLOBAL_STEP: 7675\u001b[0m\n",
      "     | > loss: 0.2704750597476959  (0.2675761496875345)\n",
      "     | > log_mle: -0.11190676689147949  (-0.10389962065510633)\n",
      "     | > loss_dur: 0.3823818266391754  (0.3714757703426408)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.0151, device='cuda:0')  (tensor(2.3697, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 1.2074  (0.9794566355100492)\n",
      "     | > loader_time: 0.3023  (0.1791832330750256)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:28:03 -- STEP: 189/203 -- GLOBAL_STEP: 7700\u001b[0m\n",
      "     | > loss: 0.2600230574607849  (0.2666175956763921)\n",
      "     | > log_mle: -0.117806077003479  (-0.10593192350296747)\n",
      "     | > loss_dur: 0.3778291344642639  (0.37254951917935947)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(3.3326, device='cuda:0')  (tensor(2.3614, device='cuda:0'))\n",
      "     | > current_lr: 9.250000000000001e-06 \n",
      "     | > step_time: 0.9983  (0.9975233784428349)\n",
      "     | > loader_time: 0.2774  (0.18442858337725282)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.06661754846572876 \u001b[0m(+0.005048096179962158)\n",
      "     | > avg_loss:\u001b[92m 0.24125155806541443 \u001b[0m(-0.01762113720178604)\n",
      "     | > avg_log_mle:\u001b[92m -0.12119483947753906 \u001b[0m(-0.007188484072685242)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3624463975429535 \u001b[0m(-0.0104326531291008)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_7714.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 38/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:28:45) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:28:57 -- STEP: 11/203 -- GLOBAL_STEP: 7725\u001b[0m\n",
      "     | > loss: 0.2619068920612335  (0.25905276157639245)\n",
      "     | > log_mle: -0.08681356906890869  (-0.08856700225309892)\n",
      "     | > loss_dur: 0.3487204611301422  (0.34761976382949134)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.4182, device='cuda:0')  (tensor(1.7279, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 0.5353  (0.8082634102214467)\n",
      "     | > loader_time: 0.0053  (0.0049846605821089315)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:29:16 -- STEP: 36/203 -- GLOBAL_STEP: 7750\u001b[0m\n",
      "     | > loss: 0.24611550569534302  (0.26022252688805264)\n",
      "     | > log_mle: -0.10316073894500732  (-0.09326544238461389)\n",
      "     | > loss_dur: 0.34927624464035034  (0.3534879692726665)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.1257, device='cuda:0')  (tensor(1.7313, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 0.8145  (0.7732048167122735)\n",
      "     | > loader_time: 0.0076  (0.010945134692721896)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:29:41 -- STEP: 61/203 -- GLOBAL_STEP: 7775\u001b[0m\n",
      "     | > loss: 0.24412891268730164  (0.257457857249213)\n",
      "     | > log_mle: -0.1074293851852417  (-0.09916485235339303)\n",
      "     | > loss_dur: 0.35155829787254333  (0.356622709602606)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8278, device='cuda:0')  (tensor(1.8523, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 0.9855  (0.8436785877728071)\n",
      "     | > loader_time: 0.0186  (0.01875683518706775)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:30:08 -- STEP: 86/203 -- GLOBAL_STEP: 7800\u001b[0m\n",
      "     | > loss: 0.2521275281906128  (0.255321085799572)\n",
      "     | > log_mle: -0.11250245571136475  (-0.10306627459304275)\n",
      "     | > loss_dur: 0.36462998390197754  (0.3583873603926147)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8070, device='cuda:0')  (tensor(2.0356, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 1.5945  (0.8819067284118297)\n",
      "     | > loader_time: 0.1833  (0.042747056761453316)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:30:46 -- STEP: 111/203 -- GLOBAL_STEP: 7825\u001b[0m\n",
      "     | > loss: 0.25237128138542175  (0.2540126064339198)\n",
      "     | > log_mle: -0.11892497539520264  (-0.10627666518494888)\n",
      "     | > loss_dur: 0.3712962567806244  (0.3602892716188687)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.5946, device='cuda:0')  (tensor(2.2793, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 0.8104  (0.9272930321392713)\n",
      "     | > loader_time: 0.1806  (0.13301768174042572)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:31:17 -- STEP: 136/203 -- GLOBAL_STEP: 7850\u001b[0m\n",
      "     | > loss: 0.2230362594127655  (0.25208981072201436)\n",
      "     | > log_mle: -0.12714886665344238  (-0.10908112236682105)\n",
      "     | > loss_dur: 0.3501851260662079  (0.36117093308883547)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(3.1697, device='cuda:0')  (tensor(2.3651, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 0.6141  (0.9343743902795455)\n",
      "     | > loader_time: 0.1823  (0.15615780739223253)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:31:51 -- STEP: 161/203 -- GLOBAL_STEP: 7875\u001b[0m\n",
      "     | > loss: 0.24954825639724731  (0.2510770881398124)\n",
      "     | > log_mle: -0.12645196914672852  (-0.11108769356093792)\n",
      "     | > loss_dur: 0.37600022554397583  (0.3621647817007501)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6921, device='cuda:0')  (tensor(2.3537, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 1.0965  (0.9573544315669847)\n",
      "     | > loader_time: 0.8052  (0.17475326461081184)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:32:23 -- STEP: 186/203 -- GLOBAL_STEP: 7900\u001b[0m\n",
      "     | > loss: 0.24394097924232483  (0.25015643511408103)\n",
      "     | > log_mle: -0.1242440938949585  (-0.11305798670297028)\n",
      "     | > loss_dur: 0.3681850731372833  (0.3632144218170512)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5101, device='cuda:0')  (tensor(2.2907, device='cuda:0'))\n",
      "     | > current_lr: 9.499999999999999e-06 \n",
      "     | > step_time: 0.9238  (0.972630339284097)\n",
      "     | > loader_time: 0.2757  (0.179407742715651)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.09721839427947998 \u001b[0m(+0.03060084581375122)\n",
      "     | > avg_loss:\u001b[92m 0.2235541231930256 \u001b[0m(-0.01769743487238884)\n",
      "     | > avg_log_mle:\u001b[92m -0.1294260025024414 \u001b[0m(-0.008231163024902344)\n",
      "     | > avg_loss_dur:\u001b[92m 0.352980125695467 \u001b[0m(-0.009466271847486496)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_7917.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 39/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:33:08) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:33:18 -- STEP: 8/203 -- GLOBAL_STEP: 7925\u001b[0m\n",
      "     | > loss: 0.2400549352169037  (0.23923655226826668)\n",
      "     | > log_mle: -0.09766173362731934  (-0.09729829430580139)\n",
      "     | > loss_dur: 0.337716668844223  (0.33653484657406807)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5786, device='cuda:0')  (tensor(1.6977, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 0.1649  (0.5845776200294495)\n",
      "     | > loader_time: 0.0039  (0.004598677158355713)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:33:35 -- STEP: 33/203 -- GLOBAL_STEP: 7950\u001b[0m\n",
      "     | > loss: 0.23869293928146362  (0.24373186628023782)\n",
      "     | > log_mle: -0.10839998722076416  (-0.0993164416515466)\n",
      "     | > loss_dur: 0.3470929265022278  (0.3430483079317844)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.8925, device='cuda:0')  (tensor(2.1683, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 0.3758  (0.6473696304090095)\n",
      "     | > loader_time: 0.0051  (0.01133798107956395)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:33:58 -- STEP: 58/203 -- GLOBAL_STEP: 7975\u001b[0m\n",
      "     | > loss: 0.2491913139820099  (0.24143365436586842)\n",
      "     | > log_mle: -0.11109375953674316  (-0.10543691700902479)\n",
      "     | > loss_dur: 0.36028507351875305  (0.34687057137489313)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.6663, device='cuda:0')  (tensor(2.8536, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 0.7833  (0.7417784641528953)\n",
      "     | > loader_time: 0.0078  (0.030005746874315984)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:34:26 -- STEP: 83/203 -- GLOBAL_STEP: 8000\u001b[0m\n",
      "     | > loss: 0.22633156180381775  (0.23952464454145317)\n",
      "     | > log_mle: -0.12719261646270752  (-0.10975641371255898)\n",
      "     | > loss_dur: 0.35352417826652527  (0.3492810582540121)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(1.5548, device='cuda:0')  (tensor(2.6812, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 1.1077  (0.7728758088077408)\n",
      "     | > loader_time: 1.3051  (0.10737440097762878)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_8000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:35:03 -- STEP: 108/203 -- GLOBAL_STEP: 8025\u001b[0m\n",
      "     | > loss: 0.23369595408439636  (0.23811830580234528)\n",
      "     | > log_mle: -0.1282886266708374  (-0.1128964137147974)\n",
      "     | > loss_dur: 0.36198458075523376  (0.35101471951714264)\n",
      "     | > amp_scaler: 65536.0  (65536.0)\n",
      "     | > grad_norm: tensor(2.8023, device='cuda:0')  (tensor(2.7053, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 1.1191  (0.7984211290324176)\n",
      "     | > loader_time: 0.2186  (0.1323987400090253)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:35:33 -- STEP: 133/203 -- GLOBAL_STEP: 8050\u001b[0m\n",
      "     | > loss: 0.22852975130081177  (0.23716460045118978)\n",
      "     | > log_mle: -0.12585997581481934  (-0.1155409472329276)\n",
      "     | > loss_dur: 0.3543897271156311  (0.3527055476841173)\n",
      "     | > amp_scaler: 32768.0  (60608.48120300752)\n",
      "     | > grad_norm: tensor(6.1205, device='cuda:0')  (tensor(3.3465, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 1.5676  (0.8351279721224218)\n",
      "     | > loader_time: 0.1337  (0.1441371620149541)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:36:06 -- STEP: 158/203 -- GLOBAL_STEP: 8075\u001b[0m\n",
      "     | > loss: 0.23922985792160034  (0.23588961287389829)\n",
      "     | > log_mle: -0.12690675258636475  (-0.1176782982258857)\n",
      "     | > loss_dur: 0.3661366105079651  (0.35356791109978397)\n",
      "     | > amp_scaler: 32768.0  (56203.3417721519)\n",
      "     | > grad_norm: tensor(2.8003, device='cuda:0')  (tensor(3.4878, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 0.8377  (0.8767833558818958)\n",
      "     | > loader_time: 0.0212  (0.15356720248355138)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:36:39 -- STEP: 183/203 -- GLOBAL_STEP: 8100\u001b[0m\n",
      "     | > loss: 0.2237434685230255  (0.2352798133274245)\n",
      "     | > log_mle: -0.13327383995056152  (-0.11954813772211961)\n",
      "     | > loss_dur: 0.35701730847358704  (0.35482795104954407)\n",
      "     | > amp_scaler: 32768.0  (53001.79234972678)\n",
      "     | > grad_norm: tensor(2.3474, device='cuda:0')  (tensor(3.2705, device='cuda:0'))\n",
      "     | > current_lr: 9.75e-06 \n",
      "     | > step_time: 1.0034  (0.9070774609925314)\n",
      "     | > loader_time: 0.1885  (0.16433972348280942)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.025165438652038574 \u001b[0m(-0.0720529556274414)\n",
      "     | > avg_loss:\u001b[92m 0.20607195794582367 \u001b[0m(-0.01748216524720192)\n",
      "     | > avg_log_mle:\u001b[92m -0.13629820942878723 \u001b[0m(-0.006872206926345825)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3423701673746109 \u001b[0m(-0.010609958320856094)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_8120.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 40/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:37:25) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:37:33 -- STEP: 5/203 -- GLOBAL_STEP: 8125\u001b[0m\n",
      "     | > loss: 0.23072549700737  (0.23079859018325805)\n",
      "     | > log_mle: -0.10347175598144531  (-0.10100595951080323)\n",
      "     | > loss_dur: 0.3341972529888153  (0.33180454969406126)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.3248, device='cuda:0')  (tensor(1.4486, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 0.4779  (1.0058009147644043)\n",
      "     | > loader_time: 0.0047  (0.006799125671386718)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:37:51 -- STEP: 30/203 -- GLOBAL_STEP: 8150\u001b[0m\n",
      "     | > loss: 0.23181092739105225  (0.23223433593908946)\n",
      "     | > log_mle: -0.11177361011505127  (-0.10538353522618611)\n",
      "     | > loss_dur: 0.3435845375061035  (0.3376178711652756)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.0841, device='cuda:0')  (tensor(2.0920, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 0.9016  (0.7490047136942546)\n",
      "     | > loader_time: 0.0079  (0.012514201800028484)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:38:15 -- STEP: 55/203 -- GLOBAL_STEP: 8175\u001b[0m\n",
      "     | > loss: 0.21293851733207703  (0.22771382982080632)\n",
      "     | > log_mle: -0.1334594488143921  (-0.11153842102397572)\n",
      "     | > loss_dur: 0.3463979661464691  (0.33925225084478206)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.1748, device='cuda:0')  (tensor(2.0981, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 1.3869  (0.8310217987407338)\n",
      "     | > loader_time: 0.09  (0.022277493910356)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:38:44 -- STEP: 80/203 -- GLOBAL_STEP: 8200\u001b[0m\n",
      "     | > loss: 0.22830083966255188  (0.2261029973626137)\n",
      "     | > log_mle: -0.12159252166748047  (-0.11575762182474136)\n",
      "     | > loss_dur: 0.34989336133003235  (0.34186061918735505)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.3381, device='cuda:0')  (tensor(2.4188, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 0.5195  (0.8994181215763092)\n",
      "     | > loader_time: 0.1694  (0.04674735069274902)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:39:15 -- STEP: 105/203 -- GLOBAL_STEP: 8225\u001b[0m\n",
      "     | > loss: 0.210922509431839  (0.22461443827265784)\n",
      "     | > log_mle: -0.1350705623626709  (-0.11904252483731224)\n",
      "     | > loss_dur: 0.3459930717945099  (0.34365696310997007)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.0202, device='cuda:0')  (tensor(2.5093, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 0.702  (0.9135550181070964)\n",
      "     | > loader_time: 0.2146  (0.09621654692150294)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:39:52 -- STEP: 130/203 -- GLOBAL_STEP: 8250\u001b[0m\n",
      "     | > loss: 0.20389220118522644  (0.2233516615170699)\n",
      "     | > log_mle: -0.13545751571655273  (-0.12195573219886192)\n",
      "     | > loss_dur: 0.3393497169017792  (0.34530739371593183)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5021, device='cuda:0')  (tensor(2.4653, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 1.5966  (0.9577233901390663)\n",
      "     | > loader_time: 0.7763  (0.13683429681337794)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:40:29 -- STEP: 155/203 -- GLOBAL_STEP: 8275\u001b[0m\n",
      "     | > loss: 0.2034170925617218  (0.2219621641020621)\n",
      "     | > log_mle: -0.13496661186218262  (-0.1240647469797442)\n",
      "     | > loss_dur: 0.3383837044239044  (0.34602691108180633)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8536, device='cuda:0')  (tensor(2.4583, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 1.4099  (1.0002080932740243)\n",
      "     | > loader_time: 0.2655  (0.15811006176856252)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:41:04 -- STEP: 180/203 -- GLOBAL_STEP: 8300\u001b[0m\n",
      "     | > loss: 0.2200697362422943  (0.2211743887927797)\n",
      "     | > log_mle: -0.1367889642715454  (-0.125921779870987)\n",
      "     | > loss_dur: 0.3568587005138397  (0.34709616866376675)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.2995, device='cuda:0')  (tensor(2.4804, device='cuda:0'))\n",
      "     | > current_lr: 9.999999999999999e-06 \n",
      "     | > step_time: 1.2824  (1.0183171404732596)\n",
      "     | > loader_time: 0.1802  (0.17565365102556016)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.01103159785270691 \u001b[0m(-0.014133840799331665)\n",
      "     | > avg_loss:\u001b[92m 0.1890578530728817 \u001b[0m(-0.01701410487294197)\n",
      "     | > avg_log_mle:\u001b[92m -0.1420210897922516 \u001b[0m(-0.0057228803634643555)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3310789428651333 \u001b[0m(-0.011291224509477615)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_8323.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 41/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:41:56) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:42:00 -- STEP: 2/203 -- GLOBAL_STEP: 8325\u001b[0m\n",
      "     | > loss: 0.22098588943481445  (0.22137577831745148)\n",
      "     | > log_mle: -0.09509944915771484  (-0.1053762435913086)\n",
      "     | > loss_dur: 0.3160853385925293  (0.32675202190876007)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.1505, device='cuda:0')  (tensor(1.4701, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.7114  (0.6513969898223877)\n",
      "     | > loader_time: 0.0041  (0.003473639488220215)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:42:18 -- STEP: 27/203 -- GLOBAL_STEP: 8350\u001b[0m\n",
      "     | > loss: 0.20772844552993774  (0.21852247913678488)\n",
      "     | > log_mle: -0.12032473087310791  (-0.11134012981697365)\n",
      "     | > loss_dur: 0.32805317640304565  (0.3298626089537586)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5188, device='cuda:0')  (tensor(1.7578, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.6185  (0.6995648189827249)\n",
      "     | > loader_time: 0.007  (0.01369007428487142)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:42:38 -- STEP: 52/203 -- GLOBAL_STEP: 8375\u001b[0m\n",
      "     | > loss: 0.20986270904541016  (0.21536401430001625)\n",
      "     | > log_mle: -0.11862063407897949  (-0.11717575559249291)\n",
      "     | > loss_dur: 0.32848334312438965  (0.33253976989250933)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9590, device='cuda:0')  (tensor(2.0181, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.563  (0.7223039911343503)\n",
      "     | > loader_time: 0.0076  (0.018216761258932256)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:43:03 -- STEP: 77/203 -- GLOBAL_STEP: 8400\u001b[0m\n",
      "     | > loss: 0.2125667929649353  (0.2131177376617085)\n",
      "     | > log_mle: -0.1257927417755127  (-0.12163779178223053)\n",
      "     | > loss_dur: 0.338359534740448  (0.3347555294439391)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.0596, device='cuda:0')  (tensor(2.3434, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 1.1028  (0.7942300585957317)\n",
      "     | > loader_time: 0.2972  (0.025957723716636755)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:43:34 -- STEP: 102/203 -- GLOBAL_STEP: 8425\u001b[0m\n",
      "     | > loss: 0.20441266894340515  (0.21204266010546216)\n",
      "     | > log_mle: -0.14010930061340332  (-0.12489078442255656)\n",
      "     | > loss_dur: 0.34452196955680847  (0.3369334445280187)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.9565, device='cuda:0')  (tensor(2.4181, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.7137  (0.8290503258798637)\n",
      "     | > loader_time: 0.0171  (0.09179260683994665)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:44:03 -- STEP: 127/203 -- GLOBAL_STEP: 8450\u001b[0m\n",
      "     | > loss: 0.21028810739517212  (0.21071016319154753)\n",
      "     | > log_mle: -0.13393115997314453  (-0.12778071437295027)\n",
      "     | > loss_dur: 0.34421926736831665  (0.3384908775644978)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.6619, device='cuda:0')  (tensor(2.5200, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.8277  (0.8595047353759525)\n",
      "     | > loader_time: 0.1852  (0.11174707525358424)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:44:35 -- STEP: 152/203 -- GLOBAL_STEP: 8475\u001b[0m\n",
      "     | > loss: 0.2042272686958313  (0.20924629231816844)\n",
      "     | > log_mle: -0.14253950119018555  (-0.13000092537779553)\n",
      "     | > loss_dur: 0.34676676988601685  (0.339247217695964)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.7402, device='cuda:0')  (tensor(2.6842, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.8765  (0.8863660928450132)\n",
      "     | > loader_time: 0.382  (0.13415233712447316)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:45:11 -- STEP: 177/203 -- GLOBAL_STEP: 8500\u001b[0m\n",
      "     | > loss: 0.20417410135269165  (0.20837139443489117)\n",
      "     | > log_mle: -0.14703822135925293  (-0.131819415227168)\n",
      "     | > loss_dur: 0.3512123227119446  (0.3401908096620592)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1617, device='cuda:0')  (tensor(2.7971, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.9337  (0.9226415332427806)\n",
      "     | > loader_time: 0.4988  (0.15389367011980815)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:45:39 -- STEP: 202/203 -- GLOBAL_STEP: 8525\u001b[0m\n",
      "     | > loss: 0.19788920879364014  (0.20697725320806598)\n",
      "     | > log_mle: -0.1440521478652954  (-0.1336172282105625)\n",
      "     | > loss_dur: 0.34194135665893555  (0.3405944814186284)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2223, device='cuda:0')  (tensor(3.1038, device='cuda:0'))\n",
      "     | > current_lr: 1.025e-05 \n",
      "     | > step_time: 0.3492  (0.9303916326843866)\n",
      "     | > loader_time: 0.0078  (0.15105439294682868)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.0864420235157013 \u001b[0m(+0.07541042566299438)\n",
      "     | > avg_loss:\u001b[92m 0.1717115268111229 \u001b[0m(-0.017346326261758804)\n",
      "     | > avg_log_mle:\u001b[92m -0.14809578657150269 \u001b[0m(-0.006074696779251099)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3198073133826256 \u001b[0m(-0.011271629482507706)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_8526.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 42/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:46:07) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:46:31 -- STEP: 24/203 -- GLOBAL_STEP: 8550\u001b[0m\n",
      "     | > loss: 0.2156027853488922  (0.20628682151436806)\n",
      "     | > log_mle: -0.120452880859375  (-0.1157811979452769)\n",
      "     | > loss_dur: 0.3360556662082672  (0.32206801945964497)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.4461, device='cuda:0')  (tensor(2.1191, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 0.919  (0.9057007730007172)\n",
      "     | > loader_time: 0.0073  (0.012426634629567465)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:46:54 -- STEP: 49/203 -- GLOBAL_STEP: 8575\u001b[0m\n",
      "     | > loss: 0.20988696813583374  (0.20348193876597345)\n",
      "     | > log_mle: -0.12553942203521729  (-0.12230692104417451)\n",
      "     | > loss_dur: 0.335426390171051  (0.325788859810148)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5932, device='cuda:0')  (tensor(2.8717, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 0.6122  (0.8790008535190504)\n",
      "     | > loader_time: 0.2005  (0.041520128444749484)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:47:19 -- STEP: 74/203 -- GLOBAL_STEP: 8600\u001b[0m\n",
      "     | > loss: 0.20058533549308777  (0.20112469349358533)\n",
      "     | > log_mle: -0.1435379981994629  (-0.1272130286371386)\n",
      "     | > loss_dur: 0.34412333369255066  (0.3283377221307239)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.7114, device='cuda:0')  (tensor(2.8512, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 1.1002  (0.9078346684172347)\n",
      "     | > loader_time: 0.0088  (0.03425178656706939)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:47:49 -- STEP: 99/203 -- GLOBAL_STEP: 8625\u001b[0m\n",
      "     | > loss: 0.1942424476146698  (0.19986074711337234)\n",
      "     | > log_mle: -0.13752758502960205  (-0.13036435541480484)\n",
      "     | > loss_dur: 0.33177003264427185  (0.330225102528177)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(8.5927, device='cuda:0')  (tensor(3.3657, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 1.1258  (0.9205247127648556)\n",
      "     | > loader_time: 0.3159  (0.08694362158727165)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:48:22 -- STEP: 124/203 -- GLOBAL_STEP: 8650\u001b[0m\n",
      "     | > loss: 0.2002236247062683  (0.19866782162458665)\n",
      "     | > log_mle: -0.13908398151397705  (-0.1331002096976003)\n",
      "     | > loss_dur: 0.33930760622024536  (0.33176803132218685)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1408, device='cuda:0')  (tensor(3.8949, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 1.5011  (0.9521313136623751)\n",
      "     | > loader_time: 0.2109  (0.11476335217875819)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:48:57 -- STEP: 149/203 -- GLOBAL_STEP: 8675\u001b[0m\n",
      "     | > loss: 0.17819347977638245  (0.19698573658930374)\n",
      "     | > log_mle: -0.15105271339416504  (-0.1354115289329682)\n",
      "     | > loss_dur: 0.3292461931705475  (0.3323972655222719)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.9504, device='cuda:0')  (tensor(3.8081, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 1.0916  (0.9900883092176194)\n",
      "     | > loader_time: 0.1923  (0.13369471274766345)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:49:33 -- STEP: 174/203 -- GLOBAL_STEP: 8700\u001b[0m\n",
      "     | > loss: 0.20641180872917175  (0.1964344344604974)\n",
      "     | > log_mle: -0.1439957618713379  (-0.13726043838194044)\n",
      "     | > loss_dur: 0.35040757060050964  (0.33369487284243793)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8751, device='cuda:0')  (tensor(3.6025, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 1.7796  (1.0170380699223485)\n",
      "     | > loader_time: 0.1256  (0.1518500659657621)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:50:08 -- STEP: 199/203 -- GLOBAL_STEP: 8725\u001b[0m\n",
      "     | > loss: 0.18434551358222961  (0.19503283635455756)\n",
      "     | > log_mle: -0.1546570062637329  (-0.13916595796843861)\n",
      "     | > loss_dur: 0.3390025198459625  (0.33419879432299615)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(6.6308, device='cuda:0')  (tensor(3.6932, device='cuda:0'))\n",
      "     | > current_lr: 1.05e-05 \n",
      "     | > step_time: 0.6046  (1.0340528955411667)\n",
      "     | > loader_time: 0.0115  (0.16004220564760757)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.09649777412414551 \u001b[0m(+0.010055750608444214)\n",
      "     | > avg_loss:\u001b[92m 0.162139430642128 \u001b[0m(-0.009572096168994904)\n",
      "     | > avg_log_mle:\u001b[92m -0.15246504545211792 \u001b[0m(-0.004369258880615234)\n",
      "     | > avg_loss_dur:\u001b[92m 0.3146044760942459 \u001b[0m(-0.005202837288379669)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_8729.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 43/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:50:36) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:50:56 -- STEP: 21/203 -- GLOBAL_STEP: 8750\u001b[0m\n",
      "     | > loss: 0.17960438132286072  (0.19537147311937242)\n",
      "     | > log_mle: -0.12275898456573486  (-0.12142963068825859)\n",
      "     | > loss_dur: 0.3023633658885956  (0.316801103807631)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.3944, device='cuda:0')  (tensor(2.3621, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 1.1876  (0.6711066563924154)\n",
      "     | > loader_time: 0.0126  (0.017556690034412202)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:51:20 -- STEP: 46/203 -- GLOBAL_STEP: 8775\u001b[0m\n",
      "     | > loss: 0.17761394381523132  (0.19265293815861578)\n",
      "     | > log_mle: -0.1509002447128296  (-0.12815233935480533)\n",
      "     | > loss_dur: 0.3285141885280609  (0.32080527751342114)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.7278, device='cuda:0')  (tensor(2.2604, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 0.4126  (0.8049398349679034)\n",
      "     | > loader_time: 0.0068  (0.02158679132876189)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:51:46 -- STEP: 71/203 -- GLOBAL_STEP: 8800\u001b[0m\n",
      "     | > loss: 0.17302259802818298  (0.18928040585047762)\n",
      "     | > log_mle: -0.1495429277420044  (-0.13305113013361541)\n",
      "     | > loss_dur: 0.3225655257701874  (0.322331535984093)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.6942, device='cuda:0')  (tensor(2.3123, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 1.6919  (0.8858703996094179)\n",
      "     | > loader_time: 0.1001  (0.020101506945113062)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:52:24 -- STEP: 96/203 -- GLOBAL_STEP: 8825\u001b[0m\n",
      "     | > loss: 0.1826242208480835  (0.1876893943796555)\n",
      "     | > log_mle: -0.14870095252990723  (-0.13624887913465508)\n",
      "     | > loss_dur: 0.3313251733779907  (0.3239382735143104)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.7639, device='cuda:0')  (tensor(2.6336, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 2.3662  (0.9536996980508169)\n",
      "     | > loader_time: 0.0201  (0.10452677061160408)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:52:51 -- STEP: 121/203 -- GLOBAL_STEP: 8850\u001b[0m\n",
      "     | > loss: 0.17549216747283936  (0.1867238605810591)\n",
      "     | > log_mle: -0.16398382186889648  (-0.13900971806738993)\n",
      "     | > loss_dur: 0.33947598934173584  (0.3257335786484489)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.2668, device='cuda:0')  (tensor(2.6787, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 0.7229  (0.9371207489455042)\n",
      "     | > loader_time: 0.1676  (0.1292161586856054)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:53:24 -- STEP: 146/203 -- GLOBAL_STEP: 8875\u001b[0m\n",
      "     | > loss: 0.1844516396522522  (0.18505805159268315)\n",
      "     | > log_mle: -0.15285539627075195  (-0.14117868224235436)\n",
      "     | > loss_dur: 0.33730703592300415  (0.3262367338350373)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.3398, device='cuda:0')  (tensor(2.6844, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 1.1106  (0.9554339337022337)\n",
      "     | > loader_time: 0.274  (0.15564963425675485)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:53:57 -- STEP: 171/203 -- GLOBAL_STEP: 8900\u001b[0m\n",
      "     | > loss: 0.17548763751983643  (0.18425948351447352)\n",
      "     | > log_mle: -0.1611309051513672  (-0.14304527901766598)\n",
      "     | > loss_dur: 0.3366185426712036  (0.32730476253213936)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.2700, device='cuda:0')  (tensor(2.6365, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 1.5008  (0.9793986298187435)\n",
      "     | > loader_time: 0.3838  (0.15724627316346643)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:54:29 -- STEP: 196/203 -- GLOBAL_STEP: 8925\u001b[0m\n",
      "     | > loss: 0.17033985257148743  (0.18326992678398996)\n",
      "     | > log_mle: -0.1564345359802246  (-0.1448080223433826)\n",
      "     | > loss_dur: 0.32677438855171204  (0.32807794912737215)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(4.3125, device='cuda:0')  (tensor(2.8150, device='cuda:0'))\n",
      "     | > current_lr: 1.075e-05 \n",
      "     | > step_time: 0.6089  (0.9962661838044926)\n",
      "     | > loader_time: 0.0965  (0.1603522106092803)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.07781407237052917 \u001b[0m(-0.018683701753616333)\n",
      "     | > avg_loss:\u001b[92m 0.14889312535524368 \u001b[0m(-0.013246305286884308)\n",
      "     | > avg_log_mle:\u001b[92m -0.15776890516281128 \u001b[0m(-0.005303859710693359)\n",
      "     | > avg_loss_dur:\u001b[92m 0.30666203051805496 \u001b[0m(-0.007942445576190948)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_8932.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 44/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:54:59) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:55:18 -- STEP: 18/203 -- GLOBAL_STEP: 8950\u001b[0m\n",
      "     | > loss: 0.19604754447937012  (0.18344362907939488)\n",
      "     | > log_mle: -0.12141728401184082  (-0.1267468598153856)\n",
      "     | > loss_dur: 0.31746482849121094  (0.31019048889478046)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.8831, device='cuda:0')  (tensor(2.0622, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 1.32  (0.8574569755130343)\n",
      "     | > loader_time: 0.086  (0.019739985466003418)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:55:39 -- STEP: 43/203 -- GLOBAL_STEP: 8975\u001b[0m\n",
      "     | > loss: 0.16438263654708862  (0.18173977316812026)\n",
      "     | > log_mle: -0.14716863632202148  (-0.1326839452566103)\n",
      "     | > loss_dur: 0.3115512728691101  (0.3144237184247305)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(4.6114, device='cuda:0')  (tensor(2.6370, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 0.7204  (0.828895934792452)\n",
      "     | > loader_time: 0.0828  (0.01732326662817666)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:56:07 -- STEP: 68/203 -- GLOBAL_STEP: 9000\u001b[0m\n",
      "     | > loss: 0.1812375783920288  (0.1788950032171081)\n",
      "     | > log_mle: -0.1496216058731079  (-0.13785291945233064)\n",
      "     | > loss_dur: 0.3308591842651367  (0.31674792266943874)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.7240, device='cuda:0')  (tensor(2.7841, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 1.208  (0.9243557908955742)\n",
      "     | > loader_time: 0.0096  (0.021334588527679436)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_9000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:56:46 -- STEP: 93/203 -- GLOBAL_STEP: 9025\u001b[0m\n",
      "     | > loss: 0.16438043117523193  (0.17725164371152077)\n",
      "     | > log_mle: -0.15514123439788818  (-0.14123547974453182)\n",
      "     | > loss_dur: 0.3195216655731201  (0.3184871234560525)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1102, device='cuda:0')  (tensor(2.7804, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 1.4114  (0.9149473405653431)\n",
      "     | > loader_time: 0.1703  (0.06262638748333019)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:57:16 -- STEP: 118/203 -- GLOBAL_STEP: 9050\u001b[0m\n",
      "     | > loss: 0.17927375435829163  (0.17573529380862996)\n",
      "     | > log_mle: -0.1526123285293579  (-0.1440582527952679)\n",
      "     | > loss_dur: 0.33188608288764954  (0.3197935466038978)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9317, device='cuda:0')  (tensor(2.8900, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 0.7785  (0.9196229566962032)\n",
      "     | > loader_time: 0.7106  (0.10639107025275796)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:57:47 -- STEP: 143/203 -- GLOBAL_STEP: 9075\u001b[0m\n",
      "     | > loss: 0.1808757781982422  (0.17390733457111812)\n",
      "     | > log_mle: -0.1549391746520996  (-0.1464417555949071)\n",
      "     | > loss_dur: 0.3358149528503418  (0.32034909016602514)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.8177, device='cuda:0')  (tensor(2.9392, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 0.7892  (0.9331917879464743)\n",
      "     | > loader_time: 0.1955  (0.12787175345254112)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:58:20 -- STEP: 168/203 -- GLOBAL_STEP: 9100\u001b[0m\n",
      "     | > loss: 0.17368733882904053  (0.17323421456274532)\n",
      "     | > log_mle: -0.15610146522521973  (-0.14814410110314688)\n",
      "     | > loss_dur: 0.32978880405426025  (0.3213783156658921)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(10.8585, device='cuda:0')  (tensor(3.4209, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 1.2123  (0.9587888604118711)\n",
      "     | > loader_time: 0.2111  (0.14067817301977245)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:58:56 -- STEP: 193/203 -- GLOBAL_STEP: 9125\u001b[0m\n",
      "     | > loss: 0.16452693939208984  (0.1725227340206581)\n",
      "     | > log_mle: -0.16063761711120605  (-0.1497924605799462)\n",
      "     | > loss_dur: 0.3251645565032959  (0.32231519460060426)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1461, device='cuda:0')  (tensor(3.8103, device='cuda:0'))\n",
      "     | > current_lr: 1.1e-05 \n",
      "     | > step_time: 1.1125  (0.9926041123780562)\n",
      "     | > loader_time: 0.1716  (0.14763473105554126)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.018336236476898193 \u001b[0m(-0.05947783589363098)\n",
      "     | > avg_loss:\u001b[92m 0.13530920445919037 \u001b[0m(-0.013583920896053314)\n",
      "     | > avg_log_mle:\u001b[92m -0.16543149948120117 \u001b[0m(-0.007662594318389893)\n",
      "     | > avg_loss_dur:\u001b[92m 0.30074070394039154 \u001b[0m(-0.005921326577663422)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_9135.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 45/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 19:59:29) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 19:59:40 -- STEP: 15/203 -- GLOBAL_STEP: 9150\u001b[0m\n",
      "     | > loss: 0.17517974972724915  (0.17004929979642233)\n",
      "     | > log_mle: -0.13338065147399902  (-0.13181114991505943)\n",
      "     | > loss_dur: 0.30856040120124817  (0.30186044971148174)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.9436, device='cuda:0')  (tensor(1.9670, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 0.5029  (0.5980157057444254)\n",
      "     | > loader_time: 0.079  (0.00929555892944336)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:00:03 -- STEP: 40/203 -- GLOBAL_STEP: 9175\u001b[0m\n",
      "     | > loss: 0.16755962371826172  (0.17207507565617558)\n",
      "     | > log_mle: -0.14117372035980225  (-0.13647855520248414)\n",
      "     | > loss_dur: 0.30873334407806396  (0.3085536308586598)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.1638, device='cuda:0')  (tensor(2.5417, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 0.4276  (0.7638607561588288)\n",
      "     | > loader_time: 0.0597  (0.017997193336486816)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:00:27 -- STEP: 65/203 -- GLOBAL_STEP: 9200\u001b[0m\n",
      "     | > loss: 0.15263846516609192  (0.1687392541995415)\n",
      "     | > log_mle: -0.1534111499786377  (-0.14200919591463526)\n",
      "     | > loss_dur: 0.3060496151447296  (0.31074845011417684)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.5095, device='cuda:0')  (tensor(3.4755, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 1.2131  (0.8402437686920166)\n",
      "     | > loader_time: 0.059  (0.019860854515662562)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:01:01 -- STEP: 90/203 -- GLOBAL_STEP: 9225\u001b[0m\n",
      "     | > loss: 0.1690709888935089  (0.16728712519009908)\n",
      "     | > log_mle: -0.15647339820861816  (-0.14549413257175015)\n",
      "     | > loss_dur: 0.3255443871021271  (0.3127812577618493)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.9177, device='cuda:0')  (tensor(3.7760, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 1.5038  (0.9077804856830173)\n",
      "     | > loader_time: 1.1047  (0.08106088373396132)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:01:33 -- STEP: 115/203 -- GLOBAL_STEP: 9250\u001b[0m\n",
      "     | > loss: 0.1734485924243927  (0.16611308714617853)\n",
      "     | > log_mle: -0.1620270013809204  (-0.1484343642773835)\n",
      "     | > loss_dur: 0.3354755938053131  (0.3145474514235621)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5987, device='cuda:0')  (tensor(4.0151, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 0.9846  (0.9365686997123387)\n",
      "     | > loader_time: 0.1208  (0.11638557185297427)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:02:04 -- STEP: 140/203 -- GLOBAL_STEP: 9275\u001b[0m\n",
      "     | > loss: 0.14253303408622742  (0.1643696712596076)\n",
      "     | > log_mle: -0.17030131816864014  (-0.15090568406241273)\n",
      "     | > loss_dur: 0.31283435225486755  (0.31527535532202045)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1929, device='cuda:0')  (tensor(3.7975, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 0.9522  (0.9524160436221532)\n",
      "     | > loader_time: 0.1452  (0.13431721585137504)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:02:35 -- STEP: 165/203 -- GLOBAL_STEP: 9300\u001b[0m\n",
      "     | > loss: 0.16647234559059143  (0.16351360732858836)\n",
      "     | > log_mle: -0.16655921936035156  (-0.1526386333234382)\n",
      "     | > loss_dur: 0.333031564950943  (0.3161522406520266)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(6.4791, device='cuda:0')  (tensor(4.0035, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 0.8942  (0.9665164788564046)\n",
      "     | > loader_time: 0.1828  (0.14322430292765298)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:03:09 -- STEP: 190/203 -- GLOBAL_STEP: 9325\u001b[0m\n",
      "     | > loss: 0.16122156381607056  (0.16288234826765569)\n",
      "     | > log_mle: -0.16171038150787354  (-0.15431469427911862)\n",
      "     | > loss_dur: 0.3229319453239441  (0.31719704254677406)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(8.1717, device='cuda:0')  (tensor(4.5700, device='cuda:0'))\n",
      "     | > current_lr: 1.125e-05 \n",
      "     | > step_time: 0.9646  (0.9902996176167538)\n",
      "     | > loader_time: 0.2756  (0.15155657467089206)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.025400549173355103 \u001b[0m(+0.007064312696456909)\n",
      "     | > avg_loss:\u001b[92m 0.12709865346550941 \u001b[0m(-0.008210550993680954)\n",
      "     | > avg_log_mle:\u001b[92m -0.17052370309829712 \u001b[0m(-0.005092203617095947)\n",
      "     | > avg_loss_dur:\u001b[92m 0.29762235656380653 \u001b[0m(-0.0031183473765850067)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_9338.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 46/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 20:03:49) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:03:59 -- STEP: 12/203 -- GLOBAL_STEP: 9350\u001b[0m\n",
      "     | > loss: 0.16446813941001892  (0.15828010439872742)\n",
      "     | > log_mle: -0.13742387294769287  (-0.13574904203414917)\n",
      "     | > loss_dur: 0.3018920123577118  (0.29402914643287664)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(6.0501, device='cuda:0')  (tensor(3.7130, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 0.2227  (0.4606999357541402)\n",
      "     | > loader_time: 0.004  (0.004336992899576823)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:04:21 -- STEP: 37/203 -- GLOBAL_STEP: 9375\u001b[0m\n",
      "     | > loss: 0.1771135926246643  (0.16259889022724047)\n",
      "     | > log_mle: -0.1592087745666504  (-0.14049145337697622)\n",
      "     | > loss_dur: 0.3363223671913147  (0.3030903436042167)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(7.6915, device='cuda:0')  (tensor(4.1076, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 0.8034  (0.7135878382502375)\n",
      "     | > loader_time: 0.0079  (0.012391580117715371)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:04:42 -- STEP: 62/203 -- GLOBAL_STEP: 9400\u001b[0m\n",
      "     | > loss: 0.13994821906089783  (0.15948365724855854)\n",
      "     | > log_mle: -0.1645185947418213  (-0.1460647006188669)\n",
      "     | > loss_dur: 0.3044668138027191  (0.3055483578674255)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5826, device='cuda:0')  (tensor(4.2822, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 0.5119  (0.7502053360785207)\n",
      "     | > loader_time: 0.0807  (0.022127947499675137)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:05:14 -- STEP: 87/203 -- GLOBAL_STEP: 9425\u001b[0m\n",
      "     | > loss: 0.1638968288898468  (0.15846838827790885)\n",
      "     | > log_mle: -0.15686702728271484  (-0.14955303038673834)\n",
      "     | > loss_dur: 0.32076385617256165  (0.3080214186646472)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.4716, device='cuda:0')  (tensor(4.1646, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 0.3793  (0.8248718141139239)\n",
      "     | > loader_time: 1.2163  (0.08954953325205836)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:05:53 -- STEP: 112/203 -- GLOBAL_STEP: 9450\u001b[0m\n",
      "     | > loss: 0.16392654180526733  (0.15714778405215063)\n",
      "     | > log_mle: -0.16136562824249268  (-0.1525838545390537)\n",
      "     | > loss_dur: 0.32529217004776  (0.3097316385912045)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(4.4772, device='cuda:0')  (tensor(4.1553, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 2.2276  (0.908540387238775)\n",
      "     | > loader_time: 0.2864  (0.1523448526859283)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:06:22 -- STEP: 137/203 -- GLOBAL_STEP: 9475\u001b[0m\n",
      "     | > loss: 0.15641039609909058  (0.15593693378197884)\n",
      "     | > log_mle: -0.16269898414611816  (-0.15505519748604205)\n",
      "     | > loss_dur: 0.31910938024520874  (0.31099213126802105)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(7.3839, device='cuda:0')  (tensor(4.3917, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 1.1016  (0.918057988159848)\n",
      "     | > loader_time: 0.2944  (0.1586447440794784)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:06:55 -- STEP: 162/203 -- GLOBAL_STEP: 9500\u001b[0m\n",
      "     | > loss: 0.1463584303855896  (0.1551296760638555)\n",
      "     | > log_mle: -0.16186535358428955  (-0.15673842695024273)\n",
      "     | > loss_dur: 0.30822378396987915  (0.31186810301409834)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(7.7773, device='cuda:0')  (tensor(4.7866, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 2.3112  (0.9464286801255779)\n",
      "     | > loader_time: 0.5976  (0.16932632010660048)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:07:33 -- STEP: 187/203 -- GLOBAL_STEP: 9525\u001b[0m\n",
      "     | > loss: 0.13788408041000366  (0.15439532896414163)\n",
      "     | > log_mle: -0.17538082599639893  (-0.1585030173235398)\n",
      "     | > loss_dur: 0.3132649064064026  (0.3128983462876816)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.3451, device='cuda:0')  (tensor(5.0293, device='cuda:0'))\n",
      "     | > current_lr: 1.15e-05 \n",
      "     | > step_time: 1.4098  (0.9926916022989202)\n",
      "     | > loader_time: 0.1245  (0.17368742998908532)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.003431081771850586 \u001b[0m(-0.021969467401504517)\n",
      "     | > avg_loss:\u001b[92m 0.11911841481924057 \u001b[0m(-0.007980238646268845)\n",
      "     | > avg_log_mle:\u001b[92m -0.17481562495231628 \u001b[0m(-0.004291921854019165)\n",
      "     | > avg_loss_dur:\u001b[92m 0.29393403977155685 \u001b[0m(-0.0036883167922496796)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_9541.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 47/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 20:08:15) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:08:26 -- STEP: 9/203 -- GLOBAL_STEP: 9550\u001b[0m\n",
      "     | > loss: 0.1513541340827942  (0.15102122889624703)\n",
      "     | > log_mle: -0.13223743438720703  (-0.14140053590138754)\n",
      "     | > loss_dur: 0.2835915684700012  (0.29242176479763454)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(1.3658, device='cuda:0')  (tensor(1.7510, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 0.3773  (0.7074390782250298)\n",
      "     | > loader_time: 0.004  (0.026126305262247723)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:08:44 -- STEP: 34/203 -- GLOBAL_STEP: 9575\u001b[0m\n",
      "     | > loss: 0.1524934470653534  (0.15355846636435566)\n",
      "     | > log_mle: -0.15344643592834473  (-0.14437272969414205)\n",
      "     | > loss_dur: 0.3059398829936981  (0.29793119605849766)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5433, device='cuda:0')  (tensor(2.0028, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 0.63  (0.7032970400417552)\n",
      "     | > loader_time: 0.0114  (0.013691250015707576)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:09:05 -- STEP: 59/203 -- GLOBAL_STEP: 9600\u001b[0m\n",
      "     | > loss: 0.1463569700717926  (0.15022972226142883)\n",
      "     | > log_mle: -0.16527044773101807  (-0.15030201410843155)\n",
      "     | > loss_dur: 0.31162741780281067  (0.3005317363698603)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.1851, device='cuda:0')  (tensor(2.2140, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 0.7041  (0.7622219465546689)\n",
      "     | > loader_time: 0.0074  (0.01332007828405348)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:09:31 -- STEP: 84/203 -- GLOBAL_STEP: 9625\u001b[0m\n",
      "     | > loss: 0.15174290537834167  (0.14939184415908088)\n",
      "     | > log_mle: -0.15704262256622314  (-0.15392426082066132)\n",
      "     | > loss_dur: 0.3087855279445648  (0.3033161049797421)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(4.8636, device='cuda:0')  (tensor(2.7874, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 1.4005  (0.8135260485467457)\n",
      "     | > loader_time: 0.5998  (0.04289309751419794)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:10:00 -- STEP: 109/203 -- GLOBAL_STEP: 9650\u001b[0m\n",
      "     | > loss: 0.14393305778503418  (0.14794618082702707)\n",
      "     | > log_mle: -0.16731321811676025  (-0.15666318486589909)\n",
      "     | > loss_dur: 0.31124627590179443  (0.3046093656929262)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(11.0760, device='cuda:0')  (tensor(3.8992, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 0.9805  (0.8395754919139617)\n",
      "     | > loader_time: 0.1115  (0.08183206768210875)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:10:30 -- STEP: 134/203 -- GLOBAL_STEP: 9675\u001b[0m\n",
      "     | > loss: 0.14036816358566284  (0.14686917480248124)\n",
      "     | > log_mle: -0.17449581623077393  (-0.1591271882626548)\n",
      "     | > loss_dur: 0.31486397981643677  (0.30599636306513606)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.3357, device='cuda:0')  (tensor(4.3656, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 1.0184  (0.8610391118633214)\n",
      "     | > loader_time: 0.3122  (0.10892581228000014)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:11:00 -- STEP: 159/203 -- GLOBAL_STEP: 9700\u001b[0m\n",
      "     | > loss: 0.14554080367088318  (0.14580343616833474)\n",
      "     | > log_mle: -0.17219960689544678  (-0.16110676489536124)\n",
      "     | > loss_dur: 0.31774041056632996  (0.30691020106369593)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.8692, device='cuda:0')  (tensor(4.3221, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 0.803  (0.8798385851038327)\n",
      "     | > loader_time: 0.2791  (0.12924764141346673)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:11:34 -- STEP: 184/203 -- GLOBAL_STEP: 9725\u001b[0m\n",
      "     | > loss: 0.13987767696380615  (0.14498944872099417)\n",
      "     | > log_mle: -0.17306911945343018  (-0.16284893841847128)\n",
      "     | > loss_dur: 0.31294679641723633  (0.30783838713946543)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(13.0422, device='cuda:0')  (tensor(4.5453, device='cuda:0'))\n",
      "     | > current_lr: 1.1750000000000001e-05 \n",
      "     | > step_time: 1.6999  (0.9169261144555133)\n",
      "     | > loader_time: 0.2657  (0.13743185608283334)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.05630508065223694 \u001b[0m(+0.05287399888038635)\n",
      "     | > avg_loss:\u001b[92m 0.11021110787987709 \u001b[0m(-0.00890730693936348)\n",
      "     | > avg_log_mle:\u001b[92m -0.17974065244197845 \u001b[0m(-0.00492502748966217)\n",
      "     | > avg_loss_dur:\u001b[92m 0.28995176032185555 \u001b[0m(-0.003982279449701309)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_9744.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 48/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 20:12:22) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:12:29 -- STEP: 6/203 -- GLOBAL_STEP: 9750\u001b[0m\n",
      "     | > loss: 0.12651607394218445  (0.13668390611807504)\n",
      "     | > log_mle: -0.1531815528869629  (-0.14495543638865152)\n",
      "     | > loss_dur: 0.27969762682914734  (0.28163934250672656)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(4.9350, device='cuda:0')  (tensor(3.2383, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 0.2671  (0.7156439224878947)\n",
      "     | > loader_time: 0.0039  (0.0038437445958455405)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:12:45 -- STEP: 31/203 -- GLOBAL_STEP: 9775\u001b[0m\n",
      "     | > loss: 0.15343829989433289  (0.1469586962653745)\n",
      "     | > log_mle: -0.1490572690963745  (-0.14734566596246534)\n",
      "     | > loss_dur: 0.3024955689907074  (0.2943043622278398)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.8978, device='cuda:0')  (tensor(3.1201, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 0.6124  (0.6559185981750488)\n",
      "     | > loader_time: 0.0083  (0.015674983301470356)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:13:08 -- STEP: 56/203 -- GLOBAL_STEP: 9800\u001b[0m\n",
      "     | > loss: 0.13940060138702393  (0.1423430394913469)\n",
      "     | > log_mle: -0.1694011688232422  (-0.15361186223370688)\n",
      "     | > loss_dur: 0.3088017702102661  (0.29595490172505384)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.9494, device='cuda:0')  (tensor(3.1306, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 0.7773  (0.7488654851913452)\n",
      "     | > loader_time: 0.0062  (0.020015052386692593)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:13:40 -- STEP: 81/203 -- GLOBAL_STEP: 9825\u001b[0m\n",
      "     | > loss: 0.1432788372039795  (0.14179332499150876)\n",
      "     | > log_mle: -0.17870497703552246  (-0.15751793207945647)\n",
      "     | > loss_dur: 0.32198381423950195  (0.2993112570709653)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(11.1815, device='cuda:0')  (tensor(4.6718, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 1.2877  (0.8705246007000959)\n",
      "     | > loader_time: 0.2037  (0.05657832122143404)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:14:15 -- STEP: 106/203 -- GLOBAL_STEP: 9850\u001b[0m\n",
      "     | > loss: 0.13378465175628662  (0.14044043421745298)\n",
      "     | > log_mle: -0.1754244565963745  (-0.1604074003561487)\n",
      "     | > loss_dur: 0.30920910835266113  (0.30084783457360176)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(6.7470, device='cuda:0')  (tensor(5.1040, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 0.8978  (0.9182071415883191)\n",
      "     | > loader_time: 0.4021  (0.12259265161910148)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:14:56 -- STEP: 131/203 -- GLOBAL_STEP: 9875\u001b[0m\n",
      "     | > loss: 0.1378917396068573  (0.1392221921727857)\n",
      "     | > log_mle: -0.169671893119812  (-0.16306475646623214)\n",
      "     | > loss_dur: 0.3075636327266693  (0.30228694863901795)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(10.1547, device='cuda:0')  (tensor(5.5354, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 0.903  (0.9664091972904351)\n",
      "     | > loader_time: 0.2776  (0.18189127572620187)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:15:31 -- STEP: 156/203 -- GLOBAL_STEP: 9900\u001b[0m\n",
      "     | > loss: 0.1380448341369629  (0.13810838682529247)\n",
      "     | > log_mle: -0.17216575145721436  (-0.1650645121549948)\n",
      "     | > loss_dur: 0.31021058559417725  (0.30317289898028754)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(5.4895, device='cuda:0')  (tensor(5.5981, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 1.0003  (0.9801309047601162)\n",
      "     | > loader_time: 0.1156  (0.20826231363492134)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:16:07 -- STEP: 181/203 -- GLOBAL_STEP: 9925\u001b[0m\n",
      "     | > loss: 0.12640297412872314  (0.13736065217803192)\n",
      "     | > log_mle: -0.18522632122039795  (-0.16681709934993336)\n",
      "     | > loss_dur: 0.3116292953491211  (0.3041777515279656)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(12.1218, device='cuda:0')  (tensor(5.5005, device='cuda:0'))\n",
      "     | > current_lr: 1.2e-05 \n",
      "     | > step_time: 1.1079  (1.0074753036815156)\n",
      "     | > loader_time: 0.4115  (0.2149391516796133)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[91m 0.08672553300857544 \u001b[0m(+0.0304204523563385)\n",
      "     | > avg_loss:\u001b[92m 0.10379725694656372 \u001b[0m(-0.00641385093331337)\n",
      "     | > avg_log_mle:\u001b[92m -0.18252012133598328 \u001b[0m(-0.0027794688940048218)\n",
      "     | > avg_loss_dur:\u001b[92m 0.286317378282547 \u001b[0m(-0.003634382039308548)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_9947.pth\n",
      "\n",
      "\u001b[4m\u001b[1m > EPOCH: 49/50\u001b[0m\n",
      " --> ./models/train/run-June-12-2024_04+39PM-2c81749\n",
      "\n",
      "\u001b[1m > TRAINING (2024-06-12 20:16:57) \u001b[0m\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:17:03 -- STEP: 3/203 -- GLOBAL_STEP: 9950\u001b[0m\n",
      "     | > loss: 0.1230449378490448  (0.12861527999242148)\n",
      "     | > log_mle: -0.1440495252609253  (-0.14531183242797852)\n",
      "     | > loss_dur: 0.2670944631099701  (0.27392711242039997)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.5495, device='cuda:0')  (tensor(2.4733, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 0.4178  (0.817763090133667)\n",
      "     | > loader_time: 0.0044  (0.02703714370727539)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:17:19 -- STEP: 28/203 -- GLOBAL_STEP: 9975\u001b[0m\n",
      "     | > loss: 0.13709300756454468  (0.13595946878194806)\n",
      "     | > log_mle: -0.15422868728637695  (-0.15150595137051173)\n",
      "     | > loss_dur: 0.29132169485092163  (0.28746542015245974)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1658, device='cuda:0')  (tensor(3.3297, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 0.7083  (0.6440329466547284)\n",
      "     | > loader_time: 0.0072  (0.00800928899220058)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:17:40 -- STEP: 53/203 -- GLOBAL_STEP: 10000\u001b[0m\n",
      "     | > loss: 0.11264663934707642  (0.13413458219114338)\n",
      "     | > log_mle: -0.1635662317276001  (-0.15733020260648908)\n",
      "     | > loss_dur: 0.2762128710746765  (0.29146478479763255)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(3.6420, device='cuda:0')  (tensor(3.2861, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 0.7983  (0.7350112312244919)\n",
      "     | > loader_time: 0.0064  (0.01665334881476636)\n",
      "\n",
      "\n",
      " > CHECKPOINT : ./models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_10000.pth\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:18:11 -- STEP: 78/203 -- GLOBAL_STEP: 10025\u001b[0m\n",
      "     | > loss: 0.13338664174079895  (0.1331231651397852)\n",
      "     | > log_mle: -0.17334198951721191  (-0.16175885995229086)\n",
      "     | > loss_dur: 0.30672863125801086  (0.294882025092076)\n",
      "     | > amp_scaler: 32768.0  (32768.0)\n",
      "     | > grad_norm: tensor(2.1197, device='cuda:0')  (tensor(3.3999, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 1.2895  (0.7699895057922753)\n",
      "     | > loader_time: 0.6022  (0.02595383693010379)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:18:41 -- STEP: 103/203 -- GLOBAL_STEP: 10050\u001b[0m\n",
      "     | > loss: 0.12520816922187805  (0.13184577400244565)\n",
      "     | > log_mle: -0.17015492916107178  (-0.1647645674862908)\n",
      "     | > loss_dur: 0.29536309838294983  (0.2966103414887364)\n",
      "     | > amp_scaler: 65536.0  (39130.71844660192)\n",
      "     | > grad_norm: tensor(7.0763, device='cuda:0')  (tensor(3.8940, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 1.2489  (0.820669384835993)\n",
      "     | > loader_time: 0.2227  (0.07417480228016678)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:19:09 -- STEP: 128/203 -- GLOBAL_STEP: 10075\u001b[0m\n",
      "     | > loss: 0.11444064974784851  (0.13048919476568704)\n",
      "     | > log_mle: -0.17977464199066162  (-0.16757502872496846)\n",
      "     | > loss_dur: 0.29421529173851013  (0.29806422349065553)\n",
      "     | > amp_scaler: 65536.0  (44287.99999999998)\n",
      "     | > grad_norm: tensor(2.9317, device='cuda:0')  (tensor(4.0818, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 1.4843  (0.83744528144598)\n",
      "     | > loader_time: 0.1228  (0.10018716380000114)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:19:42 -- STEP: 153/203 -- GLOBAL_STEP: 10100\u001b[0m\n",
      "     | > loss: 0.14346668124198914  (0.12962260335878614)\n",
      "     | > log_mle: -0.1768890619277954  (-0.16956711127087956)\n",
      "     | > loss_dur: 0.32035574316978455  (0.2991897146296658)\n",
      "     | > amp_scaler: 32768.0  (46689.045751633974)\n",
      "     | > grad_norm: tensor(6.8465, device='cuda:0')  (tensor(4.2008, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 1.5882  (0.8743563695670733)\n",
      "     | > loader_time: 0.3972  (0.12504341088089288)\n",
      "\n",
      "\n",
      "\u001b[1m   --> TIME: 2024-06-12 20:20:15 -- STEP: 178/203 -- GLOBAL_STEP: 10125\u001b[0m\n",
      "     | > loss: 0.12679976224899292  (0.12884104034204166)\n",
      "     | > log_mle: -0.17999112606048584  (-0.17119837476966088)\n",
      "     | > loss_dur: 0.30679088830947876  (0.30003941511170246)\n",
      "     | > amp_scaler: 32768.0  (44733.84269662921)\n",
      "     | > grad_norm: tensor(3.4476, device='cuda:0')  (tensor(4.1976, device='cuda:0'))\n",
      "     | > current_lr: 1.225e-05 \n",
      "     | > step_time: 1.6194  (0.9053338484817677)\n",
      "     | > loader_time: 0.1152  (0.13525530059685867)\n",
      "\n",
      "\n",
      "\u001b[1m > EVALUATION \u001b[0m\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " | > Synthesizing test sentences.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "  \u001b[1m--> EVAL PERFORMANCE\u001b[0m\n",
      "     | > avg_loader_time:\u001b[92m 0.051516443490982056 \u001b[0m(-0.035209089517593384)\n",
      "     | > avg_loss:\u001b[92m 0.0960361585021019 \u001b[0m(-0.0077610984444618225)\n",
      "     | > avg_log_mle:\u001b[92m -0.18774428963661194 \u001b[0m(-0.005224168300628662)\n",
      "     | > avg_loss_dur:\u001b[92m 0.28378044813871384 \u001b[0m(-0.0025369301438331604)\n",
      "\n",
      " > BEST MODEL : ./models/train/run-June-12-2024_04+39PM-2c81749/best_model_10150.pth\n"
     ]
    }
   ],
   "source": [
    "trainer.fit()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(['models/train/run-June-12-2024_03+46PM-2c81749/best_model.pth',\n",
       "  'models/train/run-June-12-2024_03+46PM-2c81749/best_model_3654.pth',\n",
       "  'models/train/run-June-12-2024_03+46PM-2c81749/checkpoint_1000.pth',\n",
       "  'models/train/run-June-12-2024_03+46PM-2c81749/checkpoint_2000.pth',\n",
       "  'models/train/run-June-12-2024_03+46PM-2c81749/checkpoint_3000.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/best_model.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/best_model_10150.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_10000.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_6000.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_7000.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_8000.pth',\n",
       "  'models/train/run-June-12-2024_04+39PM-2c81749/checkpoint_9000.pth'],\n",
       " 'models/train/run-June-12-2024_03+46PM-2c81749/best_model.pth',\n",
       " 'models/train/run-June-12-2024_04+39PM-2c81749/config.json')"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import glob\n",
    "\n",
    "ckpts = sorted([f for f in glob.glob('models/train/*/*.pth')])\n",
    "configs = sorted([f for f in glob.glob('models/train/*/*.json')])\n",
    "\n",
    "test_ckpt = ckpts[0]\n",
    "test_config = configs[-1]\n",
    "\n",
    "ckpts, test_ckpt, test_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " > Using model: glow_tts\n",
      " > Setting up Audio Processor...\n",
      " | > sample_rate:22050\n",
      " | > resample:False\n",
      " | > num_mels:80\n",
      " | > log_func:np.log10\n",
      " | > min_level_db:-100\n",
      " | > frame_shift_ms:None\n",
      " | > frame_length_ms:None\n",
      " | > ref_level_db:20\n",
      " | > fft_size:1024\n",
      " | > power:1.5\n",
      " | > preemphasis:0.0\n",
      " | > griffin_lim_iters:60\n",
      " | > signal_norm:True\n",
      " | > symmetric_norm:True\n",
      " | > mel_fmin:0\n",
      " | > mel_fmax:None\n",
      " | > pitch_fmin:1.0\n",
      " | > pitch_fmax:640.0\n",
      " | > spec_gain:20.0\n",
      " | > stft_pad_mode:reflect\n",
      " | > max_norm:4.0\n",
      " | > clip_norm:True\n",
      " | > do_trim_silence:True\n",
      " | > trim_db:45\n",
      " | > do_sound_norm:False\n",
      " | > do_amp_to_db_linear:True\n",
      " | > do_amp_to_db_mel:True\n",
      " | > do_rms_norm:False\n",
      " | > db_level:None\n",
      " | > stats_path:None\n",
      " | > base:10\n",
      " | > hop_length:256\n",
      " | > win_length:1024\n",
      " > Text: It must be remembered, however, that most modern printing is done by machinery on soft paper\n",
      " > Text splitted to sentences.\n",
      "['It must be remembered, however, that most modern printing is done by machinery on soft paper']\n",
      " > Processing time: 0.9726309776306152\n",
      " > Real-time factor: 0.3489734616108283\n",
      " > Saving output to out3.wav\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "\n",
       "                <audio  controls=\"controls\" >\n",
       "                    <source src=\"data:audio/x-wav;base64,UklGRkTgAQBXQVZFZm10IBAAAAABAAEAIlYAAESsAAACABAAZGF0YSDgAQDX0i3Q1dIn2bDcd9sG2UbaGOGN6uLuDvDO8xn6xAJfDIYRFBdbH7YnsTCVNyw8DUDCQoxDlD8iN3IrQyAAGNcRvAua/7rw2+Ou2CfSCslqvom0GasFpriic6HSo9mnra37tA27HMFPymXWX+Ts8FL4EP+ZB5AT4SKGMGc8IkeVUOFYjV+eY8xlkGjbaQhqLmfUX4lYdlNZUJ5N7UfJPhs12yqbICAXhhAqDVENcQxeCDoCUftO9xj1JfN/8HPtKOqI5vbieN+g3N3br9qn2LnVa9EQzt/MF8uPyuLJM8hKyM/GZsWjxfXFJcchyvDKOs0S0CPSWNY+2rre++Ox5wLqLOwD7mLw//Jn9Yf3Ufpb/Cn9nPuT9tfwxu3X7UDxDvVU90f7hgBvBrULwA3DDScP+hEZF2QcUSAPJZcpcC7DMoY0OTXeNP0ykS+pKzcp1imMLGQu4y0CKgslNyDDGlMWyRKOD3kOtg2UDCMMyAueC8kL5wlsCDgJbApLDUMP3A4nD5kPhA8WEYwQwg20CnIHzAY8CaMK7QqNCTMG9QRsBPICHwE+/UH5v/hE+Qf6KvcS7yzlNNy+1WHSU9DuzcfJ28QuwYW+br6VvE67UrzovIS+mr98wInFD8z+0V3au+Hg6q30K/oQ/ysCpAVuC9oOrhGeE8wUxBiBHPcdjx7THdccghstGL0UjhPGFLAWehc/FsEWeBihGWkb1xsFHY8goSRIKdYuWDSBPOlE8EolT/VMb0jGQv058zGPKbsfrxVRCiP/K/Vl62jhFtb3yyLFQsGQvT+5x7SWshy0zbfyujS79LsHvlvCEMiqzcnT/tpM4v/nhO3z82762P6GAggHKAvWDswP4g4JEPoSIxfOGiYdqx5dH/8g5CNPJcMlySOjIN8fGR/XHUkbYxeqE2QPkglIA7z7r/PN6zflwOEl3nXax9bn0/rU/9hA3QjizOQa5U3ozu/H+ksG4g5sFB0bwCIhKuAx7DcPPUVAREFFQgZD+UNyRMhDckIWPmY13ipsIVEa+BPdCr8BGPrx8X/q9OJZ3XTaSNbC0AHLPcZYxIfEvsbDyKbKxMwzzg/Sftgp3gXjKOcN6Szs4fBg9Fv4Jv52AygInwvlClcHiwTAAsQByAAu/7b+dv89/wD+//2T/54Awf9V/Yv74v2fAK4C3QaQClQMhg0fDOAKjw1rDiUNNgrTBOb/8Ps7+Qb5vPgN9Mbr2eNN3mjam9Wdz0XN2s4m0YLSKNQl1z3dveb/7Kzzw/tdBHIQtB6bLTo+Z02wVQZZMluDX95mAG2AbaRnE157VehSpVbCV/ZSN0dNN8UqCSTFH7gZxREPBsX4VvFK7j7wqfET7PXjhNpT1NnTi9M80Y3MxciFy0/OE82oz2bUDdvY4Lffut3D3Yvf3OLB5k/okOZy5EniWt4N2iDWUtJDz4zMhsVHvu+6Qbibs9muMqrrqbatcbKquWTBK8kU0tPbhOI36ELu4vQm/64NIhxXKmkybzUgO0FBKEr+UUhVsFkpXiVeHV2/WYBVz1PzUbxTH1XPUPtGljmvLIwmNCNVHP4Ung9xC9QIqgSa/dL2IO865wLglN2d273atd0W4+DpX+rY4ibaG9iF3wbqmu868j7zWfiu/nMBfALuAhsIcwx0DTUKtwI6AQAGFg/lFYcTEAiO/Of1pPUZ/E0AZQE9ATD+Dvuu+XH0C/Fb8DTzSvrO/cH7qvhb+CT7YP6A/ywAHQTbB2EJmQkRCXwOAxRmFtQWgxalFTMTUQ4BCm8K5g7jE4cU3g/5B8AARftI9vXwresU6KvoRun06Nvk9dzv1DLPM8xkyqfIA8YjxN3DUcXZx6XKIcyazkPTvdiQ3o/jGOmg8DX5/AHRCT
ISMBuSIDcksCYHKZ0u0jOFN485zTgPNy42JDV9M/Iv9im/I/odYxpLF7YShg09CT0EIf6U9//w4e0X7/Xw9/HL8UXwlfAI83/30vvh/T7/4wAQBGAJ4A5OEpgS9g/RCi4HRgh3C0wNtwzqCUYJzgukDbcORQ1VCrcHbQOM/tj6MfgJ94v09O+l7OHp7OdP5EbeytjC1IXT9dX32FHbAd4E4Wnnde7H8374YvsRABMGlgtjE90aLCDhI/0kNyWoJXgl2STtId0cQhbsD+4NeQ1zC8MIDQRwAHP+0vlS9fLysfAI8J7vpO6G70zx1vCw8X7z4vQj9iH1M/TO9L73hvy1AWsFYwbaBZ0HtQvyEO4UBhUxFMgV4hcSG3AeIR9qHlgcThcaE4IPpQt+CTMHaQPx/i35IvQe8CLqneLU2vPTbc7QymPJVsl4y5LOy88B0lXUkNVM2bjeD+TZ6T3thPCr9jwAJwllD0gTSBQmE60QXw5MDRMNzQsBCpoJ6guZD8USvBPnEd4O3gvPCc0JVgtoD4MVNBySI6oqqDEmOJw8D0EXRBFFXUVTRP5Ce0BrOxw1Mi9nKCcfxxOOBjr5A+2o4TXXxsxCwba2jq27pO2e9JrWmN2YRJmsm2OijazRuMXE5c7L2BzkW/IPA/UVVSg5OENFfE/OV8RfYWfUa5FsVmlgYQZYtE30Qis4ASvtG8YKS/mj6KXX0McvutWtUqF3k9eHBYIRgAGATYHXg72JqZBYl4eeoqVmrzS7Xcg21nfhNusS9hgCrg4oG8MjFymzLXEyNjmfP7RD3kaiSqlON1L3VdNYcVtvX6BhW2NMZctlUmVuY/pgZF2qV3tRIkyARglBADqyMFEoHSE5GiIUUQ9jCxwJ4AgqCG8HYQSm/6z8QfpC+L31dvMD8nrxBPAq7dboBeOc3S/ZhdV80j7PR8muwiu9UbrYuq66CbketRyxK7B6sFWwz6/LsIO0XLlyvZ3ADcRUx/vKRM9z0rnS4tCozy3QDNOW1qPY+9nJ3LHgEOdy7t70VvubAaIIxxH3G3sjTyfxJwMoNSrJLdoxUjTSM+IydjJJM5g0KzVjNdw1YDVfNXo2yDUdNL8w9yzqKawmiiK7Hi8c/RsmHfEdex2IG2sZABjSFp8VMBX0FPAU4hRNFDkUjRWkFpYWkBV8E04RcQ3eBn8An/p19s7z+vBw7dDpo+bf5CPjDeLj4pnjHOZi5y3peetx7frtOO3K7MTtAfD67zbuaOtg6snsW/Da8hP2z/g3/Mn/YgK3BLwHbQoGC08LLQpHCGgG0wPcATIAePwG+GDxpeqN5v3itd9B3BTYk9T505zU69XD1/fYZduq3nHhEuQT5pLpbu/F9YD8sQHKBoMNCxSsGuweeCFeI/YkJScvKaspbigvJkkjuyEoIAsdmhkpFRsRcw26CY8HVQYeBTUDQAE+ACb/9v3v/MD6Tvlp90jzwO7D6oHnKeYS5l7lPuSm4mThvuDk4C/ieOQ659bra/O3/AMHbBC6FwAfKiYkLj83Cj9NRdlJEE1HUGVSAFI0T4RKbUXMPj81TyndGyUOMwEP9GznY9xo0sPJncFCuli0ELDbrSWtpqx0q+CpCaoYrtm19L7ExnvNStXS3QnnSPC19+X9mQM8CY4PDxVlGUkd2yI4KfgtdTAnL/ksaCq8JwInjie/J04n5SV3I2YgRx1HGrMYuhceFfkRpxAFEb4TuhXME1oQOw0rCzQKcAqPC4cMVAynCo4HvwT5AAz84Pat8LjpPOOq3XjZotZa1P7Rn9DGzrLL1McCxE3B+sEExSXJP85V0xHZpN/E5U7qS+468lf2svpe/pEBaQXwCTUPWBT0GWEfQSNwJBgkFiPOImgiESLHIccgsR+oHqsdTBxCGUQUtQ5kCisJqQm2CZcICwgbCPcHuAjDCYAKVgxjDXsNVQ6ZDxsSOhU0F94XSBZLE5QQ9A2JC1cIIwPQ/Jf22PBx6nbjtNyR1m7SOs4vyV
nEmb9EvVS+RcH6xCrIHMqXzMjQNdaj3crlI+1C9Hv6YAASBh4LkhBmF/8e8yX6KrstFi+JMKQxhzLPMiMzJzSHNO0zZTGVLWEp8ySFIGscdhiHFPsQKQ79C/wIwgTf/+n6hPdC9XL0n/Qe9dP0k/Tn9af3yfln+8f86f6qAucHlw3/EhIYZxwaIOEhBCIDI8MkeSYEJuMhYRsNFfAPCwzjBjb/EPWH6tPhJNuF1Q3Qv8oGxfS+o7iAs+mxNbKBskuy/LH4s0u3m7wHw/TJJdE917PcAOI35+vsSfON+cn/FAUzCccMrg+zEQkT1BObFA4VjBUEFvoWkRg/Gq4bWxshGdoW+BVGFr8WYxYkFWgTbRGcDhkMsgmxB/0FlASXAxcCWgBM/zL/HAA9AU8BQAFfAY4C+QOcBWUI4ws6ENgToxZeGfAbvR5+IuklvimOLk4ybDUYNgA0PjF6Lm0sUStPKZ8mKSMyHmMZ+xRjEQwPTwv3BEL+pfcP8yjwsuwG6fPlv+NC4r/fXdwV2v7Yt9mO2RbYHNaa1APV19ZM2Rbb8tuO2+TbAd5v4erkHeYa5eLko+ZA6UHrtup66ZjoMuh36D7os+e35wjoMulr7JvwtvRA+Jr6kf3nAdQGtAsPD4kQzBBNEXASzhPjFGYUrxNwEjoQug3ICQgGkwNPAtkBzQDD/gb8Dfpf+Yj5Sfn8+L/4/viK+qz7fPw4/Qb+gv+pAa0E0gjuDVYUdRrtHoci7iQpJ8Mo6ymaK90tYDC9MRgy/DHAMS4x2y8gLdEp0CUdIaocFRk0FmgTuA/xCnwGcALJ/on6hfX975Tq3eV54urej9p/1ZfQFs6IzEjLTsk7yITIVMohzezP/tId1+DbTuF85iHrTPAm9rb9KAUXC+EOKxGFEhsUixXlFcEV8BVJFqQWjhUVEwUQAQ2jCpgHhQNA/gr6F/jY90j4TffT9DryBfCo70zwnPCo8GzwPfBr8I7wKPEt8sbynvIp8XnwCfH/8u71PfgR+kj8FP+dArUF6QdsCeMK3wsjDCUMnwvuC/oMPA3UDAsLPQhTBbkBhv4E/GL61Pl++V/5B/pt+jL7rv3cANwEgAi+CloNZhGAFjoc4CKQKHgtvjCWMlczazMJM0AysTFpMWYwIS0uKI0h1xo2FE8MvQMe+zDz1OxD56zhTNyb1rXQkMyYypnKqMtbzMfLr8ujzFfOyNAr063UAdfA2XLcyt804uPkg+ih67DuBPBM7+/uUO/+8MjzkvZE+Mb5YvtE/U3/3QERBcAJ8w+rFQoatR06Iqgoii8uNXE5pjxzP71BzkNhRX1Fh0S/QmNATD09OPgwhynwI7ke9RitEZwINP/Z9ZPuberQ5w/mJ+Sx4I3dxNtr2yHdW98P4dzjtucq7LDx8vaI+gn9Ff97AEwCbQNqBG8EswHG/U75qPSI8UHvSOy06NTjCN602GbUDdIZz6fLXMmcx7LGu8YXx2DIW8qPylLLMc3Gz03UcNt84yXt+PXc/P0BIgdKDAgSNhrYIM8msCtCL4MzhzhUPdZBWEMEQhA/7jvoOO01mzR8NEUzrDAKK/si5BpUEVcJeATE/6b5e/Op7ebpKOmo6HLnNuXB4rHhm+G34lPknOTP5s7rWvKn+fv9CgHzBAYIPQqwC2IMBQ/oEvgX6Bw6H+AfxR4IHoYc3BjdFLcS1hG/EJAP7gxAC6sKQAmSBz4FSgOSAZf/aP0++/b4ZvWR8dvuZu6g8BTyJvEp7qzqo+pp7fjw8vI58+/xVPC+8WD05/bZ+Xb76fvG+8n5EvcO9oz2ffbG84vuSOoJ6RHpLOmi6J/oxeqW7Fntqu2c7mrwOfPX9p76YP7MAAkCwQPLBcEHTAh0B4kH+glDDFUMVQm2BIQB1v+p/+7/uP/L/ysANwFpAYsAU/6P+3D5U/jy+BT7bv6m/5X/K/5d/qMB2QJFAoAA1/7x/8gAXQCzAJIBgQN1BbgFowXgBZ0GpgipDA8RIhW0F9
IYLBsuHw4kDienJ9UnPCgzKewp+ShxJ/UlkyQ3IkIfVhs9FgARrwugCAUHKQUNAf77Rffl8+LyIfOB8mvwBe3857/kSePY4Jnectuw2FHYAtqY3Yrh7uMP44Dg+t5F4J/jTuhG7BPwZPTe+QMAJQSJBToFvwRDBhcJKQthDAMNAQ7oD3ARPhG/D9UMtgn3Bo4DfgDy/mT+wf43/oH6cfRp70DtN+3Y7Sft3uuz6qbpculH6f7p6uoD67nqx+rS6ojr0uxx7sTwZ/I98o7wy+7E7l3w6fKE9e72hPjU+tT93QASAxoDGQMMBFkG4AhMC5QNLA+fEEERwxGwEkIUvBa1GWQa5hotG6UcKyErJukrKzFFNPg1xTYhNnE2YjdnOCQ5ETnpNwQ3WDZeM8EuNimsIyoe4RcJEu0MNgigAwX+D/gN8RbqMeXM4Qzf3NtB11vThdA+zs3MqsvWypfKf8p4ysrK68srzTDPQdIA1djXstkl2hDbg9zQ3pPidOWJ57LoE+ri62Pu9PBP8n7yovI/83z0yfX29VD2Z/aP9mj4xvvB/7sEIQkpDbMRvBXRGLwbjx/4IykoBSw5LvEtWSwKK4AqzCljKBcmvyPvIeMfuB10G0QZORiwF5AWJxTpEKkNEAvZCMgGTgXmAxUE7wVSCCsKXwuZDAMPNhLzFGMXQxj8GCoa4BqVG/McFR1hGw8XZxAiCfMADvlj8fHpfOGJ1iHL6b95tWatrqYZoBmaNZQAkHWOUo/yktqYxZ9Lpw6wGro+xkfUvuNx86gCFBFwHvMpUTP6O1REr0vYUfFVqFhtWiBbelp6VyJT6EwlRTM9jjQkKqQf8BQXCgcBlvcK7hfls9ts09bNtMkPyN7Hycgiy7XOC9SF2fveRuRc6dPuvfQY+sP+KwNQCEoNHBLPFvkZBhxmHRcfpCEfJIImYidAJycnoicFKPgneieYJsomoCevKKooXie2JCsiyx8NHTMZoxPuDSoJ6AVYAyf/Svrw9aXxc+5R6iblFuE+3r/bt9m712XWdNcB2oXcgt8P4kHlyunc7VTxGfT09Qr3AvjD+Kn5Y/rN+KL1C/Nv8KrtkOqA5uXjD+PL4iPkqeUq6FzrIe6C8LDyv/Rs99j6Mv66AQcEpQRcBaUFnwenCUEKwQlgCBYIyAd+BxUGAAQGAj4BMgGaAmsEQwWbBlIIxAg4CPsGrgbxCKUKBwpgCGoHJQnfC20NTw7JDI4KBAlpB/AFPQOb/3j90/3P/3YB3QFpAXQBLwJFAyQEMAOPARAB4AIPBxAMJxDuEWMRkBAKD4ENmgsbCJYE3AGgAA7/qP2D/AX7+PkJ+a74BPnr+eH5gfp9/UQByQTMB3oKfw38D+8QnBCFEE4QKBCFEE4RFRPrFKYWNhf9FRgTeg7dCSsG2QLy/wv92Plq9jTz2u+x7BPpzOQy4EPb7taf09/Qhc59zQ7Ozc9d0gfVHdjD21TgFuY87Fzyvvfs/IMDLgu2E/QbFSP4KLct/jFLNTY3ojfHNus0XzIzLjYoyiDmGFwP+QXc/JLznOsV45/avdKdyzHGS8Kfvm27Z7m3uCW68r2kwqvH3MwQ0qzXDt525Cbrg/Ey90b8OwERB5gOKBZbHXwjziY+KYor7y1wMIMy/DJgMuUwhC8RMCkyADSINPgyejCDLkMtPix1K98qtypNK1ErXSv0K28s0ivPKXEnqyVfJIkiXB/AGw0YchOADq8IfAJC/bv3xvKI7+Ds4OmN5vfi/t6a3IvaTNgd1uLTx9IC03DTX9PE0lvS2tFQ0ZLQdc/3z/fQ9tEw08HTktMk07rSldM21IPVTtej2P3ZY9uI3YffF+EB43PkFuV95j7oGOt/7cfuYvDi8SX0J/a991z6nf4hBMYKhhCSFacZ1hzXIEIknycEKqUq7irSK50s8C2GLrwtYSxYKvAoPCjyJjEltyOcIjMifiI9InMh6yA6ID0fOB5gHE8bIRtdGlgZBRjqFnoW7BX7FL
QTqRJuEqsSkxJCEfcOfAz2CbgFTwDT+nP2hvNg8GPrbuaM4Qrd/NkH14nV0dT602vSr9BHz83Oec/L0W/V3Njs2+7dJuD241Dpt++Z9kT8EwF7BKQGvAkzDvUStBYqGBkYkxesF6oY6xmBGuEZ5BgGGBgXRBaNFYUT0BD4DFAIGgTLAMP9vfpr98DzFfC262jm2uFU3ufb2dpn2tTZkNnl2JXX+9bc1gXYhtrS3QrifuaX6nbu6PG89XP6s/8hBbwKiQ6UEXEVVxo1IK8lGCnYKmgrbypvKe8nlibyJZIlayV3JOMhIB4nGhMXkBQ7Em8P3AymCrsIzAbpBLADdgO3A8UCtQAA/s/7j/pv+bf3//Xq9Cv0lvM78m7vXOys6QLoqeYk5ULkCOQk5frmF+hZ6NLn8+fT6RHtjfDK8zr3zPt0AZoH4AyUECcSkxK3EjUTxRTiFWUW2xXbE5MQmgvlBe8AkfyV+Fn0IvCJ7Mzp2+fu5qLmLedR6Szs+u8h9Bv3ivoA/04EKQoVEKAVBBrUHe4gRyTkJq4ndiZfJGsjByI0H/wavxVLEfIN8gn4BP/+N/lN9F7wR+wZ57fhndwG2Y3W29Ta09zTYNTt1ATVV9W31iXZiNyH4NvkKOmM7hf1J/ybAqoH0gtbEKUVhxx8I68pky60MRU1CjkwPJw+wD9IP3E9+jmuNGMufydDIBQZWxJFDOoEivzv85TsT+di42XeV9i80gnPEs4kz7vQltHt0crSl9Xq2cLecOI95cvnkOrW7RvxWPTD9yD8YQAiBDgHzwlxDG4OUQ9kDzYPow7GDikQ4RHxEn4S8BDgD2kP5g7tDdEMIwwJDKEMCw1IDZwMXQsdCvsI7gixCBIINQcDBs4F+wXNBUoFAgXFBJoEawTaA0UDewMxBOUEGwXQA+4BSQBa/7n+0f1g/Jj6XvgK9sjzIvJU8VPwqe6N7JXq+OmE6krqYuk45yvlheMN4tzg5N/W34/gneGs4rDjlOTb5W3otOtT73Hy5PNT9Y/3PvvZ/wwEYweBCmoNBxDkEfISLxTVFQAX7xcZGDUXKhazFe0VzhWKFPERIg8yDbEMKw1dDtAPFBH2ESYShBKJE0UVyBd8GRIbWh1uIJUkzSdbKbYpJSlJKJEnlibZJXMllyQvI7EgVR3ZGX0WhxK2DZ4IMQOZ/Q74//Ia77nsn+rr5x7kheCy3dvbv9rG2WLZM9l02fzY/NeU1wfYe9l2263cYt063VncFNwm3Cfdx95536/fVOBh4WzjyeVz54XoEOhX58jnF+lL65ztN++W8a3zCPa2+Mb6g/x6/kkAfAEAA/ADJAX8BqcIOgpgDOENtg9IEGsPGA/QD/IRWxQeFqgWjhYZFuMU2BIyEGwOFw51DiIPWQ7XDBkMUAwbDqAPJA9fDroNVw3GDSQNxQzQDdoPIxFtEHgPZQ26C3UK6gioCP8HCQbiBOgEXAZQCrENKQ88D4sOPw3zDQ4QXhLBFHIVMBXRFXoXCBjIF5gVAxI2DosJpgSiANr8ivrB+DT4Lvj69qvzOu6w6bDoSepe7JLuiu4e7hHut+267VXsn+nh53bm7eU55wfpVOqN6wTrDOrm6UbpW+m06AnpX+oR7BTufe/O8BjxtvA98Ljv7++r8NTwXfEN8wP29fhA+zL+egFMBPIGwggpC4cOPhKbE1AT5xNsFLMTeBJvEHUO0A0eDIsKagivBCQAp/u+90z0QvFq7ybucO3D7L/r6OtU7QHxqvaO/P8CLQn3Dh8VGBygI5ArKzPDOgZChkblSDxJ/kdQRyZG3EEnPNg1pC5hJ8wdAxJlBj77F/He5n7c9NGsyDfBrbtItnWx/q7YrdytpK7dr4yy9bVFuQq+CsQSy5rRQth94KHp8vPy/doG6A4+FIkXRRpOHQYhkCVrKpgv5TLnM34zzDJ4MqUxbjB/Ljks6Cl2JqUhpR3JGb4WEhTKEKkN4wlZBc0AhP1D+/34b/eG9w/4PPhz+D
P4F/cN90v4p/lE+8L6dPl4+Xv5IPkE+I/2XvWT9Rz2EfUs8uTt2+mG55vl3eJG31vckdtq3HfeTuDO4TriqeK5407lEOgn65fv4/RS+pb/WwQLCU4OSBIxFZMXLxmFG7ce5yGFJLElgSQIIoIfvR5NHxcgwh94G3QWKhMCEvAS0xLTEDIOmgvRCSEJMgljCV4IbAacBGkDdQJ4AZ8Ayv+9/jD8jvjp9IvzpPQA9bzyM+3Z5jfjc+K445DkgOOX4kjic+Mm5R7mR+ZS5+ToOus47d/u5fEO9X74lPs7/TP+x/66/lj/dwGTA/YElgUeBn4IPAsHDMMK1ghWB/gFXQRxAxsDyAMFBVAFOgR5AgkAQf5f/jUAmwMCBgkH5QdLCYsLKA5WEFoS+hPYFCcVvxQZFfoWAxhvGE4XtxQ+EoEQzg+7D7AOMAtfB+EDKwLkAQ8CYALhAooDwwM0BOkDtAIXAvIB+wKRAzkDlwJaAj4CjQJ4A1kD6gHV/rr5zvQa8hHxR/FC8dHvS+0462/pc+mP6tzqh+pP6r3q++xn71nwfPCw8Izx0PKH837ywvC37nTtaO0e7lvu++zO6g7pR+lD6jvq8ejn5gzltuVj6Orquu178P3y8PX+9z34xvdj92f3g/jP+t/8rwCbAx0F/AaIB/QImQsCDXwNGg7FD5QTLxjJHJMgpyO4JcImUSbSJF4jMCL2IT4iJSK7ILkefByVGhIYUhUiE34RFxArD9QOSg+GEMQQORAcD9INugwtDMUKkglzCSYKLguNC+4KJQovCiwKWQr4CbIIlQfFBhEH7gdkBzwGSwT6Ak8CRwErAGv+M/2c/DD8i/wB/WH99/xY+wP5gfYO9Xnze/DP7GboFuTr35Pbqtdk1AXSOtCmzsLNas2tzXHOT88u0WnSstMC1mzYQ9tF3ubg5OMt54vqze3W8F3zqvXX9zX6tPy7/lEAlQGJAc4BIgOWBJMGjQcOCIEJWAsCDVgPUhGLFCgYXRufHkohPyPIJIMlsCVsJeojeSIfIE8dXBqHFp0TIBFPDngLcQgbBjsEOwNwA5oEsgQBBTUFoQUHB28HAAgiCSQLww1tEOASJRXCF9MaSR08HwQhviJdJXwo7CqJLMcs7SydLHArICmmJHMfYBliEW4IvP+59ePq/d+c1TrMH8M+uRCv/KUKnlGXCJKEjnSMqIzxjnGTIpm7nyen6q70t6/Cpc5O297nW/SJAU8PAh07KiE39kOTUGJcjmVXa+FtEW43bVBqU2V5XsxWHE+RRww/sTXaK44hiBc1DuYFrv3d9FjsS+aV4pPg4N4B3jneLd8T4YTiyuOE5fHnzeo17Q7vy/C68UPy5PIx86LytvHq70LtzuqE6MXmquVh5HDj3OKF4hzixOFt4bTh0uJU5D3mwufn6Rrt3fCi9Ff4BfwwAPoDpAeKCosMeA0jDXwM8QuYDVsQHBILE6ISahJIE9MTfxRTFXwVGRXdFBQViRUhFoMWjRa5FooXXhgjGHUXvhXhEwkShg+iDW4M0gvrC9sL0woYCZoHXgdCCOoILAgOBnQEpQT9BR0HZgdvBmIFogQEBKYDmQIYATIAxv+W/nL9hvz0+hj6FPnC99P2v/XA9BL05fMk87fx0/CU8MbwzPDJ757uIO497lTv3fDf8ZXyTvPM9Nz1OvZ19nL2N/Zz9SL0vvIL8hfxtO8/7jHsseq46UDpNumI6Zzp4emM6qzrhe2K77fyCPZT+Vz8Xf8cA88G/AkGDF8NXA7TDrcObw4aD2kQmREtEl4RGRBDD+cNYAxvCuQHMQZdBTUFAga0BpkGcAaeBmgHOAhHCKsIAwm5CWkLug3pDwUREBCWDmsNwgvaCfEHfAZ4Bm0HRAijCFYIKwi1CNAJdgolCj8JxgilCWALPwypDI8L/AkBCC0FQQPZAdgA0v9Q/oP9o/3w/aP+Nf+K//L/zP8l/3D+Dv3U+w77/frI+n75TPf89BbzoPFk8Ifu4evZ6E
/lqOKN4YHhaeG34DTgTt+G3zvh0OO75sLol+oa7RvxMfYB++n+9QEfBYQHgwlfCjYKMQq/CgkMHA4ZD/kOuAxICUgHPgfAB5wH4wbtBVcGXQc3CD0IdAilB/QGKAYjBfYEUgTcA7UEhAWEBgcHlQZBB2cI2wj3BwwHdwW2BEYFZgUzBj8HOwjACe0KrQrGCR8JRwnTCdwK1As9DAENPQ4XEMgRQBKMEQMRohDgEMkQ+A+NDpcNiQx4ClAJRAirB2QHtgWaArcAEf+C/qn+dP3/+8z5Efft9IHyVPBM74ruXe2T6mLmCOIO3q3bvNp32oXaZNnY1yHXwNcc2mTdqODj4/LlN+cO6nzuxvPP+cb94/9uAjkFRAlcDf8P0REdEzoUSBUPFjIXgRj3GSAbFxtZGm4YeRVhEqMQtw9qDksMFAlPBaABkv26+Fn05PAK7lbrrOho5X/hv93i2RjXHdYh1grXJdgQ2Vza4ttO3Sng0eNA6Kzt8PKt+B7/AgaRDfgUMRv4H80jzicVLT4z6Dh7PG0+JD+VPis9MzupOd43LjWSMVktBilkJdQh8R2LGR4UAQ9UCvIGiQXrA0YCawBh/2r/mf/J/xwADgGyAkwE3wUaCMsKpg3gDsQNEgz0CvcJ2giMBgIDlv4w+jn2oPKB7nXoIeJ+3MXXttN0zz/L5MYuw9rAWr//vZe8f7s8u368xr7jwVfFeMmbzXbRgtVM2Qzd+uAz5hHsU/IL95D6e/0+ATcGIwvYD9wSQRW0F3wZvBqiG6UcyB7yIVwlMSe2JxknCiYPJZIktiTxJP0k7SNOIfQdahrUF3EVbhJ2D54MegpgCH4GtQTXAxUEPgSjAzQC5QAoAWIBdgHcAW4C3QO6BCgFDAWPBH8EUwSwAw0DggIgAmgBIAG9ALL/Sv4U/Sb9Hv6V/u39xPyA+6j7+PtN/IL9Tf7M/tH+f/28/Dn9Xv4CAOQARQC3/73/CwBH//r8wfrZ+Y/6uPof+bT2tPW99f723PdN94n2g/aG9wL5efqk+0H9Ev/UAEYCwgNpBNcD6wKWAhkD+QNUBMkD1wLwAf3/Mv3G+cH1LfPA8QPxJvAJ7rTrIurQ6c3pMen35xbm1uUU6GfsmvH89ff49vqK/YsBDwfeDFgRthQoF5MZoBy2H/giWCXzJTUl/iLoIIwfsx4yHuEcyhkLFpcShw9dDF4HwgKA/1n9GPvd9hHyN+7p6yvqaehv5b/hr95w3ZDe5+A74tDiXePr5MfnMeph7IPvOvMg9xT77f2gAH4DDwbdCA4M5g3YDhoPeA/FEOgR+BHlETIRHhBLDqsLDwkEBwcGsQXRBYUFRwTzAo8CDgNxAzcDvwLoAnAD4QMaBOMDNwOjAt8BzgE0AhoCAAKg/3P8LvmY9hn1YfSY84rzMPQ/9Vf2D/ci97f2b/fV+EH7Cf5rAHQCewUcCeYMDxCIEX8RPhGqEdgScRTrFHATthHIEHkQHRByDyYO2wvTCFQE/v/+/Lb6yfkH+VL3O/Ut8qLv++0U65jnOeVK43ziieFI4LPfot874Y/j3uVn5zrnPufa6HvrXu8R80z22fmt/S4BnANoBZ4HiQoUDZcOpw/OEGgSIBUIGAwbiR0hHmAfxiFGJOIlRydFJ4cmmyZwJVEjnSAGHdMZuxZBEt4N/AgkBCz/7vo/9wv03fBz7CnnUeOM4NHdat3o3W3ffeE2403k4+VN6djtDfPi+Oz9RQJZBzsMgxF8FkAaqB21H4EgLiGxIFIechsmF9oRIg1uCGUDzf8a/bf52vXw75DodeEv283WdNTI08vURte+2czdFeLI5bDpxep/63Pt7e+U81P4iPwLALICEwLTACYANP9o/vf7HPnI90T3//Z09lP2vPfQ+Lj4p/eJ9k/2Zfb/9RP1JfSM8wT04vSp9Y73m/hH+WL6WfxeAeMGiAssELYT3RauGh8eKSIoJg0qtC6QMzE4MzxyP+xA/0FwQopB7T+sPRM8cjneNc
8xOCyIJnMhYhz3Fm4Q3ggaATz53fKA7Qnp5uS539va8Na20vDPts7+zoTQ0NHq0rPTOdWl1pfXKNlt2x/dxd7s4O/jxOdk69DtyO8R8gn18vfj+JH3L/Yw9i742/qf+0D75foU+3H7aftu+pz6Hfu++2D8m/vL+wX9b//LAjIG1wiUC5INGQ71Db4NZQ6eEPES/hR2FQcV4RRoE+8QlA1uC5sKzgoJCvgHlgaQBUcF5gVRBT4E9gJ+AYwAXv/3/UH88vlv+FP31PVH9GDypvAh7+Tt9OwO7R7tF+yF6mbpFems6j3tdu5C7sTtIe6e76nxlPNF9U34ivuT/gACdgSgB+kK3A0SEQkUmxYJGYQcMiBjIyol8iVZKPMqkS3tLucuki+oL+EvnC91LuEsVyqZJ+8k7yCIHK0X3BKVDnIJAgRP/dH1jO9t6i3mvuIE36Hc0NsK3PvbndsE2yHb9NxK32jhs+I65O/lG+n/7Bzw9PFH8rXxYvLT9AH4Cvqh+VL3x/XR9fr2FPgO98f0lPIL8D3tdel35hbmDOfo6GPpq+gh6OTnlejN6fzpGuou6h7rY+/q9c/8CAM4B2gKdA0eEMwSnxX9F0Ubox5eIh4moCixKj8sLiwTKpIlWiBlHesbvRqDGBoVahG3DfgIEgOq/fT6qPls+ff4LPeb9uL28vd0+gL8Bv3F/vAARgTEB+sKYg2UDxsShhRQF1QZ3hmwGr8bBx2xHWIc/BhJFdoRxQ4bC/IFOACh+qL2jfOz8PbtUuvb59jjH97L2HbVG9QC1b7W/tc22FvXhdWm03HSwdGI0Z3SINQ/1j3ZK9xr35/iZOTb5Rro1eq87tTyWfYX+hL+ggITB4AKgwyuDvAQahS+F9Ma7RxpHl4gviHbIqkitx9JHBoaKhiYFxMXFBajFpsYJRp2G04aehagEvUQgRC5EOUP0Az5CqcKmQpJCq8JMwjZBrAFwQSwBCUGCgnPCyIPzREzFPAVVhV6Ei4PRwzUCtMLZgtNCm8IRAYsBNQCIgI+AT4A0fzj+Cz3Cflv+7n8lPyp/Lj+8P+vAfsCHgOTArgA0P2X+gD4C/ZD9MHyXfJm8Wnt/uYm31jWpM/sya7DGb8Xu4O2MrJtruerTKtbrKSuLLF1tFu5UMKAzY7YYeQh78b6lQiTFUojBzCtO4lH7lAWWtFh4GSIY/5fB12aXVpdXlc5ThZEXDmWMNwomyH0Go4RAAYk/Jz0tO5a6kPlxuGC3y7dnNs6273c0t7v3+jeP91o3R3gHOOW507sme7C71HxpvOR9+z69/3P/zwA3gCBAR0DOwY1ClwMSw5mD18PvRB2EdUPJw7uC2AKSAqOCasGEwRaA5wCtADU/mr+x/7c/3ABCwLbARIByQBSA4gGGgpaDRwQ1BJlEp0R+BGvEnQVPhckFh8Tdg+ADEMMnQ1eDiYOLAyOCfgFSgPbAeYAsAAY/g/7Uvm29tfz3vBC7R/qJ+hB5l3kOOFa3CfZlNf51ovWJNUp1PLU1dV+1vXUfNKT0/7Y1OD05tDoZ+hP6lXuLfRF+i7/mwPNBi4JJgw7D0UUrxlLHmUifyVyKJUqdyx4LKUsKS4pLxwwXzBGL/Uu+y3pKlQmgx9gGN0S0w1dCZ8D4vt19VjxXe6m6n3lWeBu3QjcVdtt2yvc0t1o4B/l8Opx8DL1KPgT/K8BPQZ4CU8LUAxxDogPMw4hDZkMmAzsDN0JAgX9/xv85Pqt+rP4UPSX7jzqd+mf6wHupO0L7CrrYezN753zf/Zm+Wb9DAIEBxIL1w6WEgUYbB6VIkojMSKKIuQkGCjIKZ4qBCujKeAmgSNQIFgeshvfFjAQkAlxA379K/iK86HuTest5yziMd5L2rDXUNVE0o7QAdCB0LPSM9Vl2SfeyuLp5Ifk9uM65QvpBe7h8hj1OPWT9U/2SvjV+V754fge+bX5kfoF+hb4j/Xz81r07/Ux+KL4CviG90b4H/rU+5f8Xvz1/HQA5gUvDe
MTgRe3GTQawRn2GFgXOhZ8FmoXaRjtGA0Z5hh+GDQXbhQBEfANbQvWCj8Ldgs/DdcOQg/HD/MOzA2xDEALgQuIDhUTyBigHa4gSiIrIrghxCBqIEsg0iBLIeQh6iG8IR0hciBXH3scBxryFogTbBB3DcYLdAsiCmwHGwJq+zf1G+/76Rnk2Nwq1rjQm82My0TJIMaOwqy+Qrpvt0+21LWCtvG3irrNvuzCAMVYxqPIA8x20AnVpti122Te/eDt5XDskPMA+k7/OAMNBzQLTg7sERsV6Rb4GJoashq6GjAaqRgdGE0XeBWyEucOoQv4CTMJ8QcmB5YGLQYjB4UJLAwCD0MRwBF3EXIRhhI9FfUXaxo0G9oalRvgHCAfVSG5Iksj4SSgJpcnlCfJJsklzSPFIGkd+xr0GKAWchKeDT4JAQZQBWIFwQSDBGwEbQVhCLcLzA7UEBwS8BNqFrQY4BklGt8Z0RhrFocRkQuHBC39B/VQ6/3gJtc1zUbCHreZq42i5ZvVlnCToZFpkeOR9JJJlUmZt541pQGth7ZCwSfNq9k456f1fgXvFbAlnjRGQiJNn1UxXOpfUmPHZt5oIWn/ZdNg8VoQVdxOpEdhP3M1FiptHroUUw03By0AVfjo8WLtjulm5SzhRN4J3N/ZyddX1qnVENbX1+Dad94h4Yjjj+Z06e3ruu1o7jTvbvA+8hL1LPhO+/f+ZwI2Bk4Ltg/KE6cXdBrEHE8duBsaGswYNxiAFgATEA7tCI0Ec/9++of1SPEl7SbpuuaO5mTnYugO6nfsM+/Z8nP2Pvms+0H+OwK4BmMKwQxuDtAPihHEEj0TkBOvEvQR9BH1EBEQkw40DGoKiQjQBvsEXAKR/xn9jvu2+9D7yvrm+Wb5Gflf+Gv2xfMl8PDrqef04q3e99mx1RDTrNFv0d3QutCk0f3TA9jw3FjiDehx7o31VP3DBdwNsxWtHVElCC1DMzI47TzBQOJD6EWzRb9EyEMyQv8/Rj3NOcs0Ny/eKJ8hbRr3Eg8MIgb8AMn8ZviF81fupuiz41DgMN1L2ejVv9Os1BTXUNnd2kzcp97K4YTlrugV633sj+3k7tDxNvVU+FP73Ptj+9/6xvkN+bH3tvQd8d3tVOrG53vkK+Hj3uvc89uv29Xbutz43TXgCuQq6GDsh/Af9VT6F//dA2sJ1A4sFCAZ4hu+HZcebB6xH1MhiSIsI0Qi8x8RHpwcVxvqGVAXUBSSETEPrQ25DGsMzwyeDPsM2Q0MD8cQahJ1FAUW3xYlGKMa8R4gJFIoKypQKo4reyxKLOUq9SYOI0cg2h0iHPEYaxNbDCUFf/5n+Bbzie1c6JXk7+Hm3wbeB9wX2mrYK9em1c7Um9TS1R7Xgtkw3Enesd864KXhz+N45kro7ekL7AzvJ/JX9T/4hvvf/ab9jvvB+UH5hPpX+3759fWB8VfuZe3w7CPsourT51DlruLT30zen9343qfh6eMh5u/nvekP7aXwnvNn9R31yvTt9S/5nf2tARIF3wddChwNWw/HELcR0RPHFxsdbSJdJjgpdytSLTcvXTDtL6QvTi80L1gwpTGHMiIznzLTMV4x6jApMCsuKys2KbsnTidKJhQjAh+pGhEYCxaJElQNYQeYAkz/Ev1p+yz4/vQH8b7sNemK5f/hLd8O3FPZ59Zr08LQ785vzdXLZMrTyMrHXMfzxvXGTsgwy9/PvdXr2i7fPuOY6G/vevYH/UgCPQiQD8MWYRx6H9EgsiPsKEYugjFYMJwsiSh7JWQj2CGZIEofrx1MGh8VMQ8wCiwHAwSE/8X5O/Qy8r7z8PUG9+D15vGl7e7p9eeO6QjsLe187NHqo+dB5X3jmOF64bLiduSQ5u7mNOXi4zrkpeYY6lLtQe968Dzx+fLl9dX4rvpj+xH8av2H/fH8Nv0a/x0CiASUB1ILthBMFvAYCRr0GuUbxB1oIOkjQijaK50teS3hLK0sqiwALFcq6yZsIoAd2BdPE3
gPvwvhB5MC4ftS9SvvFeoR5z/lr+TB4wDi/uAD4lflhekK7THwtPOJ+A3+JQN6B90KUQ7HESkVyxeVGb4Z+hinGNgX5hbgFJ0RnA72CaUEZP96+SHzXu3Y6G7lxeHV3NjXtNOd0UPQjM62zIfMVM5u0hrXGdsy34vjruei62/wG/Up+uD+yAP9CA0NUBDEEjAUrBUIF8wWBhYjFnoWbhf1GIsZ4BisFx4WABXiEzgSlBAyENwQGxHgEGUPSA4XDwIQzg+1DVsJ+AQxAZj+k/2z/Gz6R/Yt8uXvk+4V7uftl+367dzuDPCk8cnz6PXC+Gn8mQBtBS8JUAwFEC0UqxhpHAMeDB7zHNwbVhtJG8IawheiElgMYwaYAYr9hPks9f3vJOkU4tvbZ9cC1dvTU9M4077SO9IP0inTyNVj2Qreg+Kp557sYvEw9o/6XP6EAmoHRQ2cEusVPxg/G0UfHiNSJUkmYyZmJaQi4R7tGxgaZhgvFTAQugq0BckArPyS+BP14fGt7j/s5+pT6o/qsOrr6d7p+Op47Z/wv/Mz9kT5kP02A/cJpQ+lE/YVgRjpG8gfkiO7JncpCiyZLeguBTArMLYvPy2lKZgmxySGI/4gSBxBFpMQbgzyCA0FfwA4+731vfCS7E/nmOHD2y7X1dQV0wrR7c7dzFDLI8vgyhDL7cr2ymjMoM5a0SzTvNPD1KTW69m73f/g8uKc4x7koOWo6KLrA+7p7kPvs++E8R71s/lp/pMCjAVqBwAJ8AqzDX8S2BeFHAghFSSZJzkrsi28Lo8tYCy6Ky0rqCo4Kdgn7SbmJIggAxvZFWYTFhPUEjYROw5UDDkLxQmKB4IE5AFgANb/kP/C/pr93vy//I/80Pqr97r0dfNq9Hj2gPej9hH1PfQR9Vz3f/r5/VUBwgT5BzMKEw23Dz8S6BNlFBYVaRXfFbQWaha4FSgVExRbEo8Pdgz/CTcJRQlyClcLMQujCj4IugXKBJYDggJWAPL9fv0x/mz+hf6A/dD7g/oK+LPzSu+Q65vo6uUN44HhBd8R26DUVcxBxYnAtLtit8SzCrHBsRa1q7ntvjfCQsRUx0PM7NN43CHl2ez980z8LgWWDWQVJhtAHxMhfSH6IfkieSZlKbIqpSm5JokjEiE9HiYbmBgHFowVGxVQFb4U2xKzEEYNYAqbB6IE+wMlBDkE1QRwBVAGywgFDPUOMRNKFokXrBhIGjMdmCIfKMItCDSuOAM8uj1lPSg8dToyOcg4VzbVMPoqDyXwH2wa2REFCFb+tPO759fb5c+gxC+7GrTtrvWreaknqP2omqliq56txa9Ys6u3vbyow4DKHtH/2JnhiukV8f32HvzhAXwHFw5NFF4ZCx1LH2Ii/yTxJkIo9ie0JgAkgR8eGp4VNRLRD9kNNwykCncJ0QdNBoAFkAUlBpgGZQf3B+YHmgheCiYNqxA9E6wT0RHRD4kPExCBDzkMZgiEBPH///zf+gX4KPRu8FntpeuH6erlZuIY4eXhZuM55T7mSeg56xDvxPPY9/f7qgBIBVYLrRFuFdUXsRnxGtobrxsvGikXMhPIEGMOPQxyCk0GuACL/CH5b/e89UjyW+9/7XPrGur16J3nxOdU6SXtqPEx9d32Tfjz+xn/HQEaApQB5wHrAz8G5QdiCUcJFQmzCckKUQzODLkLogr5CM4GoQThAosCvAL6AzQFewWdBAoCpf8RAGACZwUsCDoKOAw5D6gSLRVRF6YYAhn9GeEZBBiaFQIS7Q4JDVULUgijAzP+5vks91f0ovA87dzp8Odt5rflCuYk5oznJOmh63LtCe6Q7kvvNfFC86rzO/QD9rD3q/iq95/1pPXn9Z/1GvOs7nrq9OZf5N7hM9843N7YcNX70xbUvtUG2DHa79wx4GXkcult7/D1kvyNA1IKwRDwFg8ccCKNKhEzNzrKPsZBdUQ9SDxMYk//UWtUblU6VbZTjFEOTzlLU0XSPbc1dC2jJI0bDRMfCk
UBdPjo757ofeO93kjZPdMAzrrK+Mkiyt/Ka80k0CbSAdSN1oPbAeNq6TTtgO4f7vPudPKa9lr7d/4WAB8CCAPRAzQEQgM4ATb/9v66ADADUwQQBGQCfgAN/un5qfQ27zPqueZf5BrkmeQe5Ajj4t9G3NrZBNm02Xzbk90c4M/jnOi67rz00Pjo+un8pv+HAxwIkguwDmgR9RPgFdIVZBRZE0sScxEqD5kK8gXiAsEB9AA2AA3/JP1z++P59vhz+aX6iPza/aL+TQByA3kIew3iEJETsxVqGNYb9h4+IlYlDCevKBQq0irNKqAoDSWHIJQaIBVJEFUL1QdzBNQBhwAcAIX/If+I/qP+1ADHAloEygQIBSsGNQjmCUULKAz1C/YKNwkKCDcHLAceBhUEQwLIAMsA8wGNAxYF/wUrBtQFXgX3BHgEKQSmBGcFsAWWBS0E8wF0/3T81vgy9TfyA+976t3jNdyd1P3OW8pbxoDDCcDTvMK6arltuSi6YLvqvYzAPMM7xd3HU8xW0rHYuN445MzoxuzR7xfz1/al+qH+/wJjBycLrQ4HE/IXPB0ZIQwj+CRNJ7UqVS+lNCQ6ij5lQi5GXklGSxBLXUr0SalKaUuVSudIWkYqQ8A/xzo9NC0tbiWEHaoV7AySBFv99/Wr7lDnc9+y2OfStM2wyffF5MKBwcXAwMEoxCrHs8oOzsLRFtZl267hEOkK8YT4NP+SBAoJwgxMELoUixi0GzcevR/FHwEf2RxKGUcU0g5CCaQDQf149lnwTutu5wjkl+CK3BrZS9Yf1Y3VEtZa1jvX5djY3GHiKejr7cbzlvnG/4EF7wrCEIIWHhyfINMj9CW+J3ApTyszLbEu2y7sLXEslyqXKCAmWSJ9HWIYsRJYDCAG6P8C+7D37PNB8JPs9egE5unjYuJa4QPhBeH34S/kXuZf6Kfp8urS7BjvIfKB9QX51PwqADMD4QWwBzwJ3Qr1DBUP0xCeEcwRUhIeE3kTihN2EhsRuhCwEL8QPRCiDVgK+AcjBzYHegY1BL4ANP3U+SD34PNu79fqzeYH5D3iJeAS3UzZd9Xu0lzSH9Oe1M/WrNoF4P/lG+xx8b32efzdAoMJBw8iFKsYyB2DIzQpvi0uMGsxyTEnM6A0eTXxNYY0OTI9L48r7yesI3oeBxmDE/8NJAgIAoL93/oR+jr5jfbK8nXv0+3l7Ybuee9h8HDxBPP79Lj3B/qZ+rD59vcz9nT0S/LQ73Htnuvz6e3nFuXx4UPfBNzE16bT/s+izR3Nn8xHy0HKLMq0yz3OF9Ev1JDYU97p5APrdPAe9Vf6IgFZCIgPNBbiG1shUCbtKDgqzyquK2Atvi6cLqYuXi4NLvMtuy1lLUMtES1HLBUrwynyJ5glSiTxIhAhSR1EFyAR7guDCFYGEgRlAQH+yPqz9/r0vPL18AzwDvCa8ZTzkvWg90D6Qf0qAFkCIgSMBa8GOgfeBwsJZgqEC4YKNwhJBegCWABO/dP5h/Xp8WXvVu2W663pG+hi50bnJ+dN5lTlDOVq5CHkZORs5YHnGuk+6qrrhu628vb1v/fj+Az5jvrY/Mb+OgC7/wr/Lv+C/2P/f/0m+or2tPKo7+jtc+xf68vq9emE6eXpt+oW7X3vMfFZ8gLyNfLI8+v1pPjT+8/+NwK0BbAIewsHDiEQ5BFpEkoSgxKqE+QVZBcIGA4YhBenFgIWdRQ+EgYQGw0fCkEHdASUAiABVf/p/WP8l/oi+c73vvZa9lL20ffa+z4AewTiB8gLJhFXFnsbsh+oIr4l+SeFKYcrwi1JMLsxgzFkMH8v7y2pK7MoQST3H/gcxxrSGb4YZRavEaQLVwY+AlD/9vv293ryB+2o6CnmyOSB48rhIN6w2F7TQdByzhXONM1BzSHP9M8o0O7P4dDA07DXjtqc3CPddN5N4SflAOgS6PDnHOiB6bLpCejM5dzj9+EQ4Ijedd6x31fhIeOx5K/ldOa96I7s3/Gp94
X/bghnEdEZwR+0JHYqfjEmOkZCBEhtSiNKNUvrTExOdE4sTEpH8EHRO9k0TS2tJOAbhBUxENQKmAWr/kT29e8t63DmzOMG47XiueIN4jffc92B3WDfg+K85Trot+ru7BfvzvCy8sz1APi9+aD7e/y4/QP/QgCrA2kFWAWjBAoCuAJ5BswJQgw0C9gHYgQuAVb/sAC/BQcLiw7oDmUNWAwyCvcHFAaBBF4CGQBz/SH8RP48/6j9aPkd9LTwuO+Y7pHsUeqc5z7nD+hZ6Ezo8OV34wnkquaC6dfrm+1w8G316frJACkGMQnsCm4MiA+WFN0YDhvnHFUfDiI+I8wg7B3RGvkXkRVrEqIPmAxwCdkGhAQSAhL+jfh38/ruz+tL6mXpy+m+6gLqt+iw50rnDOmm7Czx7vVY+ZP7Sf98BcEMnBOmF1sYIxlSG/keUyT3KEwqyCfDIp0crBf8E0YQBw7bDHQKhAXt/Tj2l/B07PTnEeLA24rWWtOl0RPRV9F10sbTPdQc1ZfXmdpN3krhyOT36rHx0Pfr+1n+xwGgBSAJZAtvDKkObxKsFg4aFBsbG+obZx08IVsleScPKHAonSkaLBov+DCtMI4vti1AK0IouiSPIIEc+xeVFGgRBQ1kByQArfnv8rnrguTA3gfaHNd21HjR886vzXXMq8royODHxMjWy6/PatJH1bfZZt9b5qTrDe4u8B7yqvWU+WX8W/6bAOgD5wfYCbwITwahBBYGyAjfClELYwppCR8KrwuhDSMPOg+XD3IQshF3EmYSMhIsEukRhBIXFHsW5BgjGbYXxBaUF0kZWBtgHAgbihmXGHoYdBmJGRIZ5BdWFZASAw9WDKIKzwiJBqECy/56+5b4NfaP8wzxB+8S7pTt6Ozj69jqWuuJ7GrsdOwk7VTuTPDs8fXyUfTx9W330fgS+fj4EPqN/Kv/mwKxBHYGgQjzCagKugvvDOANPg5UDV4LLgmyBywG9ARPA5D/KPuy9lXzy/Ha727sLuew4VXdCdtG2QjXrNRl0X7Pps/X0TXWvdqa3pDiL+iQ7yb3Cv6rA2QIfA2pElMXWRveHScfFh8DHqYb5RfDE/YPMwyeCGEFwwIDAG39wfrU9431YPNZ8SPvh+2m7G3tHO8l8RDz+vPI8+/z1PS29dr2nfcJ+V36C/w+/58CNgfNC7UQvRW3GY4d6SBYJQsrkjDwNW07oj7HP/Y/zD+APz49+jeoMqwt3Sj4I7IeIxopFlAThBEqDxgNPAsWCI4FWwNVAUoBHAInAsADjAXTBZkFXQSuBFsGjAaYBfACnv+o/br77Pnb9mDyi+3u5xHioduP1CjN0cUkv6O4Y7EVq8OleKJjoH2eSpwtmlOZHZpenNafhKNtpy+tJLTivF/HXdKO3t3rrviDA44MdhWAH0sq3jRTPhRGEEztUEhW2locXihe11uLWAVUM1A+TG1He0LxPHI2STDTKJEeYRM4CMD/D/pL9HTv5eyy69TrK+0B78fx3fU8+VH8SACCBGMJCg/wFO0aKCF9JoAqGSyXKwwqPSeHIuEc1xWhD+sK2wb+AsP9DvjB8fvs4eie5d3jF+Nd46vkquV95tXmyOWa5YPm9ugY7XfxbPVj+Vv9kwDrAW4AyPxp+Mzzwu8p64bljd+W2QbUVM9ByyDIJMWEwtLA/r8hwOHB/MXUyrbRKtne4LTq6vT+/VAGQA64FXQd/iSeK6gwtjMFNpw3szmbO3M7HDqQN4EzEy+jKUgj3hyYFtgQiAsWB4EC9f0L+VD0E/Gs70zuge3B7SvvQ/IZ9t75Hv48AyEIigwgEDYTjhY0G0QfqSNuJpsnrCivKWgrYi04L0gwcTChLg4sqSn0J1UlHSHVGtQSZwotASv4qu+P5w7h8drL04/MLcW0viS6Z7dztZaz0LB+rQWr1aolrsqzx7nVvhHDocfrzKTSgNgz35PmHe96+boE/g9LGjojPioDMEg1jzldPPM96D6gP9k/Pz+iPY
s74ziONYgxbSxrJpogChyjGGYVthFbDWMJVAa0A1UA1fu+9qHxce3p6kHnUeLb3cXZVdjB143WOtZd1THVl9bR2OTbd98p453mbOpz7SnvmPAZ9A75VP6SAqAE6QUjBxUI0wjPCQsK9gnzCRAK3gqHCjgJXQjyBwUIuQajBFcDYgMjBeIGMwhWCa4JxglrCpYLhA16DzQPsA1qDDsMyw2nD0gQqxCFEYwS8RN3FZAXJhlpGVwYFReNFsEWhhbxFCQTtRHaEOcPpA36CY4ESP89+xL43PRy8nrwc+7m7CLrqOlL6D7mAeVa5T7mRuh46hHuY/PI+JT9ZgHtA6MFXAZ6BpYGKQdxBlYEOwG9/bH7sPqU+AL11/Bc7HjqXepn6kLreuuD6yftJ++e8fnzR/Xe9d32vPgQ+3X9PgDFAkMEugWPBqUHIQqyDCYPtBBnEZUSBRXEFscWcBXzE4YTuhMiFIMUNRTEEnAQXQ2TCh8I2wQNACz7f/ev9HHy7e8c7I3na+NI4Lfe293x2y7a1Ni02G7ZutmO2efZZdst3UvfT+II5SPnoOik6tftvPAx9JP3hvtPALkDTAfaCuYNFRLsFVkZWBwDHvkf0yJ+JgMr9C56MfQynDPNM0Y0eDTtNDE1tzMBMeUtwCqJJ7IjyB52GBgRsQlXA9v++vtQ+dT1gfH57LnpJ+jO59bntOdi54rnkuiQ67DwwPV6+XD60fkR+Ur4wPdl+Jv5NPvU+1/6pvcn9Yfzy/Jf8vLvPOzw6A7oyerk7sHxwfNC9nP6av8VA9MEHwcbCzMQbRVkGFAZgBrWG3UdER4EHeQa/xe4FUQTfw93CqkE1v2K96HwJuqL5GTfXto41PbNf8ipxAPDsMIAw0vEwMXoxs/HlMnRzBbSV9jY38vnaO9G9pn7DgBeBGIJuQ5fE38XDxueHVUf5B99Hz0fvx86IVsjgyQpJDEiGyCuHrIdDBy/GLEU4RCSDlkNqAzYC9UK5AogC7AK4AlFCKUG8AZtCd0NrRIHFtoXRRkcHDYgzSM5Jk0nfSd7KAQq/Cv+LpAxpTJfMSstcic3IVEc8ReYE3EOGgikAUf6LfJx6UvhPNq21BPPwchZwuG8r7mguLi4vbn1u7i+i8F0wwnGuckTz1DVktsz4rDoRO+39Nv5zf4WBJQIYQsqDIMLWAuXCxkMJAzYCrIHGQRS/8j6TPZR8e/s7ekB6Dzm/OMe4ffe8d2l3eXd592J3dreteHI5hDs5+888mn0LPfn+lP/6gNdCUQPzRReGaYd2CEoJxUthjInN9c6wz2zQNhChUTWRWdGSUazRepEf0Q8Q+xA0DyCN0sy6ixPKBIkHiBnHNIYSxRJD3EKdAXvAFH8EPfY8cns8efT4szduNl419XWotYw1kjVpdTO1FvV/ta12E3apdss3GfdWd/N4cHkYOfY6XzrLOyG7LDtG/Bv8/H1D/eo9zP4bvld+x39QP4Y/6T/jAAlAa0Apf8W/xj/cv8g/zf+sv0e/jz/HACmAMoAqgBkAGEAnAAvAf4BGAISAtoBYAJQBJYGdAjbCAkHkgQsA+YDngZoCfQKRgv0C3ANPA9CEEEQ4w9fD9UOLg8FEEkQVBC5D/oOgg7XDB0L7QgxBnAEWgIWAP39cPtK+Wv3CPWB8lnv0eux6WPpa+kO6j7qn+k86e/o4ei06R7rbux97dPu8vBZ8y72k/hz+u77Nvwq/Wf/QgKdBT8IwwrEDCMOQA86EMYRIBNME8UT1hQgFqIXuBfMFfoSvg+RDAAL/gnCB48ECgFx/t/9X/3J+6n5m/en9jb3B/jm+Pj5J/sd/UX/ewHBAyUFYgYZB9sGowagBK4B1P6G+lf2cvI37VHoHuSt4Pnfxt/T3tjd9N264LDk++jA7f3y8vkgAlALrBRNHP0hpybZK3ExbDVkNjY3GTn6Oig8XjtMOCM0DS8DKa0jkR9UGzIXFhODDsAJGAUuAPH7Ofm29g/0VPBH7EbpiudO52
vmTuTn4Dzdg9oq2PnWB9Zp1TTT688JzvPMD8why47JbsjFySrLssxzz7rSC9e92ufbRN1632HjCOq28Yz5gQAlBiwM1xImGr0jMi3fMQg1YzmwPutEuUh6SbVH+UTKQEs8mjcKMt4rXCYHIckbGhbhDqQI2wEi+1P0tOxd5e/ez9nn1AjRH9Bf0ZHUG9cW2Cbap92X46bqpPAJ9cr5BQAyCHQQ+BVeF5sXJBluHCMgDiInIYQdEBimEbwLpwdABdcCbwAj/hH7svYr8XLrB+hH5nzmjOVJ4+/ip+Oi5t7qpe7k87P5d/7aAjMGQAnqDIwQ6BWsHDYj+yg7LuAyGTYGOCU4MjblM2swVSyXKQ8oqibVI+Af1xmrElMK5QA6+I/xVes45Qrfp9ml1Y/SGM55xwrB5rrbtgm1z7MntO+2m7vkweTGPMnayZjLO9Bh1zvfwOUP60vvKfMS9mf49/uAAE4FyAe3B14G1AQiBAkF6waBCYsLbQtACrgHLAYOBggGpAZaCNsJtQz0DzwS9RQ6F6EZsBwsH7Qg+yHaIr8lhSq7L64zRTXuNAkzaTDvLRAs6SmHJ7kksyFrHpMcOxwBHdodWBsDF6ESfhDAEDMRoRCDDjgMoQuFDWIP9Q8sD08NxgvpCSIHNwVPA7QBi/+T/Bb5PfZr9KLyJ/BG68jkbN0u1urP6MrPx2bGcsVExGzClcEywU7BcMIPxUbInMtQzrHRX9cc3mvjUubS58XpDu098Qr2wfmw/SMB3gNiB/cJNgztDeQOQhB8EqMVZhg0GgkbzRqtGUUYOxWZEa8NXAkcBb4AAvxx977z++5O6ivm+eFM303db9ug20/erOJT6JntufKf+JH+sQReCzYSexkEISEouy7SM642DjnFOrw7mjtBOj04SzakNCUy+y5iK5QnJiO1HgoagBXTEL8Lfgd3BDQCEgB+/Yj7HfpN+Or2OfbI9Y/19fTl8zH03fRj9b/1YPX+9AT1FfSO8/LyLvKV8fjw9vBM8JXvQe8M77Pv+PBS8aTxgvCr7tDtJe2g7A7sGOuE6vXqZOwX7t7up+417p3uVfBq8nH0/fXb96T6wP3yAIoDXgVSB7QKsw3KD4EQGg/YDa4N7g0cDioN/grkCGgIAwmLCUoJOQdoBSwEuAJ9AUz/ofxa+pj4ffcc9s7zIfEQ7iXrDOlh5tDiRN/F3Hzc3t2537/g2+BU4cviI+XK6KPtyvP4+qYCWAu1FPIddSbqLYM00zkBPURAAERASPhLhU12TKxJ3UVRQW88TzajLgQlPxq4D4wGm/9C+L3wV+n84A7a3dM3znDJW8ZdxS7Ggsh2ywnP5NMw2pbhB+mn79v1IPzwAjoKvRGQGEAeBiOPJ9wrAS97MBUx/DBUMFgugCqEJgAjjB9xG20WKhCnCR4DWfuu8hDq3uFt2vrT9s6uyuHFG8Lsv2/A5MOex5jKaszKzePP7NJY1aHWSdbQ1QXX0dh/2jncyd0j33/gSeG54Ubj9uRr5uDoruva7tjxePRe91r7NAB0BXoLSRIlGV0fLSYnLCcylziaPlhEN0hxSxZPT1HIUbhR2VASUBlOIUrsRLw+IjhrMqMtjShBIxwdqxYmEQsLwQQg/8j57/Wz8pbv+e3F7HLsvuxE7WDuyO8W8OHuz+xz67Lr+uwa7o7t8uuB6RbnmuRt4azdbdnP1aPTPNIN0s7R4dDqz3nP1c6Lz7vRlNT31x7astow3Ore0eGv5Kzm6eg77NrvQfJh9N31bPfC+UL9GAInCK4OPRUrHLwjnioMMbc3Aj7eRIZKKE7jT+dP7U7MTD1KAUd9Qpw9gTi+M8Qu5Ch5Ir4b5BTKDRkGL/669njwX+zo6Tvoqubi41TgL93L2j7ZZ9mQ2h3cKd464E7jL+co61HvUfKk9LP25Pf++EP5VviC97H2pfVJ9Yv0zPNg8gbwBe5f7Ubu2e8o8eHxNvPM9Er31foK/s0APgPWBXQIPQpzCyoL8QryCl
EKNAgMBaoBaf5F+0H39vIS7gHq6OZ745DfTdwf2c7WttTB0tfRHNIb0xfVwNgP3mTkh+tl8+L6hwGDBwQN+BKAGZofliWgKr8uSjL3NUc5JTw8Pl0+Ej24Og04OjaNNG4ywy+xLBgpGSWhINoaHBQVDQIHJgIW/lr6nfao8zvxW+5+6ybpeeeE5pPlGeUS5i7pM+1f8v33IP3VAaAF/AiEDIsQ8BMNF0AZFRsKHV8esx40HTsadxauEhwOuAgRAzH+iPoa97fy6u3B6TnmmePu4CbfhN3m263aLtqP2m/b/Ntp3ALdbN143mvfNeGu4sTjp+QF5Z3mhuiO6lvtg+/58Tz0j/U/9/H4iPmy+az6Ovyp/lkANgFSApcE8AfeCgAMcQvJCQAIKwYqBN0CegH4AF4ACP8M/gD+Xf+KAcwDswWOBiQHnwg8Cw4PyhI0FioZKBvlHHoe7B//IWgjlSPbIzEjlSKPIlIiNiJhIcAfGR1lGfoVuBLWD8gNSQuaCBsFeAFC/rL7GPph+PH1XvLI7mLs+OuB7Dntzu1O7oPvcfJ79Wv3//ir+qL93wEABhIKiQ2hEHMU4Bi3HEEehB0yG9EZ1Rg7F8cUlBDxC/UH3APM/sb4ufHB6rvky95j2LbRKsyqyAPHc8U/w/PAY7/jvjDAucLexanKU9DE1YTbQeFD5mTr/vAW9/H9wQN7COMMcBFqFssa3x2SH7cf9B5YHkIech7kHUQcoRmjF8YV1BLCDg8K9gUYA1QA9vyl+Xb3QvYO9nn2nvZo9uX16/Tx87rzLPWF+Hz8oP83AkEEdwZyCMYJvgqXC84M7A3aD2oRtRJbE6sTPxTHFCIVEhUnFBQTOhKWERMSHBPHE3IUdRTsEwIUdxMwExAU4RQwFocXFBjpF1EWoxQNFAQUHBPfEJUNqwpBCMgFMAJo/VL31fDJ6p3lwuKL4ZzheOFE4F3eNNzt2tfaLNwQ31/iVOUb5+nn1uil6efqMu1u7t3uo+7Y7frtxe7S7rHtfevV6Rjq4+ul7gPw5O9I7z/vKvF/9Cj3Q/nz+rz8QP8pAu0EWgjEC8oOfBF0E+IVaxjwGvEcMx5LH4ogKiGuIRkiQCJrI/0kniUuJkolrCOjIhYhxx+eHnUdNBz1Go4ZwheoFbkS8w6gCtkF6wBD/GT4CvVD8Wvs8uaj4ZrdjtrN1gXSBswpxmzBYb7avB29lb7xv6DBN8MgxVvIt81w1BrcH+R67Mn0zPx+BMIKxQ8OFCgYgRzxIIMkeSaTJ4wocyn/KWEpMCjpJs4lAiUkJLAjBSRbJBIkGSNKIf8enBySGhkZThg5F48W2hUIFWYT0RBsDmYMMQqKB8QEEgLx/xT99/gp9KDwU+6W7PjpzeaZ4/vgxd+q3xTgk+Dx4OrhuONa5sfpfO3/8CH0Offb+R/8Fv68AJcDjQY4CGUIEAgCCAcIyQfgBl0FmgM5AZL+Dfzx+TL32vO38P/tkOt36S3nVuUd5DTjWOLJ4XriOOSA5jjoTelW6vXrEu928yL4tPzAAJEEeAg/DCIQLxP/Fc0YshpUHHgdVB55H44gaiGVISMhDiBGHuwbWRgAFUwS8Q/7DcgK8wZbA1MA1v1h+/v4E/f99Wr1JPWH9bn2g/gT+tf7Bf5CAOECRQV4CGwNJBP+F6MapRrNGf8ZAxsDHLwbVxpqGMMWOBVyE/ARjhCIDyoOOwuOB7IDigAj/vr7aPka9xn1rPKP8ALvHe4t7hTuDO2J6+npTOga597mBud85//mF+cV54bn+uj16Qbr9+vV7Mft/e5q8FDyRPT29d73pPkP+pL5mfhs99X2QPW98l3wKe4l7NDqhOml6H3onucr5ynm1eTK5GnlNuf16aTsSPAz9Kb4m/0GAzsInAv9DdAPZBLoFfoZEx0tIF8jiCYqKQIqhCkfKVEpfCkSKYgmpyMDIRsf0R0PHbQbGhlTFZkPcQlGBBL/Svq/9kXz8/DS7gbscOru6aTpmel66U
bpQ+iw5zDpKe0Q89P4yP7bBAsKCw8rFL4XLxu3HuohwiYnKo8rCywRLNMrVSpqJ24jQh9EGoMV9hD0C6UGYAHp/BT5k/Xh8R/uEusa6QLo1OcY6ILoy+h96WXp7ejs6fHqNewv7RTsZOs87DvtGO8v8aLxg/H68L/vYe+N7i7tn+ts6nvqJOsX6wnqMeib5njlVOQF4wfhz96q3IXbtNp52sraJtsL3BDe7t874s/j/eWn6VXu3PMu+UP/+wS7Cs4Q2xVnGpsfiSW3K0kw2jJ9NZI4QjxiP9Y/0j3rOnM6BzuJO/44BTI0K80m+iStJfQlwiTXIdwdtxkcFjMUUxIhEOUOxw5fDgIMhge/AR/9kfns9h30xfH+7zfuZe0j7Ljq6efi4knd8dn/1qPUWdNb0hzR4c/LzsjN+838zsXPS9DZ0fDUNNm530HnHe4k9AD4wvsmARcHhw0zFB8ZKR0yH7IfiiF1I/Mj2iFTHGsW1xHUDSoKEgUvAIP7dPXN7jDo4eJe4Mbe+NxO3CjdVeBP5eDqzO678Q/14Pjn/fYDPQoaECYWFhxUIWklhCZ0JnYnKynMKwwu/C4TL88siCjlI/keXRuqF3AT8A6sCUcFbQGU/jz85PnU90T1QPO88QbxqfGG9D73LfdH9t71Avd0+e/5lvfC83zw0u+X8Azx4+/C7Jfp2udw5zroSukt6ozrRu0b7wvxRPMR9r35wv1WAXMEfQXcBdEH8QpwDlARThPYFOMWDBg4GBMYIRj+F9cW0hT8EmsSMRKkEEwNDglSBPX/ePyh+Qz3bvSt8UfvLO366iDoeeSp4JreuN0S3Tjcwtqp2jvcNd6N4PziOOVZ57vpuezl74/zTPfw+i3/9AJRBkwJ2gsfDk8QaxKKFM0VbBVOFV0VChX3FOQTrhJ4EVAPCA1iC/IKQQs1DIMMowyVC9IJCgldCdoJFAqzCVQJbQlxCBQHCAdpCIsKhAxLCwgJfAatBC0FBwa+BhQI7whCCQcJ9QfgBs0FvQT/AoEBr//d/Qj8GfrQ90b18fJt8MbtSes46RDo8+bt5TrlK+VG5uLnWekG67Htt/Ef9jX6QP7FAhgILw0PEpkWGxrBHOgeViBdIqMjQSSOJNAkDyUTJW8jSiCSHDUY3BR2EYIN2gioBOYBLgCx/qL8XvhJ8/vtTunK5Rnij9502w3ZdNf21pPXCdmj2o7b5NsB3MLbjdwq3jzh++VC7Ivy9PbQ+a77RP7DAeEEewZeB5wIiwljCwgNRg4vEO8QWhDyDoAMWgkBB2cE5AFYAEr+wvxd/KL74voT+n/4VfcY9n70x/Om8130gvbv+cn9dQJrBlIJCwzZDcYOlg8TEVIToRUiFxEXsBbFFnwWDhZsFKsRGA+ADAIJywWBAeb8uvgJ9Zby8e8o7enpV+dS5pTmNuaD5S/lpuap6SjtVfG49f/6+wBbB6gNiRPqGKEeICUFLOIx8DUJOZA7kz7HQV1D3UNxQ7FC1UElQLE8/DZbMH0p9yKLHIAVfA1zBZP98PVD717oZOAs2NHQQsoZxEC+CbkKta+y27DHr2ewd7Ffs8e3a73Cw7nJrM7M00PZ29+c52ruIfR7+Jz7rv50AeoDugXcBRYEXAGM/av5d/Z38zLwfezR6EHm8OXV5vTn2+jW6d3sMvHb9uj8ZgK7B3EMXBKbGFUeFyNeJnAoaCkxKV4onSbmI8wgkx2uG14a8RgyFyQVyRMgFKwV3BfaGd8bxx2dH+ghPiSoJqkoYSpSK5wsdy7AL4YxnzMMNsY39zexNmI0yjDdK20lEx7uFs8OLAZP/Uv0JOsM4pzYCtAyyFLBjLxJuL60T7JysJSvQK7FrEOs3qxrr6eyC7Zcugm/xsR0y2bSg9nX38zlh+su8SD3r/3EBD8LYxAiFGUWcRidGgscAB0CHk4eHB7cHS4dIhw2Gt4W5xNeEbMPqQ72DbAN4w2VDT8NqQxOC0sK1wjXB54HGAecBoQGOwfsCA
sKmApZCp4JIwkbCqsLgAzZDMUMIAzcC18LhAmuBxsHVwfVB6AHQwfxBhIIxgkQC+oMoA3yDUAOUA/UESkUZhV4FTMUrxKoEQARjA9TDAIIvQOg/xH8svgW9U7x5O0E6+Lor+dX5pvkkuL74DbgBeF/4rPjKOXf5anmTOgA6nrrHu3Y7vPwkfIw9On10vcS+sv7Zv6EAckEwQcLC10OxBGiFE8WNRfOF0sYSBi7F/cViBOuEFIN8AlcBq0Cqf46+efywexp5y7jUd+d25TYWdYb1SbVudVK1uXXgtmH23TeZeKg58bsUfGm9bj6WwBPBokLUA8iEscUoxceG1MefCH8I1ElyCQvIw4hKR5HHOwahxghFn8TihBBD9UNTgyuCo0IGAb4AwwC7wCnAG4AjADNAFgBEAIhAgoC/wHeAccBIQEfAKr/4ADQA9UHjgpPDBwN+gzSDAEMqgqPCT8IGgdTBzQHBAYJBNMB3f8Y/s/6cvZV8qfupOur6W/ok+d05kXlNuWD5kDo4Oih6MHoSeo17XnwQvRo+Tr/8gS4CTYNERBXE+EWfhpxHtcg9SLrJHglLCZJJkUmfiUzI2wffhofFqQRbQxdBxIB1Ppl9cPvfumv4+/d2dgg1erRH9DKzbnLGstjzELPr9JZ1TPYqdvN37Lknuk+7pvy4PYK+lX9/P9HApkFnAgSC70NAhBhEusT0RPyEk4S4xGjEfwQNg82DX0Lpgr1CpYLHwvQCUMILgfxBtUFYgNEAU0A/AAXAW8AXACFAb0CGwJKALT+Mv8PAnoFowf+By0H6QY5CK4KEw3bDYcN0QyoC2EKHggrBeoC6gEfAXH//Pw2+lb4UffD9mv2FfXi8tDwg/Ct8hz2DvnJ+df5V/v0/agBGAb+CUUOVhLOFRYYkRikGE0ZLxsgHfQdmR2SHKwa1RjGFrwUExNkEWMP/Ay7CjYJCQmlCXwKTQrTCAUHogVjBfsEOwTEAtH/rPxW+SX1OvD06mLlluCx3LXZ5dZE08fOzMl6xVTD0sL9wrrDvsN4w2/D3sOjxl/LJdHl1rvbsN+44//nVu0Y9Nb6dAFGB6UMGhKyF7wcTCELJX4nACk3Kk8rYiwwLUctsiyDK8Ip2CekJXsjoyHjH5seaB1WHDIbnhm6GOMX3xb3FAYScA9NDuYOQBAhEdUQxA8pDmAMNwqCCKIGhgW+BLMDiQIbAez+Mvw7+ZL2sPRI82vyrvF88f3xi/Lg8rPz0PN98ybzDvIC8jjz1fRw97z4pfje9wj2afTo8k/xgvA68AfwA+957djra+oG6WHny+Qm4rbg799m4Bnha+FP4V3hBOHU4Gzhx+Lo5MLnVOt173D0j/gP/CL/ggK6BikLhA7/EP4SQRXbF6UZ/RpDG0AbQBtLG1Qbdxs7G4cahhkEGVoaXRykHWgdChy+GnoaIBvrGzkc/hvEGq4YlxZsFOsRJg5CCqQGkwPeAWMAcv4X/Mr4WfWs8v7wVPCF8EjxGfIo8u3xkfGe8m/1ufid+xH9dv1v/SL+Y/+UASAE8QVlB0gIJwlPCiELwQt3DOsM0gyiDKsMNw32DacO7A1KDGgK5QiLB2AGJgQMAXb+Y/wM+2P5svYP8/7vH+0+6vDm9+Jc30HdCtwS25/ZENgl12rXTthn2fbZC9qi2vbbMd5K4MriaeUc6Tzt4fCJ80T1Ovcw+sf9mQGkBJMG4ge5CHoKbw3cEPoTNxb4FrkWVRa/FjEXmBdXFwcWoxSmEqQQ6A6yDTANDwy1CS0GogLBAML/8P8qASkBkADZAJECEgaMCWwLnAyXDqkSPxeRGhYdah95IpklICcwJ5wmWiVUIz4hhx6fG2MYLBRAD5YJ6QOI/r750fTO7hzoMuKC3unb19ia1aDSmtHL0tHUB9Zc1lzWA9fp2VTdQOGY5gvsYvJh+Fj9mAK8B7MMlhG3FRkagB7UIUQkGCYrJ+on7idDJ5MmEiRKIUofKB21GnoXtxRpErEOHQn1Aa
78/vew8+LvYeu4557k2uGN3+bdDd1Y3bzd5dw53O3b5dxN3/LhbOWK6GDrue0k8K3xPvMv9d/2Lfkr+y39Yf+wAPgBAgOFAnQAdv2u+vH34/RV8Xfu8+zP6xfq0OjA53XnLei06d7rEO7W8BvzRfYj+54AnAeTD04WjRywIngoZC9+NgI8OEEtRRtGGEYURcFD8EF/PVE3bzA6KrAlYiGsHDIW6A54CKEBKvwN+FL1EPNR74/sk+zL7T3vQvCJ7xvv0+4R7cbrHOz6677rXOt86rPqHOrA557lQeQC5Gjk6eOp47jhed843ireVd+94N7hZ+LU45rnaey/8A311/dq+v/9BQK2BocLjQ6fEOwRFRPOFFsWiBZzFygYMRhOGCkXahRQEZAOmgwDDBMLzgnEB3IGlAX7AsL+Ffl18qDty+uY61Xth+7H7pLwyvJa9aL32Pjq+Sr9TAGnBvoMSBJAFy8chCEwJbwmKya4JJ8jTyPoIekeQhxzGu4amBsSG/cXuRKwDb0JbgZYA+0AKv+p/PP54vbd8SPu3+sh69/rduoE56Xi3t533vrfZeH84RjhEOHb4kvlzeYg5qbk3+S95Tzm/eWp5Orj3eQ85wHqp+xm7kXvVfAH8TXyWPRD9+z6SP8/A90G/gnoC0MODRFRFAIWGhbwFQ4XFxkrG0AcmBwTHawdCx0RGy8ZqRd5FpkV8xR0E3YSxhH+EM4Pgw2uCT4FcAF1/7f+PP/K/2b/Uf6G+wn44/XN9Dz1/PZH+Wv8H//yAL8BuAJMBKAFNgbjBrcHygkjC3UL7gy/DQYN3Qs1CJoDFwFh/kf8YfqN+OT3xfiy+bn4xvbf9Nf0Yvac+Iz6tPsU/H77yPq7+2r9Z/6h/cX6m/cI9VPyfe9J7KPpU+hx5gjkFeFg3gLcU9o32SrZP9ra2i7bbdz53z3leeqe7gLyw/QK+VT+rwQSCzsPSRJoFMkXJxt0HlMhlSIvI+AhUB/xHC4b/hkoGNkVPBK1DQMK2gZlBKgBHv4q+l/2K/Rq80/zZfNV8mvxdfLo8tP0m/g0/Pn/HgPnBXIKUBDuFQQbKiDnJdksKDOENqQ3+TdSOCY5GTvUO8462TbUMJsrBycgIk8c0BT6DC0FP/yN8w/s0OU64b3cg9iD1JnQk82nzF/MNszfzHDOf9G+017WOtpn3hjil+N34+LjseWm6eLtovBN8bPxUvPP9ib7OP4RAEUBGwNyBegGrQcVCAAIeAebBigG0gUHBpgGUgZXBSEDnADw/5z/GP9a/oz9Iv3e+2P5FPco9an0IvVw9d32pPmH/QgBiAOzBV4I3QvnDtwPRxCCEMAQWxL5EwsV6RVaFd8TEhE/DQoJ0wShAIT81vih9PDxb+867T7roOfn4+fgj95x3NDaIdpm2o3cy98k43fneOtA76rztfhN/tYE3Qp+D5EUWhmZHSsiMyUlJ7wohSm5Kigs0SxLLZ0t4i32LbEscyr4JkIjtR+dHH8ZSheKFhwXKRgJGQoaPhtyHOAcKh0SHR8cExvHGYMYjhYIE0IOEwnjAzz+2vd18LTox+Bd2RvTvs1eyJLDEb9Qu024RLU4szOzxLTxt1+7vL3mv+DCDccxzXLTG9nZ3eDhy+aZ7APzwvgq/RsAzwJsBQAINQqMDFoOSBD/ERgT8BOBE2YS/xFtEqASDRNgE5kTbRSdFXYWKxdWF4AWWBVqFAYU/BNMEzcSBhFaEAwQSg+ID5sQcxKOFEQWIhjWGRwbExykHGIcGxzuG/4axBnNFzsULBAKDBgIXAUNA1AAJP0R+T32/PN28azvp+3b69Xqa+rk6ivs/+3D8ArzFPWV9ir3wPdn+WT81gDeBckJgwwoDqUPpRCgEKQPUw5DDCQJbwUMATz80fbx8BXrS+Wg3wza49NUztzJHsZZxLvDDMSlxUjIF8zQ0E/V6tmh3gzj1udQ7NnwQPWR+Tj+zgOWCY4PwRTwGLIcKSCJI50mYilDKwIt6i5VMd
8zRTbzN9M40TivN581CjI4LvQqeidTI1YeNhlCFcoRvQ00CWMDJP2z9rLwT+zj6Gfm++Pk4e7gjuBA4Lzfj98g4MLhteOU5Qnn0ujC6wvwDvWq+Qr+UAK6BscLIBBxE3UWMRnkG8kdJR89IBwhKyGYIPwe6xwUGqQVPhA+CroDqfx+9Zfuuej44kPd3dfT0qnOGct6xz7ESMIMwbrAHcHCwQLE8seczJHRx9Va2cXdTOPL6gLz/vr9AhkLeBI6Gd0epSORKGMtFDLeNV840jhwNzs1izPfMSswyS3oKjwogSVuIu0eIhteGHMWtBS+EtcPFw6HDY0N/gy2C9kJ9AgfCe4I+wdMBhAFkQQEBOgC/wHLAKz/3/4n/jX+4f4t/17/8v7A/v3+F/5Z/DH66/ce9kX0ZfH77onsuenR5i7jZt+a25bXt9N80MzNdMy/y3HLS8vjyhDLv8vkzaLRttW42n7fHeUF7O3yBfrvAd0JYRHaFmoa6hyRH2Qi1iMhJLAi/yCaHrEbEBgfFBkRBA/yDA8LCglCB4oGiAZKBzgJ7ApjDHkNGw9YEVsTkRUlFvkVNxUGFSQW2RZyF88XvBbqFPIRcQ6CC0QIuwTz/3b6afT47ljrCumy5ubjc+Ai3LrZsNnI2sjdKuEx45TkfuX+5u/pm+3B8B700vc3+5oA4AYJDU8TuRd8GxwfFSPhJqApfirxKbUpAyuTLXkwrzEtLxUq8iSbII0dlRn7ElYL/ATEATMBxQCT/rj6TvZr8wjyafHW8PHw4PJg9rH5MPvL+g77bvzp/g0BigBZ/hj8SPs3/Hb8r/qr9s7xpesp5TXg1ty021rZvtWK0p3Q+8/aztfNhMxKylvJBcktymDMaM7s0VfXm9ws4gro7uwZ8ff01fis/a0FeQ3HE4IZuB1JIdMlOSm9KdEoiSY0JTwnyCg1Jysk2SBWHtMb6xegE3cPZQ10DD0LAQqIB1EFiwRDBAYE5QOqAm4CWgJ9ARUBXf9E/4IAeAE+A4MDpgEKAPH/kQE/BIYFSQXKBWkGDQiyC50PshLyEycTBhM0FVgYqBsQHhEfNSE9I3Ui5iDlHVQbHBr9F6oUTxFjDAgG1AAU/Pn33vN17r7pqObm4jPgCt0u2cbWu9MG0ZTPV9AK1InYdNy+3hffrN9z4MPhueXc6kDvZPIL9X33GvoC/O/8jPwh/Mz7Hfpu+IL3Ffi6+FD52vnS+uf7qfu1+RD21PNR9ZP4Avyo/goAiwCQARMCgwFhABEAAAObBygLZwveB9gDVgKsAswEjQdRCiEMFQuACYgJkAnsCXcJ0wnFC3QMYQzXDI4PmRQ4GTIZTRYZExoSGhN5FXIYyxqOHDkcdRryGOYY7RjvGL0YXheeFDUSwBAvEeYRKRGQEFIPkw2FCgwHPgRiA1MCQgHm/5j9U/yM+zz7UvoA+UP2zfMG877z9fOC8bbtMOoz6DfnM+YV5SjkyeMr4gDfxtzy27bcEd+m4P7gFOEk4HTgbuPd5jHrPe5g7+XwIvLW9AL5g/tO/Tn/7wHQBbsHnggbCQMK+grUCQcIsQc6CdELlQzoC3EKKgnJB2QGiQa3B0AK0AsyC1oJ7QeJBqoGvQbuBdUEAQO/AAQAzf9v/6X/DP8F/vD75fhl9kv1yPVP9sX2l/fo99f4qPka+s76jPz//RUAVgG7AoYF9QcLCmULgg30EFkUvhXWFKsScRFNEsYT6xNyE7oReBCND8AN1gsQCtAHSAbJBDkDaQIvAgwCqAEcAXv/vv6t//oB+wTDBp4G/gUcBgQHxwdJCAYJQAqFDJQONA0ZCSMFHwJbAUEBOf5/+GfxAeun5+fm9OW54nbdxNd/0y3SltFd0LrNKcvRyVHLsc650TbUsdW+2JXdH+Ku5pDrrvH3+VQCDQotEe0XgB+OJ6MuijMsNww8hUGXRs9JBEpXSZxI+0a8RNJBQz6FOpE3/jMJL5cpDyPIHE8V9gt0Apn50PN473zrfuby3l
fVisyRxrfCib8/uwy2MbJ3sBCyR7Zdu7vAOMbAzFLUo9zo5LXsxPRF/RQGiw/iF0AeVCJ+JegohCscK+QobCUzIjUfbRr+FAkPVAhJAUz6ivKu6rHk9eBw33LfuN5c3RTdDN7c35fiyOSh5tLoquvO8CH5iwKtC10V6x3hJJcqxC7uM244FTsDPn9AbEPgRipJcko5SgNIcUTYP6U6MjVcL+go3CJkHNoVvw0yA3H5YO/E5fzdAtauzyvL18aOwk++w7pfuJy377c0uce8qMJOyR/QCdbP2sXfvuR+6ILrbu0m7l/us+3A7YzuBO9T7mfsken05V7jKuLV4VfiAuOr43HmB+ud8PL3HQDQB70OwBX+HIYkziuvMgg6B0E+R0JMJU9aUCxQwE2FSttFbz9FOA4vFCb0HkYZ2RPJDmQJpwNp/nj4MfPn7xLtdepY6czpCuyh7wbzIPbW+LD7oP85A5cGlwhrCHUIawpvDRIRcRPoFEQWRhZgFbcSKA9lDGQJ1wWPAmv/IfzZ957y0uww6PzjoeCa3cnaEtry2tzcF99o4rvlFenB61jtsu7q73rwWvBV7wjs1OjO5cPideHC4Pbfq9/N3iPed97Y3vzekd7o3tTfmuA74W7hR+FN4abgm+FQ49vku+eN6gTunvIc+DD+PQQbCUkOLBO+F/Ebph5iIMMiribOKp4ueTAsMbIytTS4NUg2Fza/NUQ2CTaWNeg0VTORMJEtiirlJ7IkKiB/G80WzROBEQMPIQ2EC/wJBAhDBVkCoP+u/Az7N/ru+XL6F/rr+KX3LPfQ9vj1B/Xb84ryWvJs8pzyQ/OI8z7zQ/PT80j0ZvWQ9hz31fY39jn1RPS688vzA/SU9Jb12vWa9fD0O/SP88/yvPHn8KLxv/MD9zL5Lflb+P/3Avjt9w72NPKx7rfqJuf05KniI+Fa3/7bb9kM1z3VItRk0kfR5tET1PjXltzc4THn3+w08vv3Uv4pA9YHgAs/DyMTrRbgGSkdVB/xIaskTieeKRcqVioCK7cswi6TLxMvdC3EK6cr5ysuLBsrjSj3JUwlbCUgJW4jmSDQHdAbPRr7GPsX1Rb+FaQURRLiDtQKjQb7AhAAPvyS9vHvsOnr5KTg8dtl1pLQVspDxIS/5buuueu3MbbltUS357qpwFnH6c1B1MzazeB858HuuPaA/5EH9A4FFm0d8yQdLDIyADblN5c3wzXhM4YxtC5mKwwn+yHsG50V3g6oBx8By/qW9U3y8+5y6+HoL+io6CHqC+ti68vsmO+P8/D3Cv3iAV4H1AwwEfgUpxf8GHoaYhtuG+MbYRyoHbwfxCHbI7oksCIwHuQY4RQ2E+QRqA51CDgBE/s49ofxV+t05K3dKdff0KnJksIuu1e1mLF+r9Gv9rD6sWS0qrdnu5HAzsagz+DZveR/7yL5igJ3CzcUUh0zJcgr2jEjOLg+4UTOSGtJ10csRoRFcURCQf088jjdNj01jzMXMSQtxyiQI2Md5RVEDtEIzgXoBMICAv7x+G709vAd7TjpAuY34xPhROC54Gnim+M35RHpjuy17mPwAvFi86T2HPkX/L/9vP/lArkFgwZyBX0D1QAnAEn+bfq69gLzIvFs79jsJOll5BTgp91n3c/dUN0/3HbbPNvk287dweEX6DXvAPWr+Bz7Ov39/8wEwwoJEfoVQhlQGggarBmAGWQaSxrKF9cTog+eDOkJkwVmAED81vkr+PT0jfDl7ZntY++T8XfyRPKe8HHvKfBy8mb3qP16AscGIwgGBsoEnQTmB54NDBGTEeMQXg8mD4QPGw4kDDUKRwj5B58IsggQCNcGogcgCj8NfQ9YEI4PKA8zEVUWSh04IwMlnSRjJZIn4Ck6Kk4oLyWiI9oipiJXIl8gcRwSGAkTEA4SCe8CBv3r+I31DfHo7PXoKOXt4ejeSNtI2LzW99Rd05LSc9NI1+3b8t3R3EHcld5Z4u3lcOgE6i7sfe1D7kzwyfKQ9Z
L29vRZ88fzf/WJ+Mn69/w8/pz8kvnL91b4z/oJ/YL9sf30/Kz9r/7b/q3+TP17+lz49fif+zv/UAGhADn99Pkv91j1Wfaa9233uvXj8z7zOPTP9T343fqz/Pv9ff0T/goBvAQACO0KtwxwDYUMnAv6DJAQchQfFQcUgxN4FEkXGBrHGzwdqR1OHtMe7x8HIvwjViXtJUElnyQEJKAiVyLTIRwiwCMhJP8hmh6XG/YaZBtwGcMUmg+qCo4HTwWyAYv90/jv84LvdOv05lfi6N3V2VrXutYq1WHT/dEY0PnPdNH20irUA9Uf1r/ZOeCy5tbrUO9F8arzAPcX+mT9awCXAzMHaQnhCQUJxAeiBxoHNgU7A+wArgBWAkoDuAJyAJb+6v4ZAXwDhgTOBAAFsAaGCIkKBQyBDMINsQ56DxMPSAwfCKQEMgFp/q77kPcB9ETxne4O7H3oUuXf4rXhyOAv4IzfUd9e4fLkfekZ7vvxYvWO+MX7HwC+BRcMMRL3F7QcVCGvJi0sKjGKNIk25TfkOFc6FTvnOiY6eDjRNfUyyi/FKxomgx7xFvEPuAirAYD6FPOz6/jkH9/t2c7U/s/sy/DJt8nNyv7MIc9X0UjTTdbR2ofgded77vX0NvvP/6sDVAfKCgAOlhDxEV8RnA9mDBkJKAYEA1wAYv1g+e30OfDe6jLkZN1K11jT3NAJz5vOAs8N0FXRHNPC1QfZBt1s4pfoNPCK+C0CvQx3FqYfeSeiLpg1AjzEQfFGTUsIT3tRW1OpVG9U7FLOT5pLV0YwQYY9yDnXNLIuUCc9IBIa4BPkDD4Fjv2k9gzxt+uA5R3fgNm41UzUZ9LZz7jN7MsvzNjNv87YzzrREtMD1/7Za9ss3CDcdd1i32fgTeE54ezhQeOD5K7lt+ap6ADrfu468g319fYo92v22/XO9bP2i/eS+J75o/kn+av4AfkC+rT6Vfo0+D32IfUr9fD2Gfp//o8DeQjpDOwRjxaOGVEbbhtzG1ocJh6hIJYj5Sb8KS8tAC9qL2YuvyxfK2Uq0iqZKzAs8yvPKiwp8yZ7JMQhsx1tGY4VyRHkDvQLDAhVBJz/kfqA9oLy4u5S6yLn/OMO467iuOIH4uPgU+B+4AbhueG84ojkoOZ36J/q+uxK8A30Pvfd+Zb8If+rASIEJgVIBv8GsQZ4BbYDuAGVAN7/l/+D/6D+lP0s/NL7svyY/pkA7wLfBD0F7gMCAqABaQIiA10DQALzAPT/ov6G/af8nvul+Qz2+vGN7U/pb+Xp4f3eetxL2mLY0NeJ1xzXo9al1yvaoN7t5NfrkvJK+V4ABAjkD84VJBmSGigb4Rs5HTYeYR7QHRYdhht2GVEXABVaE1USYhHEEAgQqQ4iDY8LzQo0CyAMJw1wDewNjQ94EYYSEBLGEOMPRQ80DowMmAlMBtECdv74+TH2APNG8OvsA+nl5s/mAOiB6arq0+vA7bLwQ/Qo+Lr8QQFTBUEJKAzJDV0PNhEOE18VxxY8F7EXORjwF50X2xZ7FacTnxH2DxEPVA+HD/cORA1+C0AKZQkFCEYFqQFq/qv7NPoo+b/36fV085DwAe6n6rrnmOUU4yDh7d6j3DfbkNpG2sraEtst25TbrdvV29jccd5a4KPiSuWo6JXs3e8q84L1R/de+Y/8CQEXBZYISwyuD7ISbRVQFzka4RzGHrcfwCAnIn8jtSRkJHEiFh+YGi0WxBJmDwcM2AiEBkAEyQHD/lX72/l5+Tr5tPi89qn0TvNA8wb1c/f6+Az5j/in+L74sfh4+L73wvcn+PL4bvql+8n94wA3BAoHcgjkCK0JMQspDbEPDxIwFGsWhRjjGikdQh8QIUwiwiIlIvggSCDAIAUiKiNxI3IiOiD8HH0Z9hQ2D0cIMgAu+IbwN+kU4qfbMtXAzhHIfMGXuxi3erTmskiyg7L5sne0HrcYu8jAocZJzMTSz9hP31jm2uzv84X6dAC2BfgJ6w2IEdQUoBd9GV
oanRrMGb8YWBdyFIARtw7OC9gIyAQMAM/7UPhh9mX1p/Wx9o34evqS/QoDewlcEGwWChyfItwptTCvNmg7wD+hRLVK+1A9VnFZ61niVylVf1KjT4pLnEU4QB06LzTELWMlQxy5EuAIwf5c9LLpYd+x1fvMlsX9vpS5SrVDsQOu26qqqNGmVqY7qIirVLCBtcW6lcCOxo7NRNQe22ThY+aI7DzzWfqHAOAERgh8DMwQUhTWFgoX2xY1F08XZxeFFQQSRQ5zClMHcgPQ/zD94Puk/IT99/2Y/P35ePkg+uf69vvD+oL6afwn/9ICFgbIB5gJvAsQDUwOiw+iEMQRjRHkEN4R9hJuEzgS5Q/SDEsLIgmwBqEF0wRGBIAEsQS5BAQENgOGBFIG/Af+CfoKrwv6DT4QjxEVEYkP3w6eD7MPrA0fCukH5AdMCHEHCQUCAYT8Evgc9YL0WPQQ9Hryb/DG7qLtu+2I7qPvM/FP8mPz+fUx+l3/ZAPjBSUHFQgyCq8LqQwdDYAN3A2cDU4NGw2iDMoKVwfkAtX+zfv0+cn37/To8R3wG/CE8F7vKe2F6gPqSevG7BvvT/D28YD0wfbl+Of7gvyq+pr4Fvc7+JP6Zfxm/Oz6cvfv8yHx0+647drsj+wS60bpyund6/vtUvB/8QP0a/gg/Lv/BAO4BSIIpAqPDS8RPBTsFs8YkhqYHEgbKha0EF0OiQ/bE2wWFhU0EdEL+AjmCqYNyw6BDnINkwxAC+IIgAZ6BswIQQpzCSIHCQQyAoIAF/5A++/4O/fa9pT3Gfgb+Tf5EPhO9+P2FfZn9iX31vfs+cv8Sf/OAbgDtgJ1AR8AJ/48/hQANQPzBggJgghYBzEIlAtrD8sQBw+JDFoNwBHSF50dNSASIGoekxv+F6YUCxLrERkTLBKSDiAHWv/I+Sf2tfPm8Bjt0+i75B/ilOE04k/jY+PV45vl4+Y76EHqZez27ibxbfLS8+T0tfR19eL3hfqz+3j6TPjg9n329vQy8mLwFvBk8cDy8fO89Z/2Ifbs8ybxSPDO8aH0gfcW+fL5uPrC+8z9NP8sAPr/NgADASUBeAAK/1T+vv6JACUC2QLYAbD+Y/yG+3b7nP1b/17/yv9fAOUC2AaQCXoLvQx7DVAPYhLCFRgZmBocG24bxBy9HdodmRyyGusZbRk8GDMW9hNxEjgS5xFED/oL+wnQCN8IHAheBgkEewFV/0L+cvyP+0X7H/sw/H/8hPyW/a/+JACjAacCewPFAkcCdAJ4A8sEMQUwBZQFtAXlBKMCrP/c/BT6OPcp9fvzb/O08gbxS+8+7sXtQ+wo6rboDuka6yHume9C78juIu8E8QD0lfax9/D3mfjB+k3+jwHuAs8D5QOlBIgEeQL7AM/+Gv2E+4T4nvWj8q3viOxB6Pvimt1g2MfSZ86hy77J8MguyHzHuMdgySTMac/R0jbXkN0I5bTtCffv/6wJ4hMJHhIoczHbOgNE3ExPVc5cn2J8ZlFoR2mnas9rD2sAaehkAGDtWQZSI0l1Pw02VSw3ITwVQwqcAfX6ovXc8O3rWedb4o/eP9xr29DaWdo62/TcT+Ak5bLoLesF7ZzuBPHr8571JfbZ9Uz2tve596P2sPOF7wnrXOaT4Yrd/Nj+01bPVspXxQTBAr3Iuea35bY3t0S5zLvUvinCJcYZy7rPl9Rg2SXeDOQT69jy7/r3AqoLmBSQHVkmQy2lMio2rDg+OzI9nz6+PyVAVT8zPmk8mDrhOHo2hjIkLocqayegJDwhnxxtF4MSLw5tC5AIiARz/4L61fYK9G3xt+/D7trsZOtI64bsSu4c8EDwP/AP8I/ukO30693qPOrv6Xnq/es/7v/wGvSE9jr4MPoP/cIA2QQxCdoMcQ+QEUUUzBftG0YfDyEMIh8isiElIWcgRR8RHRsaoBZcE3UQVgxkBzMBRvrJ8oPqQ+JG2/vVwdJr0M7OJM04ylPHhcWjxcrHDcvmzl3Up9qJ4p
/r3PR0/j8HIQ/eFfIaNx/DIiIlWCenKMwphyvlLCot4ivbKf8mECT5IfIeKRzrGDEUZRBtDWAKawfWAwYAEP6W/D/7gfrN+aX5NPry+RH4R/Uv8rzvJ+1/6sDm+uH13ZDaidg41/HUbdLg0K7Q/9ED1DnXzNul4qDqrPJy+vsBZwkoEaIY4B69I2UmHyiFKbgqESumKYkmNSJnHVYY5xJuDeAIywVuAxkBp/8E/1X/CQBKAAX/OPzL+Dj2fPXU9QT3gvcZ96v2U/YR9lX1pPPk8ZHwYO+J7iXud+5a8G7yT/RW9e71vfal9734e/np+ST6+/pW/cEAOAPlBGMGxgfeCVQMLQ7TDrIOrQ6KECoUHxduGIcYnRfvFmcW6RSLEsIPxwwYCsAHhwT8AH39Q/rq9uTyeu0G6Mzjs+CX3mvdwNy+3Jfcmdw53WreauGn5Ofoue3r8pH4a/6gBM8KLBGyF1UeJSSdKC0s/C+yM802NTiPN2Q29DR2Mx4yOy+2Ki8mxCGcHfUYDBNwDCwGjAHz/R37DPiq9G3yWPCT71bvT++a79TvJvFE82j2DPmW+rf7yvs5/KH8NfzV+7X61vkl+sb5FvnY+Gf39vRE8fvsCely5Wjh9NwF2WvVsNHEzUXKIMdNxXnEvsPHwnjBocBjwTbDh8ZiyuHNQdKE1+3cZOIX6PLtq/Ra+6YB0gdsDu4Vbh0qJMkqAzAWNNk3cjrTPJA+Zz7TPcE8ADuoOHE1KDI4LygsfCnhJ+claSQFIc0cCxl1FeoS0BGBEUoRGhG7EAwRFBKhE1AVCRcBGF0Y1xeoFi4WBxaHFTwVAhRQEqEPNQuuBTQAQ/tw99TzXfAv7QrqQOdZ5I3hyt+Q3tfc9tpQ2VPZidq923bcztwm3F3byNoW21Hcst2E33bhP+N+5aHnm+lz6y3tKu+q8e/yrvN/9W74KP3sAQsFnweJCjENXBA4Ex0WuRgsGpYabRoSGgAaJhkOF2IT0A4LC2YI7gW9Am3+cPma9Kvvg+m2483f+9wn3DTcvNt62v/YQ9hV2ODXMdgU2djaBN+55LjqCfBf9fL6NQAdBfIJJw/eFIoZ0R0pI3kpPDAKNWs3kzlcPGA+GkAGQQJB90AlP2o8DDn8NcQyzy7YKhUm3CCDGoQSRwrwAiP9vPng9qbyx+we5uTf/tzJ3OfdXuGP49PjfuMf4yfkzecp6/TugvM+92P52/jq9yL4t/o7/mcA/gCaAK/+d/14/Ez76/r7+H70vO/l7BrtFu+T78vseOdK47jhueOt5yjq3eqd6/btTvJ59+n7Nf9eAh4FWgY6CHILoA82E6sVwxaWFo4W1xQREtwOSgvFCOsGxQRdAqP/Iv1O+w/66/jL94H2b/XD9fz1UvVY9br1Svgh/Z8AiQKoA7kEjQZCCGkInQjxCYkM0A8WEp4SJhGBDu0LDwpkCA4GPQODAKT+sv3I+8z3pvN17zztAOze6lXq0ulo6jDsX+3d7W7uju5L8Pny7PRS90z57fvc/7cDvgUzBREECgRDB40Lfg6gD+gOVw3PCx4MNw7dEMER9g8uDtgNRw7QDpQP5RDHEEAPQA3TCvsKPAxbDGsL3ggdBm0EugOCAwEClf/n/LD7kfyX/b/8Jvs4+ir7Df0b/ub9z/3I/+sCrAUMBm0EbwKiAqgEZwZMBpEEnAEQ/6z8GfoY+DH1D/MI8gLxkO9z7TTrUOkk6EvnOuZB5Sjj0eGG4kvkVObU5yLpceoE7YDv/fEo9Rf5qP0JA8gIiQ7hFKoaLSCzJeEpEC7BMUw0zzZoOKo5dDo+OQw10S/+KvgnuCWPIWMa4RHhCeAC4fzX9kPxDu3y6bHnreWf4x3ixOCQ3+begN8Z4DbhMOIl47vkAOaL5+Log+nU6FbnYOUA5HfjuOPP5KflI+ZY5gXmleXv5DzkOeVl5rPnd+k961vuwvEA9WD40vuK/t3/6P+UADIDNAc9DEQQqRKbE2cTPhPXE1UUqBXwFr
oXuhj1GIgZ5RoBHBwcShwHHJQbvRq/GToZRRnQGe8aBRuKGfcWOROiEIcPTA93Dh0MHQmwBtYFAwXqA24DOwMXA2QBMP+Q/Rj9AP43/8H/+P+L/97+Xf6X/ab96P24/ab9BP02/KX7fvus+9H7LPxQ/db/bwJABBAFAQXSBKUF7gZsCNcILQhJB8EG8QV7BAcCPv/X/Kj6FPgP9LLvfOtI6MLliONZ4qvhVeDe3ujcTduU2arXRdb51KbUK9ZR11jYfNn92nndn+BK5MLn3urC7WrwtvI59R347/sG/xgCIQWOCI4MixDJE9YV8RY8F/cWhRYEFvYUdBOvEqgSBBPsEiMRVQ4IC64IXgfIBQAEnQED/y/+C/8CAeUDhQd8C54PRhNrFuMZph1vIeEkFiffKccsdy9BMt0zHDXRNZw1GTXlMxIySTCCLbEq/yb8IVId7BhlFH4QpAu/BZ0A0/rD9bvxKu5S6y7oj+Te4QXf0ttb2BbU+s81zBrJYsbZw+/AnL7NvIS7zbppuj26BLu4veLBsMecze/SEthg3fripegX7o7zZfgm/H8AuQXdCtUPMBTBF9Aa2BzNHbwf4SL4Jt8rpy9FMqwzazMqMj0vBSpUJM0eJhrNFmYT3g/ODIIJSQVPAMD6vfS5727rSudA5EHieeB130TeFN3z3EHdmt6v4CfjTObB6nvxdPmkArsLJBRJHOYj/islM5k4DzyyPkpBrUP1RRtGA0S1P7g50TJUKncfLROoBr76V+/X42jZvtB+ytXEyL8NvCO5t7cquBG6rb6RxDzKUdDK1kveH+Z47Zv0XfvzATUInw23EhIXwRn/Gp4b/xtwHHkc8BrTF6oT7Q4hCpgFHwKx/+f9dPyV+6j7Yfw0/mkAcwI8BAEFMQYvCFUKhgwuDtAPdhJAFSoY+BqDHCwe9x6WHqsdjBujGmMaQRoXGvAYXRaKEu0NUQnjBX4ClP0793Dxhuw+6djmQuOP30DcJtiD1JXRy85KzRzNnsxBzDjMKM12z/zSuNb02OTaKty33VDh/uU76+nwiPYp/HkC8Qg1D7IVQhvjH9sknyk0LswxNTQzNhs4gjnEOLM2izOAMDYuDCy4KDkkah/1GpoX/RQ6Eh0P2gs4CQEHkgTIAR3+tPo3+FD2B/XW8wPy0u8Z7nztlu0u7TLr4um+6U3r2u1G8DXzLPU99lj2o/ZJ9jv1gvQS9OPzSvOg8YPvF+2G6nTo0OWT43bht98b31bfo9/e30vgO+AY4OrfNuDB4d7kC+hB673umfJb9xL8VQDxAy8HrQn1CkEM8w0nEJsTKhdTGkAcWh3gHkUgayCdH1cemB03Hk0fWiCOIfAhASJCIuchtyDdHhoc1BgqFpoT6BH0D90NRwvlCOUGlQXiBCAEbQP3AlQCdwGBAGD/0P4e/sv9fP2W/TD+P/9TAOMBUAMCBcUF1gWKBZ0EeQMRApkAM//9/Xn8iPtc+577Cfvu+LH2mPV19dz1/fXT9Wz2+Pdw+ej6OPvA+rz6//mP+Bv3bvRw8YDujetC6bfmfeSx4tPgpt9z35DeUt3b3Jndld8A4d7gIuCC4LDiJeYy6e/qQ+zF7Jjtb+++8MjygPRK9rr47/sNAAUESAf1CeILGA2+Dk8QMxJ/FIYWKhiLGRobPx2dHYkcrhk2FmQUlRNlE4YSxQ+oDE8KeQikBwUG/QM8A4MENQdOCjEMTw11D2ARoRNiFrgYAxwDH4EhOSSZJSUn3CjvKPMoVSh7J3wmjyR6IgYgix7vHT0dJBtgF9sSIg9yCzgHDANb/vX4TPNm7UXoBuRq363biNjj1FHSRtCTzpvN58teyvnJK8qDyxXNCs7Oz57ScNZ52wXfxuE+5LTlQeiz64fvZvN1+Bz9KgDMAfYABwBRAIAB0gNhBUAFZAYaCAwKcwxqDN8KXQmjCNcI7glkCtcKSAvCDCUQsRKFEtoPWQucBgsE9gMTBt0GEQWtAGH7x/jR9oP0VfHo7S
HsT+wo7e/tCvCn8wb4hvuk/EH9+v9EBD4J2Q0mE8Ua/CLtJ7ApRSndKZUtLzPZN0g6vjmRN4k1zDI3MM0scyi7I9sd8RVXDJkAWvWG7MnmUOIA3cnV280vyPfChryetiWzFbKYtAC4x7szwXXGo8v+0ZLa6eJ26V/wjvgzA0IPJhiOHhMkcSgmLeYx5DXHOAQ5rjVeMP4p4CM0HxcbbBf8EqoLBwIT+T30dvLl8m3zVvHC77bt8uyD8HH3LP/DBLkHsAm9DIIQ0RLDFLUWWRiOGakZZRclFOERbA9uDYUJvAKI+tbz+O7z6wDqS+bD4HHb49fY1sPXztfU1QTShs2xyxPONNP314PbEN5j4CDjTOZP6pnulfO39w/6IvzM/t0CAwqjESgXmRtiH+EioifqK4gv/TLzNDc4DzxmQHZEdEXxRPNCrj9QPF04vjOtLmUpviT6H4sa1hN8DHEGLgF4/HL28e+J61jplOfx5ILgx9tU2fPYT9lX2qvbi9zZ3anfQeEH4hfhON/R377izuaa6XXrGez26zfrHenR5/jnWund6kLrSepW6orrKu2t7bjryujF54LpLOy67Y3uXPA88072x/ip+nf7+vvc/Gf+LQHtA60GZQnODIEQNxNMFdgVLBYVF28YQxqsG1Ec4hw6HX4dzR2xHVscJRozFy4V3xTwFJAUWROWEecPVA6SDGsK6AgxB7UFBQX7BPoFMwZFBTwEwQIwAaIAUv/g/Rb89/kH+qL8kv9YAnEEoQX1BqIIfwqlDBkOtQ+JEWQT4RXFF9MYBxmSGPoXjhZIFLMR/w5PDJ0JkAZdAxAAE/2l+rb46/Z+9HrxP+437IbqO+kP6B7mA+Up5F7j/+FB39zbZdjT1WbVF9aE1UjUcdNq0w/UvtRD1CfUINT31VPZodwY4Mbi0OTh5hDpi+vU7fzvw/Kk9cT4/fsR/40BggMTBUUHKAlBC3AMGQ2HDdIOGBFSFIEYwRsNHqgeAR7wHKsbZRo5GPsU0hL8EUMSGRLrEEQPJQ9CEGwRexLMEt8T4BUYGc0dCyIRJQ0niScCKHgosygPKaYoGygzKF8oUSgdKHYnHSaoJOEiUiGmHwweaxyiGfwV3xEzDrYJtQO//Pn1CvBv67HmZ+Ir34vbStfi0SzMxcZ0whC+2bmQty637bcyuae6WbxQvwfDJ8ewzH/SZNiD3inkaumr7rXz//kAAbUGyQtrDrYPYBHOEwgY5huyHVUfbCAoIR0iAyLaIBsgCh8uHcEa0BZ3EnkO0grDB8sFDgMBAYH/HP6u/Z79Xf4p/tT80Pu6+tz6nfss+xz8u/34/tv/qv6T/Jz66fl9+oP7YPuE+mX5XPkT+yH8M/11/lEA4AJfBbwHzgpoDoASLRYdGVobuRxrHnghHiWuKHYrwCwYLE4q/CfuIi4cXRR+CxUDHvpA7/vkTtqFznjDbLmSsUWrxqRDntOZQpn0nNajaKyEteO/l8oW14nlnvJm/m0JRBQuIOwr/TVaPxVIVk/fVXBao1x7XTxcNFmqVXFQaEnsQSA63jJ0LFImPx/hFjUOawWW/GH0GOx15LzfaNwp2g/Zrdem1zHYNNmf2Y/Zl9pJ3Xbh3OXh6XHsje7P8JTzYva4+JT6evul/Dn9k/1T/40B6QPgAzEBh/16+e30H/Ci6yvn9OFz277USdGy0SvUeddc2brbzN/Z5B/q4e9b9Uz8pgQ7DZIWpiByKiszDDu0QI5EkUVHRYpFY0XhRP9CpT9dPGM4PjP9LiMqhySQHn8YWxPLDQwIJAI4/UX6hPfk8ynvFeqb5tXksuQw5Mbhdt5X3NHbNNwN3Knam9jb1nvW49a11xrYhdgT2SHaMNvx3D/fn+Ew5ETmJ+mH7B3wCvW4+RD90/8SAsUEYAeGCaYK5AtoDcANZw27DCYMZwzJDJkLqAkxCPoGVwZuBcgDLQLEARgCJgOJBLEEhgQjBHADdANyBIUF3AWXBYAFpAUbB9UIlwqnDP
8Nyg6WD4UQxBFKEx8UShSoFOcUZxUiFtEW5Bb4FcYUoxO7ElgSwRLpE5cUIBSzEzUTzxKlESIQ6w71Dc0MvwuyCuQJRAk1CHwGHgSpAPL8DPot+Db4yPdp9pz0kPHp7jnslOmG53nlceRk5Erl5+Z26N/oLenh6enqF+yR7L3rYems5r/lpebM5/HomOiA5xDnt+Zu52/o4uia6Q7rwO0I8cXzcvWV9Sz1P/S/8w7zefKV8vryg/Pc88TzOfNl8ojxWPEe8YnwMfDP8JHxMPPk9Cb39vgh+hf7tfsr/ar/ZwN3CEQOuBOoFwAbOh7rIU4lTiiLK1EukjE9NdU44DsSP7ZByEP+RB9FskM/QZw+kjsUOAw0qC9OKosjxRvnEx0NaAa0/6f5LvOu7fDoV+XF4znjy+LG4uLiLOMz5HXlqeYo6CLqDuwb7lrug+2U7BvrIuqS6WLpPeri6lfqn+jJ5u/lw+UH5tnmvufY6PPpIusO7i3yRvbq+ez8iP+2A1AILgxRDuUOJBDKEvAV3xdkGI4XWxZbFMwRHQ7TCSIFy/8p+jr0N+6P6Cbj9N5c3FbaQ9m412fV4dLo0B7QJNL11fDZEt3u3p7hLub26+zxk/Ym+9gACAdiDWQS2xbYG9ggJCbVKTIrPiwSLlgwADMzNGoz8THLLxgu0ix9KwQq4CfZJJMgmBvhFtoSiw8hDDcJZgUKAbz8lvd88v3u3eyU7GTt4OxH66Lptejj6fnsF/Bg89/1lffM+Wf97AD0A6oFzwUKBuIFAQbOBm4HtwdkB24FJwOKAAf+jPxI++/5Dfhp9SLyze5F63bo2OX/4rDhg+Gy4ufjZeQ25ELke+S05HTmJ+qG74T1KPuX/yUEKQkBDqcSdBc1G6wdXB/iICQj0CWWJzQoyidTJuQjIyBpHBQZ+BXzEkQPygu0CJoFMwOuAcX/fPz09xHzlPDC7/zu3ey/6UrnBOa15WLla+Vp5p3nA+kF6iLqtOlm6aHq0u238V31L/jG+iv+3gDdAk0DTAPaA40EMgZJCMwI8gh6CP0HnggtCBMGpAJ0/xD+uf6U/xr/vv2X/Cf8p/vC+pn5hvjL+If5vfka+qf6ovtX/e/+yACwAksErQW5BtkH9QjICXUKWwu7DGgOhw+dELkRCxNTFEoVfRXWFIIUkhQZFf0V4BVQFXcU1xIgES0OiwqLB7UEgQKU/6n6evVg8HrslekP5rvhgtxa1+nTU9KI0g7TuNIM07XUgtir3UniguYF6t/tjfIH90X71f+0BIsJUw1oDzYR3BKKFFoVXxQIEuYPdg5vDWcNWw1pDX0Ncww5C24KSgszDSkP/RCrEe4RoxIZFKwWDhl6GgwbjRvoHDweKR93HzYgZSGmIgUjMiLfIPcf5R6lHGYZlRX8EQUPRgyaCN8EEQE7/e75w/Wz8GzrQef044/gBNyM1jrR6cxjyQTHmMRdwRW+3LoxugO8Pb8Vw6TGjMnWzFrReddi3kblZ+th8WH3G/1gApoGWAogDdQO4g81EMkQVhKoE1QVSBdZGDkZoBgDF0UVMhRjExcSJBC8Dg4PkA9xEL8QfhBfEFAQyw/hDvcMdwq4CLYI4wlEC0QLUwn0BlgEIgLWAO//Fv8a/g394PzX/A/9wPz+/EP+FgA0ArQEugc1C3sPwhN4GIscOB+JIbsj6yXQJ70oQCpILMQtHC54LKgpISb9IOUZyxKHDEEHSgMY/8r6g/bu8A3r4eSo3yHbNddF1KvQMc3oyh/Li81k0JbRNdKP0tvSAdSA1mjYntqL3F7evOFU5ernP+kG6nDqX+xl72bylvVj+Kf62/zW/n0B8QN4BooIPApoCwQN9A6mEAASMBE5DygMCwl9BhcEZALiANj+YPy8+sn5ZfgO9yT2Y/UT9WX1n/YW+Nn5jvyFAG8FvwpWDokQvxK7Fs8cQSOiKOorpy4QMZg0RjlSPfc+kz0FOyE4SjXpMh4w8yukJX4dkRVvDwgJHQL4+9
n0se016BrisdwR2erVttP40rnSDdTj1cvWINms3IDhduYh60fu6fJj+cj+vwMAB98J0AxVDxwQlg5GDNEJxwb4BNgCqP9y/m/+xPzO+WP14PDC72vv7O4Q77jv5fHg9CH3kvit+Vn6tvyeAJwDlQUDBpgGYgkEDP4NWRC4EV0SkBAADecJYAm0CmkM/AudCDsEzf5x+bjzFu8w7MPq6egw5PDdrtjY0xLRvNDf0BPR/M8ZzgfOJc910rTX7tz34ZHnQe2Y8i34T/zBAXsIrg5ZFDgYYBvhH10iLCT+JAkkyiNQI7Eknye2KasqNik/Jgwk0yEAH9Eb/xkAG98b7hp2GJEUcxOlE/ASbRPIE04UHhbtFf8TrBFfDqgLSwvkCywMZgzuDCEOlQ66DVcJrgI9/Xj5W/f29afzSPFs7vLrCerG51jkat7Y133VHdZw2LPaJtqN2n7bN9yk3Lrcfd213+XhO+RF5zrstfIf+hECEglrDwYS8hLCFUcbaCKJJgMmnCNvIuUiJySNI2Ehfx2ZGM0TSA7eCesFkQKtAE3+Yvqr9iL0OvO/9Ob1mvSS8t3voO0K7RTuMvDB8k/0h/Sg87bxne/T7B3r9epq6/zq8umw6Pzn+eiM6rDsOu4p7/jvWvC88Z/zRvVE+CH8OgDQAyAFuAWRBmwI6AnBCWAIqQcJCt0O5hNYFpwVQRRXFMMWyBkBG3caJhjNFusWOBctGCQZIRqkGp4YxhQrEUwPBg/rD1AQ+w/MD38OfA2NDWcNAg1oDNwKLgkmCCwJJgu6Cx4JdQQnAPP95v1L/RH77/aw8U7t5uqV6Yrnb+Lg28XWCtXV1dvVD9Qx0rTSyNae3Ijh++TS5lvpSO1y8gf49PxUAdoENgkaDZsPfBEYEsYRJxEwEDYPzA21CrgFNQJA/w/9Yvu2+AL15vA57V3ryOtC7GLsS+yh7Pftue/Y8BP0o/gp/fAAMAR1CMINIBLSFDoWdhdWGDEZ+BodHRQggiK0JLEmUCjwKaYqpiriKLAmiSSHIo0hRCGgIL0fDB5EGs0V1RArCxMGwABD+9b2lfJf7gDrK+ic5dPi1t1A15rRj87RzQTOJ81NzC7N88+80sHUTtaI2cTew+U47VT0UvsNAegG9wwfExcYYhs6HgwgmyCwIPAfLx/gHTkbMxg9FPwO+AgHA3D9sfg19OfvAuwX6PHjL99y26zYKtYS1BbSBdGm0XbT1tUx2RndjeBV43fmu+o98G323/zrBDIMrBG4FmMbLSGYJ7gsRjA9MrUzAzgLPlVDzEYgSJtJEUu7S3VM+0wVTWJM5EqbR79D9T/zOQQzgivrI+McJRZhDyYIFgDr9uDtFuU33SLWp9BCzLbGZcEJvY66pLjctry1M7SGsvOw1K7QrAisea35sZK3Q75DxHXIscyv0PnU/dnT3yrlIerc7lrza/fa+z0BcgXsCPELxw1dD9sPUw95D/4PCxAXEN4ROxNLFG0VShaYGFsaFxgYEyoOSgmsBUgEFARFBrEJ6guQDe4NGA9gEkYWThu2IAcn7i3qM6I6eEAiRe9InknsSENIz0R3QNQ66zOyLHYjHhmSDnsDmPh57lnkA9yT1J/NjMhKxTnETcZEyH/KOc3Lz5bUKtzG5NXuN/gUAIIJARKnGdofhSSpKNEs3i7ULXwr0yhrJl0kph8qGV4THgwyBeD+m/gl89nuRutm5+nkaOHw3IDYVtRL0RzO6stcykHKmcw00lTZS+FL6Dntm/LX+fEA9AYGDI4QchVLGg8drByqGtEXgxQlEfIMqgb+AC374fQn757owuHc227WZdK7zUfHBMEAvTe+SsOzyfrPndVo20fjK+vY8jv7kQLeCF4OZBP7GMwfiSaCLOAwOjKJMVAuRipSJ0olnSPNIK8c/BdXFDoTchNJE4MSDxEgEHIP6Q0ODHoKywpVDZsRUxW/F44azx1nIfYjjCPzIJcebxzNGxMcDBzgGx0bvhmwF/gUuxH+DZ
AJyQVgA/EBewBF/n383Psj/Pr6tPaX8P7ql+Zg5P7iaOGM35Dcu9hP1MrPOMzyymLKacoHykLKzsvezqzTUNjh2sHbMN2i4H7mney58DPy+/LH9GT4af04AoYFygZMBmkGLAdkCdELgg2yDu4O+w68DlUOUQ6DDuENLw1/DIMMFw6pDkoOEA+SEXgV7BhyG9gcmR68ILoivSWpJy8pTivOLO4sRipYJfog2h25GgAXghIeDg0J/QOo/rn5fvUx8Z3si+ie5FXgdtxE2TvXWtWE0iLQls4zzr3O6854zwvQqdHe1BDZ8t3j4oXnbuxH8Xr22vsiAY8GywswEbkWsRshH14h+yIyJZYo9yugLfAt4S2iLl8w1DD1Lp8qDyWBH/0aVRYrES4LaQNv+7/0w+5W6VDiZtpW0x7NkMgbxY/DkMNdxNrFR8gNzGLQq9T42IrdVeMN6rDxJ/mxABcJDxKMG/Aj+ClJL700ODtiQs1HU0t0TRlPR1CZUC1PFEtRRpZBTj0fOS8zlSrDIPYXhhBCCTcAC/YL7WTlw96c2X3Ups71yGjCLLwjuJq0+7G7r42uZq8hsWiyI7PQsxm1MrhkvdTE88ws1MXa7OAx56/tA/Rv+u7/ygQECW8NrBElFXUYFxuxHIsd8R3DHUIdfRy/Gy0aYxiOFtoUyxNJEvgQ/w8YD+oOBA/MDgcP5w9IEZkTSha3GWkdNyGEJJQn2SryLTsxUjQKOLg7OT7rPkA+fDynOuc3MDUTMgMu7SgXI1QdzBYWEHQJfwNh/vz3rO+x5+vf+9lz1cnQc805y0DJCcmqyRzK5cq+y+XMYc7Kz0LRCdOH1QXZI9364B3k4OZz6cLr1O307wLyR/TO9ar2VfdT+Ef5tfnM+RT6j/na9y31TfG57ortiOzd677qtum06SvqZOvQ7NLuxPHv9OH3iPsj/6oCqQZeCuQNvhHzFJMX8RnwG4gdLx6vHl8eaB0+HIcaSRg1Fu0TyRCcDRcJ2wOs/o/5TvWi8eHuF+z86Z/pquoK7orzdPpnARwIiw63FPsaWyDlJFAp+i0wMtE1BjgNOU05fDgoNowx1CpRI40bwBNeDL0Ecv2n9nrvjOir4tXe590l3+TgXeOO5vDpj+6G9If7YgPBCicQDxXMGEkc2B/2IfIiqCFFHkAaPBZOEkgPfQw6CkYIzgTMAD38GPfo8qruYOrd5djgNtwe2eDX29cL2ObXstZb1SXUHdNi0iHST9LY0j3U2NWK2O3b994f4VrilOO05RvoUup37MrufPFV9Cn2Ifdw96T3WPdB9oz1ZvUM9g/3rvfc+Mj65/x2/h7/QgADAiYE3AYVCowOXhM6GD4dtyFQJmcqUi0UMI4yBjTZNLo1GjfgONs65jtkPBo89zr1OHI2ijNiMNcsOihwIzgfuRvuF8wTIw6gBmv+E/eg8fXty+kr5CHeAdmh1eDTQtKHz5HMysm+yKzJEMs6zITNFM9t0UHUG9fV2jDfTeR36azu9vMR+cn+HwWQC4IRbxbgGSYdmiC1I1AlwiW7JE4j5iG9H6kcMBhKFCkROA6SChAGKgGN/GH4ZfRz8Fjsmeg95r7kVONS4ZPeG9yB2qbaZ9t+3Ifda97A4KTkpulB71TzUvYu+nz+DgP6BxINPBKYF7sbax5jII4hsyJtJKYltiZVJ80n1yfiJtckkSLLIOMeRhyRGGcU7Q/pC2cHiwLS/cz4A/TN72nr3eZF4zrhD+G04UriPOM35KflpOcJ6a7qne2/8bP1Z/lM/B3+MQD6AVEE3QYgCWIKEwoGCREIvgdpCJkI0AdlBtgE+QOfAl8AfP2y+Zb1l/IF8U7wY+/E7UHs9euD7MDtSe7N7S/uAPD+87P3G/pS/Bf+EgE7BSMJDAx6Dt0Q3RMhFwUaexxeHx4jQiaLKFgp0ClGKjUqUiqzKb4oqyebJtslXiQmIY0d7BmxFksTgA98C1IHigLW/DL4S/Q68fftyemh5KTfJtuJ15
XUxdHIzvDL/MnvyPrIAckpyNvGssVZxYfGK8k/zVXS9NeD3UviseZy6pDt1PBL9Gj4RP0YAukF+AizC5gNDQ/UD0AQjRDODxwODQyrCi0Ksgl4CToKOgvBDC0N6gzgDVwQFBQoGUIdpCDlIv4iECQfJqsoriuSLfYuKTCvLxAuPSuFJ8kjVSB/HOUXGhNZDlwJaAVcARL9HPgZ8njsd+hr5pPlHuXA5DDlfuYH6ZnrhO2471DyRvUn+VL8yP+PAywHvQksCzYMTQ35Ds0Pig+jDlINEAwFCwoKHAmgB6gF8AJzABb+f/up+Fn2GvWw9JP0FPT78kDxKe977bbrWOrV6e/plOoS66jqUekO6C3nL+c16A/q/euU7XHuge9E8e3z6fWM9m/2T/bW9rj3PviL+HD4NPfB9YHz5vG68UryDfNN87jyL/Kx8cLx+PJc9aH41vs8/jj/dP8fAF8BQgM0BVoGiQfgCLMKgwxQDhAQ5hEaEwATvxEHEF8P3Q8WETQSQhJOESgQYQ8QD38POxDsEHwRRxFCEPQOPQ7xDvUQsRKKE7MTnhONFMwWEhmAGgsbHRsWHBoebyC0IiwkICTbIrQgdB4uHb8bYhklFuMRgA23CfAFQgIo/9P7g/iK9A3wCuza6FbnwObw5ZTk/+Ij4RDfk9zn2bHY0teb1ujUnNJn0Q/Sr9NJ1VDWRNbm1t3XNNk121bd3N/K4ijmjupW75Lzj/ey+mT9NwDEAlYFeQfSCOQJjgrvChwLOgpGCagIiwhTCG4HwAZRBjIHtghbCtkL+Az1DakPwhK2Fl8aaB38HtAfvCCzIdUiFyRUJKQjfCLxIAwgNx6YGmEVJQ/XCPsCYf1h+Dn0uPC57fTqMuiD5Rbj0OBw317fj+Cu4hzmU+rc7inzp/Y3+jf+nAPNCOEMxQ+/EtQW9BteIAUjIyRbJHMj6yHBH7IcGhqvF2cVKhIvDvAI/wOY//D6Evad8Ezrf+b+4fjdYdqs18/VSNQJ0zfRl88Tz+bPf9Et0/vUMNeS2hPed+Fs5U7pve3W8iH4pP1KAkYHmgzIEaUW4RlPHLoe8x9LIEsg8B9dIMghXSO5I/ciySAqHp0cmRtVGsIYbBasE+ERcBDFDsgMSAoLCKEGZAVqAzcBkP4U/In6IPoR+9/7+Pyh/TH+ngA2A9QEWAW9BJ4FZgdqCAAJwQg2CMQHKwfbBcsEvQPiAan/wf0I+/73HPUZ8unvmu597ProM+XJ4BHdLtr412bX5dZA1vXUONNC0gnSN9Il08DVHtle3VXiM+ca6z/tM++u8Wf0O/ez+rz+ygKAB/ENTBRJGoEejyAAI6cl+yg3LRgyFjeqOyY/OUFyQrZC1z/XO1Q3VjJDLCAlxh09FqcQYgvWBUr/Vffh7h3nOOHI3Tzbj9j71jjVVtSv1PfVfth220HftuMa6HTtavKr9sr5yPsF/0YErQr1DiAQAA9kDp8Oig4VDl0MyAniBhcEQAF1//n9L/zC+3H84vwQ/R79mvwB/pwAmgIwBNoF4QdnC2oPtBFjE3EURBZnGd8cTB8qIDog0x9YH70cgRiAEwgPnwxSCmUGlgH1+9j24/NL8XfuS+pc5EXfP9yO2jLZFtdC1S3UtdJI0s3TD9YW2TTbLN1+32HjQOgb7BvwMvS8+Av9Uf8MAW0D7gQvBTIEfQOnBKkFRgZdBaQC8f+e/BL6DfjU9fD0uvUj9+r5Yvw8/vL/LwEdAWsBegLxBKoJIw5yEf4T0BTDFO8VyRb2F6AYcBd3FbUTXRKbErsS2BKVElwS3BJcEq0Q4Q6zDIULZgwdDhsQBRB9DpgN7A0nD9cPZQ5aDEgKOQmeCWcKuAptC+ELzgsTC+gIlAfcB2kIjAhfB6sEbQICAX4A/QAgANT90frx9j31D/Wy9Qr2hvQj8f/tyuwj7aju7O8N8FTwX/BF71ntbOsX61vsD+5A7oztUe0J7nbvwvDl8KbwEvGW8dfySvX29xb7t/5dA9
oHUQukDfsOuRDnEqEUVRXDFHATwhJwE2UUlRPDD7gJGQOH/Fj3jfJm7bnok+MU3iDZTtaa1UzWgtaw1d7U59Rn1q7Y99vg4Mbmkez+8Zn26fpo/rUAeAKOBfoIMAwsD80ReBT6FuAXiBeQFogVTRZaGFQaYhu+G/sbcx3jHu8fAiBTH2oeBx4bHo4ejCBPIt0i2SF1Hwkdfxx0HeMeVR/tHdUbeRkKGMQWcBWgE7gRoxA2D8oMLAntBLQB7v64+4P3O/Kd7BXnreGt3LDY0tVZ0/rQEc9+zW7MQssry2TMec4A0XbTG9aQ2F3bit9i5Dfq7u/v9Kf6dv/VA6QI/gzuEL0TxxVBGKMa3hvEG+8atxnzFnAS1AyxBsMBl/0d+bL08fAE7Q/qteeu5NLiheAn3hPd89ww3rPgeuRX6Czsm/Cu9Gz50f5AA+AGWwqEDs0TWRnaHdMhgiQRJ5UqGC12LlovZC8XL/ItxysmKpcnxCTLIN0bIBc+EpoNYwkGB9IFpASgAzgCDAEaAMf/GwBXAW8DlgTHBeUGegkrDSYPlw+6DhANIgtDCR4I5weYBwUHYwWhA7gBk/76+mP3RfOa76Tsx+mO5ynlYuJa4KTfyt4t3vjcsdti2oHZ5tjz14LWKNSV0qPSCtOG0nvRmdAs0RXSrdGJ0JHODM1NzS3Pz9Fa1aPZQ95g44/pVPBu97z+UQVoC9AQvBanHAkioicULSsyHTd1PHRBXkVPSHJK7ExqT4pQf1A0T0JMTUmPRtRDFEDaOkw0Ty77KDojGx2hFasNEQaH/rv3RvLL7O3n4OI83obaAdYi0T7MMcjgxcDFV8eHyizPkdTx2TnfguWe7CT1wP3KBb0NNRWMHMEiaidCKl8rwiq1KH0lgSBJGuISGgueBAH/1fmW9AnvH+ny41bf49ox16HU+tKm0t/ToNWc2MjcTeHM5THqCO4e8hn3RvwJAcAE8QdxC7YPJxRwGAkbXxtTG0caFRsaHX8dxBwpG3oZ/Bg+GPcVuRKCDuMKzwdxBC8Btv1f+1D6pPje9nv05fFH8NfvPfDE8DPwre637UDuiu7g7o3uUO3b7Nfs8OwB7UHsbOsY65zrt+0f76Xvpu/R76fxDfRu9e72RPeU98P4v/ot/Ub/VgFTA+AFdAg3CjYLMwwLDV8ORg9RD6sOWA7wDucPYBCZEFQQmg/0DhkP/A/ZEJgRnREjEvITxRVsFpwWxBUZFb8VchbqFn0XVhcVFzgXxxZQFakSdg/gDO0J2AXKAZz+EfxL+X/1WvE/7tHrEerJ5/PkU+Jz4JDfqN+z30zgaOEY4gbjZ+RF5mLpnO1D8gX4Tf5yBAsLChIFGGodjSFEJNolWSZ/Ji8mRyVjI3UgBR0lGWYUXA62BxMBYvoB9PbttOiy5A/i1t+P3q7dbNwz25Haedsr3ojhSOUb6dbs+/C49Ir3A/ps/Nn+jAELA2QDqwO+A6kEJgYIB3UH4gbaBP4BuP94/TD8b/v0+Vj4N/e69mP2Hfa+9Y311vUB9i32OvZL9if25/Wi9Ur26ve6+cP6Tfv/+r76Xfvm+4L8ovwx/Qr+oP9EASAD8gTOBicJ6wtgDwMSBBT7FToXQhiGGR8bPhyhHGYdXx4mH5EfDB+mHqQeaR4aHvkdDR4QHxghgSMGJhkosShqKL8n1yVhIyUg8xy2GYAVPBA6CtIElQBC/GX3JfF46hfkk92K1v7PQcuzx0fFp8Jiv0e8SbqPuV+75b0kwAjDC8aWyi7Qk9ZR3S7jmOj97cH03fu9AbUHTg2/EiwX/Rk+G54b6xu3G0oc3hxxHe8cqxrlF/EUqhGRDmkLPAdJAsD8jfgr9u70Q/Mb8D/t0euN7PjuCPIJ9ZP38vhO+rX8X/9HA8YG8QgYC+sMmA4NEJMQ1RCDEFoQTxC+D6YP3Q/iEMkQrw9YD50PhRDjEd8SvRN1FFoUiRNaE8kUGRetGa4aVxufG7sb+RsbHMYcAh0sHOYaQhl3Fx
sW9BMJEeIMYge7AUn87fcB9bvxC+5K6dzjSt/h27PZttiq1//W+dWO1NTTptOZ1IrVI9Zg1rPWadcZ2draRtyy3YHePt+U3wnfxd4l34PgveOk5zXslvDc8xv2svep+YD8lf8NAokE8waCCXgMSw9jEcwSLxNHE4QSohFOEaYRCROiFKAVARbWFRYV9RRwFaEW9hcTGY0ZQBqKGzwdGx9bIDYhyyH2Ib8hICGhH88dzhvoGREYPhZoFLMRYw+yDEcJOwYmA8AAQv4w+wb4r/Vo9Inzp/FP72/t9+uf6wTr8+ja5k7l+uRH5h7nH+dj5+vnw+hW6mHrj+zH7l/x7PNi9jj4uvqY/gADswZpCZQLQQ5oEUIUIRa6FrMXtxiqGUYa1xiMFq0UABOOERcPRgv9BvICTP/c++n4qvYU9YvzufFt7/fsd+q76FvoPOnq6qbsGO5b7+Tv1/CZ8hz0TfUr9uv3Q/qs/IP+3v8LAb4CKgRjBUEGfwbPBjYHhwfJB1UHmwYMBoAFvASyAmX/Rvz8+af42vd39t30JPTj83Xz8PK38ZzwVvDi8Cry3vMt9P3zl/MK9Dz2Wvh7+Xn65vr4+8H+MQJrBgIKJgw0DmIRXBS1FqYX9hfKGDwavxvqHDAdoRzCG4oazRiDFkETdw8YDG8JvQaqAxsAlPyM+gr5MPcl9dPy3fDg71fvze/A8ODxVPMp9Q/3Mfms+k/7k/zH/ggCbAWbB/kIBgpLCwkNIA9gEYYS1BK9EggTpBLIEYoQnA/IDl4N0gqNBzEFKgMWAgEBN/+G/Ef5SvZA8yLwQO2S6grpjugs6B7oMOiM6KXpEut+7L3tMu9w8DjyA/Sr9m76pP6tA1oIqQviDU0PghAnEswTuBTEFD8UcRPuEmkRQw7/Ca8FLALu/h/7gvbz8cjuq+wK65LpNucJ5YbkIuVm5k3nDucf52jokerf7Hzude8k8NHxcvOl9Fv1Z/bp9xj6xfwQ/58B2QNiBdsG2AhVC7cOEBLYFPcWaRmfG/UdGCCGIAwgQh+uHp8eMR5/HG8ZhRXEEbUO6QywC3kJ7gXUAMf7Rvji9VT0mPPW80v0sfQ79U72t/ce+gD9gwCqBOQHWgoHDJkNHRCHEmAUmRVqFf0UeRTBEwgTIRKuD5kMXgm2BeYCLgAr/b75Zfa08qDuA+rv5RLjBeGm38bddNvP2UHZiNmr2S/ZP9nZ2QHbmNw33pXgvuP85ZXn/eeT553nZehH6arqm+tV7BvtCO4Y78HvZ/HH8rL1IPka/On/9gM+COYMQRBRE8gWMhu6IAgn6i3pM6A4ZjucPQJAoUPcRs5HukW2QtVA5D8qP+88nDn2NEIwXCqYIwEejRi0EtQL3QP7/P/3WfM07mvo4+JT3tnZgtZq1ArULNWm1ZPWmtYJ15PYFdrM3A/g1+Im5vTo8Op47KDuN/E+8rvyLfIY8rHzdvUr9zv4gfci9/r2S/bz9bD0dvO39PX3ZftJ/cv8Svs1+VH3vPb++Cv95QAQA8kDOwXFBiIHPQfoB48HeQb+BDcE7gTJBtAIJgjFByIIRgevBQ0FrQRtBXYGoASVAvX/B/0S/dT8VPuN+av11/Ba7PTqsOua7TPw6PFk8970T/e2+vD+ogJ7BWsIDgzvD64TqRcxGnMcgh+DIV0jXCTIIpMhASCwHbwc7xsJGgQYixXEEpoP8QqEB54FxAL9/nH6cvfe9oL2MvaK9fX0s/WN9jb4KvrT+Q75VPlb+5X/DgS4BVgGXQdhCV8MdQ0MDB4JhwZvBXwD1gEGAeQAfAHRANf90Pl39T/y2/C17s3sN+od6d/peeqG6W/nBOYF5QfmEehE6rvst+347uHwLvM+9rH4dfp1/Fn9tP2i/rz/ZwCpAM0AlgA8AWsAGf64+2r6uvoy+1P7ivr1+UX73Pwf/Sv8K/qD+sL7EP0O/vf8p/wH/goADQI/AsgAtv/u/qD87Ppl+ZT3nvc9+F36Z/5PAXUCvAG7/6
UAdgNCBggJqguFDVsQoxO4FpwZtxrLGQsahRtLHEIbDRk5F5kXcxguF+kTrg+PDOMKzAlQCEYGNwS2AzQDEQIhAK/96Ps7+177tfwG/sj+4v1s/E/7hPqR+j360fkp+pP6cPqo+uz6C/zd/vMBWQRoBcIEfwTYBCIGEQeOBm4G3QaNB9MHWQYjBO4BcADy/or8uvgA9AXwQO057NbrM+w066Lo0ebr5HzjcOPE5MfnGuvh7GHuZ/JG+OH8tv7w/W79FQBXBCMJqQwkDegKEglxCcIKJwudCGQEVgB0/uP96/2Y/sf/UAExAmoBqP5j/DD8vf7vAbED1wK4Af0BKwRpBtUG7QUeA3wAYP7K/Zj+cACtAfcAOv5t+/T5zvo6/Tv/AQE5AqkC6ALiA00FsgftCkMOXRHsE2EVPBf2GKYbqx4OIUwi3SCoHdgZ2xcZGF0YPxaCEhIOhwroBnABGPvb9T7yTO9k7OPpX+fp5GPhnNyu2afXg9b51dbVpdcK3Efg4uLU5D3mjOjN7E/xBPU/+fH8jQBoA+0DDAQaBHYEYgSvAiABt/0F+ff1jvOf8lvy6PAM8PXuK+3B65bpfOiM6cXq5+wI8Pzysfbj+oL/IQSsCOgMRhFIFvkbnSGDJ3YtAzPfNz87RD61QUVGGktWUDRTXVRvUwVR1E96TQlK/kQ5PuM2gTBHKTghaRcODC4BUvdv7m/lXNwJ0zTK9sPHvsG3D7Esqh6kQqBPnY2bsptdnC+dtZ7ioDmlMqx/tOO8d8SAyyjTO9xF5o7xWv2KB9EQxRi6H08mZCsbLtouyy+OMHQvbS2xKP0i6h9lHK8X3BMED0sMpAuZCfgGjwJA/ib7h/n3+fr6P/xw/SX/1wOcCYANBhAkEfMQAhKfEhgUXReBGhocGx1wHwoh5iG3IDkdoxmPFVISGRDFDJIIlQRNANH8PfwG+kz3APVJ8+nyofPX9VT52Pst/QgA6AI0CLcOBhPNFQEXkRVpE78SkRIVFHsVsRNrEVoPgwxoCWYEoP0L+G/xbelG4bLZm9Ry0H3NvcwnzIbKZsmeyeLLnc/E0wLZvt/Q5n/tQfRi+mX/GgRGCKsLGQ1UDmUPkxD1ESsRMhCyDncMMgy6CzoJYQWJ/5L7Mfur+zv7wvre+bb39vS68r3yp/Np8//wsu5I7x3x+/Ih9DT1q/bu94P4y/bT8y/yGPOW9rL6Fv80AfoCzAbBCnQN+A10DWwOnRFsFbcWfxbGFQQWDxhKGXAY0hSSEYwOMgv8CA4H7gXQBsUHeAiNCMIIcQlyCoMLFg0gEOkTwhXoFZwWqBf6GM8ZoxcnExoQJQ0mCncIqQR5AQT+6Ply+E/2P/Jo7U/pTebp5EjkNeaF6izvovMa94b5LPwh/6sD+goIERYVcheTGQYdPR+kHxQdXxruFy0U1xA9DBoH3wGy/Kb3U/LO7Qrp3uOp34TcQdsS2xbZq9b71bvXidro2xjcONzS3SrhLuYN7L3x4fUv+bf7rv4lAr0Fzgh/CyIOnw61DeYMhA6+Ee8TgROOEMAMUgoYCL4F1gKv/qP7D/n69pP2rPYZ9y73NPaA9af1iPZt9xT5/frb/f8AQwIsAssBYAEHAs8CHgLoAOYAdwFtAncE3wSTBMgD/gHg/zn9wfvW/WwBbgTCBh8IfAquDSQPTxAREVsSChVfGJscZyAQIzolJibLJTgk9yD5HbccnBsxGRIVaQ6oCDQEoQGyAC3/8PuC94L0SPMX8tPvYeul5pzmzOj56t/rfOr66azsb+/l8fnyOvKz8aTxqPPL9iX6ivw//tEAeQNzBbsGqQYdBjwF/QEj/of5OfU486HxNO/y6wzo/eK03QfYStNN0XTRF9J30g7SStLu08zVRtco2P/Yg9vt3w3l/up+7+ryU/fN/dwG3g81GNUe6CT5LBE3qkE5StROylDCUrJV+1rzX3ZidmJHYMVc2Fh2VIFPwEkgQhQ4ZiwaIREY7hBUCm4Elv2i9V
XtMOV93ejWGtGazJvIvcTwwS7A1cDpwgbFZMa9x/jIe8pPzA7ORtAg1OXW6tj82zzelOE45ZPmRudR50vmnOVE5SLlR+XZ5GDjCuK+4OLfCt8H34LgiePt59DsV/FK9cL5TP/fBpkOCxaQHRQkbCmgLc8wvjT3OO08vkBSRGhIZEtbTTBOqk3lS+5HuUIEPa02+i8aKbQiJByWFesOhAeY//T3rfBv66foV+WA4gngtd4b3wXfS97Q3DHc5dwO3vTfleK85Rnq5e7d8vP2sPq6/b8AJgOMBc4IWwx3D9sS3xRxFXsV8BSxFIwUUBMBEYIOXwuTCKMGSAVDBBwCT/7C+SH04u0355Lgp9rF1DTPIso1xVjA/Lvhua+5LroDuxu79br0vEDAysXzzAHVp93w5f7sEfQW/KMEXg2bFMYblyLYKA0vrDO0NoU50DoeO8E6wDm9N740VTCxK/0mayIAHnYYHRKUCz8FqAD5/Wz8Avw7+0v7fPxu/gkACALYBGUIHgzQDggRvxKFE/cTlRT9FFQVjRS+EjURqg/wDVAMaAt+C5QMPg3iDCUMoAzbDmURmRPgFDsViRRpE3gSDxGxDhUMoQhkBZ4Ct/8i/X36C/gZ9VPyYfCP7sfs2uoq6dro9elv6/fsTe7U7hHvxe7W7XPr5een5MDi9+Dp3Qba0dU80yLR2c3wyZ/GBsXdxbXHmcmRyw/OrNLi2bjhHuke8MX2lP7bBgAPrhYlHc0jXio4MEQ0HzX5M+wxty6VK94nxSOaH+oZJBQ/DvgI5AN9/Y/22u+z6v7mVeM84HreVN6+3y7iBuU26Ljr3O999IL5wf5CBdcMMRawHzEm/SosLqswoDPANfQ19TPOMLEuoi5KLt0rdyjRI6MfKBzrFxoUuRDTDY4LdgmeBmwCtP3a+KL0hfHT7pPsserk6TTqeerb6g7rRuuy6+3rbuwB7Z/tau6O767wr/Gp8unztvXZ9vj2v/a+9g73jfgM+uj7iP2F/hr/9/5y/+IAiQLkAo4ClgHvAHMAGwAr/r37OvkE9yr1g/Py8Vzwoe7o6wjpROZq5LviJeFM3wXfguBD49PlQeeK6CPqV+xi75zyA/fr/GACegdEDEIQmBRoGdweTSViK84vpjJCNNA0WTVlNc8zazHTLi4soyo7KZsnNySkHlgZfBTqENcNZAlwBPr/Af0++5f5JPd89FLy0O9l7afq7+hS6P3nhubS457hWuAa4fviqeRN5SDltOMS4vHhjOEC4pPjd+TG5e/m0+d36WDrlew47QLtbOxT7HXshe0E8BL0Lfg2+0X9Xv/MAcYDSQUNB6IIvAklCqwJ6ArYDQASiRbwGR0d9B82IkElJyg3Kgcs6i2DME4zqjQDNhw4jDq9O8I5STVFMDQsXikpJnYhYhqhEV0JHALk/Cf43vIq7OnkmN6Q2N/Sycwtx7LDksBovTu6yrccuNm5nrvjvVfAa8OTxhLK0M5l1QrdVOMF6PLqd+1g8YD1OfmA/Hj+zgBhBLsHZAvuDcMOdRBBE9QW7hg0GdcYixhFGUga2xtmHXIddRtJGnkbHh2wHKUayBdXFgcYBBn1F5AWqhUbFlkX8BYeFbwSlhCUD40OUQ07DdsMTAxuC9cKUguFC6QKsgioBI4AfP8UAV8DsQK7AKr+L/2K/GT7AvrS+Uz6x/qm/KX9cv2q/Db8KP07/08Ax/9A/4v+lf81AdEBsgHq/br5l/d69jb3jvgf+Z73sfQ18DnsBuzp7Fzuf++071DvP+9c8c31+vpB/8kAyf50/RP97PwKAN0DtgYXCSsGLgCm+Ojvrek+5vXisN+G3LvZLtgV1pnSUc4fzE3M0c0v0vLXvdxt4rPna+w39Jb8jgVDDnsTNhXDFV4YCB5KJugtcTLaM88z/DF+MUwxqi8qLdopIyYnJYolKSQtIcEayRORDasGDgAn+1z20PIC8IDs+erw6b3mdeMm4QXhr+Ps5RPojewo8eb3s/7yBA
4N0BSsGvscDR4BIa4miy3tMhA04TIBMVUw7zBBMAItKCf7IWMf/BwwGT0UzwxoB08FfQO1/vL2XO+76gDq3OlM6GTlOeJi32rdzNu62qfcW+BG4sTixeGT4C7hh+FF4zbnhOov7BbsV+xE7ujwu/HI7/vtKe3A7RvvBe+07TPrw+dq5YjlW+aj5XvjPuIV49blj+i561bvZvMP91T5Bvvc/CwBqgcfD3gUQhUsFfoVFRk1HWUdcRvtGEsXrRbBFcUSPg/7Cj4GqgCZ+n70Y++k7CrsU+3N7ebt6u2I72DzRPer+yUBAgalDKYTqxqzIWInGixTL7gxzjNiNcI38DkyO9s5yTUdMq8u1SpXJncifx4CGgUUlQsfAx78fvcU9E/xsu326VHm4+Je4Q/gvt6A3WLbTtn+2OnZxNxP4I7i3uMH5ErjDeOF5ErnKusB73XxaPKU8mz0FPl9/rsCVwT8BCwHgAqyDH0NaQ6iD7oR+RIVE3QSuhGsENIOJwy+CEcF4QHw/lj8DPqW9xn1S/LU7pHqteb24yniY+BA3w/ejN714L/iTuRX5sPpZe7H8yb4jfsM/2cCjgUnCmUOAhLGFHUWQRmkHNYemR+wHeQa7hgpGJ8YuBjwFyAWDhRHEroQtQ7kCx0JUwjkCDwJZwgUBuwE0gV6B+MITAneCYoKhgvIDGoPvhIBFZkWGxcFF8kWBhaKFZgUbxLJEO0OIwyZCDkD1P2N+b/1f/LI7vzpyuSB4NHdR9yx2UvVj9BjzHDJyscOx3bGYMWiw3bCz8NZxiTI68klzF/P+tIY1lLZ4t304mvo0+4I9bP6fABTBi8MsRLoFxkb9R32ICYk+ShoLSExqTQ5N5w5wDuKPWg+Vz6nPes8/DsbO6A6ATrqONk2JTM4L2wr0CexI2sfMhuuF6MVVxPFEKQOtg2UDJ8LRwrQB5AFAQOQ/1v87Plp+Gn30fXk83vzIPST9LDzL/G47cbq5Of95IPh7tyh2QvXtNT10Q3OccmUxGy/crqztZixF6+srZSt5q7zrxmyA7YLu8jB/ckJ04HcWOUm7vz1Ev1+BaoNzhWJHbsiTyZaKjMvWjZyPUhDtUfTSfNKpUrMSDVEWD6TN6Mx3Cz2KGsmJCJqG6QSIQnzAIH5Z/FW6ZDhBNyu2fzZX9vI3Gfe3t+44Hng1+Ap4ojk7OcV7IXwLvW6+Q7+OgMOCWcO0xLMFuga8CBBJ0MtlTNFOYU+Y0LbQnhBaT/HPBM6MDYhMccqAiIxGNkOJAaT/a/0kut74+jcpNdS05/OesqJx0bGksZhyPDK+s5J1AXaR+G26KXuVPQL+GP6RvwZ/eX9lf7D/t3+jv7d++T3BvQU8XbvHO4V7UntoO+29E77KAMHCxsSJxhrHQ0jyCgsLiIy6TPmNPA0vTSeNAwzpzG4LzcsgicKINQW1g3cBNr78/G05wvem9VFzi3GYr1ytQyvgKsfq/2rGa7VsEe0tbnuwGvJB9JO2TffXuRl6gfy8PmwApkLZxPfGs8gDiV4KVctqTBOM4YzqjINMbQukizPKvQoCCYmIkgdwRcVEmkMGgfvAvf+mfuA+PT1L/Rz8/zzFfWz9lX4Ovrf/LAAPgVXCnAPdBNEF60beR/ZIuEk1CVvJt0m9CZBJlolSiNxIJEcshcXE8sObgpOBQoAS/pa9TjwFesz5vfh/t1+2iLYstUH1M7SH9Kc0vPTSdZB2fnbh97z3+3gxOJA5d3nJOs172vzO/ck+rj7fP33/oIAJgJ2AnQCxQIyAwkFkQY7B/MHeAeCB6oGRwTPASj/Gv7L/dj8CvuO+Bf3S/fO+HD65fsg/Kj7mvuO/BT+U/8UAXcDWgbdCJ0K7AudDT8P/RB/EmwTIxRGFZkWtxiHG7EcMx28HJ4b+hljF8wV8hQqFHoTjhAwDWwK5QdZBq4EkgJk/1/8dfrp+bv64ft1/N78svwT/XT+PAESBCAFGQUcBY4GLwkQDMANzQ5HEOURJRLyEb
8Qvg9PEKIR7BG1EB0OkQvSCXwHTASfAJf87vc98kzrs+X04GrcStgD1KvP0ss0yIDENcGIvh+8aLsNvIq97L/IwQPEGMedyrzNu9C90y7Y6t7l5lLumPTX+bj+hgQSCloPChRDF5cZLhwuHiYfgR6OHRgdARwuGmIXDRRtEbEPaw7zDV0NxwwEDSsPBBJqFZcY5xlqG5cdvCDNJK8oZyu+LkcyNTV1OF47jD3OPsE+nj15PRg9eDwSO5I4YjbNMiQsUSQyHAMV7A6fCM4CEf269jjxVuwc5k7ev9dk07/Pc83CyV/Gn8WcxbLEFsXEx0DL486S0MHPU9Df0yTZyd754v/lHerB8QP5iv3A/U37V/uF/XsBsgR1BJgChwBp/8H/Nf+6/B37wPrQ+nf5ZPZL8qLvle+Y8jD3vfvJ/+wBVQQ/BTUI7AwVEKUSkBRfFSMY4BuqHa4eohxiGbIXPxbpFVwU8g7zB68A6Pu4+T34ifcP9p3zPfL+8Ofwg/FD70Xs4+ww8B31Avku+fv5Uf27/i7+1fzf/MT/7wHQAK3/xv/f/8n/ePz79/f0lvPW8xL2xPi7+Rf4//VJ98L5lfxa/jr/2gGmBvMNOxbMHZsiKyTFI6EhPCFlJPcoSS4XMMwtASmqI7wf6B1xHYIbAxf8D40IVwPqAdcCJAIE/nb13uvS47jdYds425rbftz+3IPbo9hK1EzPac3BzwPUGddO2KLYCNuL4Zvobe4U8m/1mvp1/4kDagaGCiIQzxUcHBEhgSNzJNQjqyJPI7IkWia6JwQoZyivKG4nOSOfHUQYkhQhEs0NCQe9/8v5L/Y680LtsORJ2zXTGs52ymPHI8XhwzfD58H6wFvDV8ku0cjY897i40vp4u+Z+PwC1Qv4EMQTBBevHOMjzCkfLDsrKyhDJV0kECRkJCAiqR6LG6wYDRexFLARFxF4E2MWgheFFSMT4BFKEvITqRXEFfAT5RHQENsQVA8/DO8GuAFv/Yz47/Ql8qTvo+3S6rjnSOTv4Ojehd0h3AXbbdrA213fC+NQ5sDpguzi77HzKPbr+K77zf5/A9wHeQtPDpkPsxHXE6UVxxZFF6kWMhUQE1kQVw3YC/UKMwqTCJwElv96+uT1qPIB8EPscehg5WLkdeTx5W7mFef356roEep/6+PtffF49OL3wPu3/qYCkAWhB3IK+gw9DkgOaQ3QDNcMXA6eDzQOowrpBdMBoP/W/Vn7jfjs9Z7z4PBB7vHq6Odu5vLlbOaq5yjqS+1W8ff1QPp5/6wE0Ql5D+AUWBqIHx8k4CfxKioumzFDNq473T5AQEo/9TyjPAY8uDqROBU2tDSsM6cxIS6NKBoibxwHF00ReAooAwv8CPeO82vwRO2a6RDlf+Bw3JPZa9i01gbUm9Ln0fbRE9N309HT49OG06vTY9RM1UHWYddT2MrYE9l02TnaBdsG20baH9lY2DTX29U+1YvVOdZ41w3YGthB2HjX49Xg1FDVENeB2RTbudza3ifi/eV96VTtMPLk95b+0gX7DEgVcR4aJw4wjjgYQEZGvkuDUHtTnlWfV7NZclytXv9eWF72XFNbIFhhU4hNIkZ+Pyk6gzQYLhIlHhtDEzQNWggHA1b8C/Yz8ADrheeU5DzizuCn37bedd5V3wLhfeLb4jPjoeTL5wbsS/C+9KD47fuA/iD/0//sAMYB8AKZArYAm/1n+Zr0MfAc66rlgOEw3aLZ99YV1QXVzNRQ0zbRrs4ezYHMWs2bzz3SxdZ726nhqOnx8e76RgSPDH4UMxyTIlUpDy+lMwo40zpgO7E7ajvEOa01ES6nJFscABUDDnEHGAEH/E332PF07XDqoucJ5U/h9dxh2fvWTNW11CvViNdV2cPa3dwP3xrijOQ15qLpzu5w9AH8sAMuC80S6Rn/ICIory2YMBgyqTJZM100uTTFMh4ufSjNIsMd2hmLFtwS9g+NDP0IbQYKBKsC/gCe/of8LvvZ+XD4aviq+c
n7/f3O/oX+pvxm+5P6Nvpg+Zz2ovP974vrN+Vz3YvUeMxixx7FJ8UGxZPDeMInxIXISM8i1gzb+eDv6NrzXQARDWgZZiUfMYY7HkP6R3FLDk4RUOpQFk81S2tG8EAnO9kzzysfJE0b5RFsCOf+QvaD7qPnaeGT23DVNc+/yOTDG8KmwpfEm8eZyx/R4NdQ3mDkj+k47YTwmfT/+LX9ygITB7sKoQ1oDq4OKw9CECYRgxBXDmAM2AsgDFsNxA4iEc4TmRQ+FFITEhMRFMQUchRfE6ARYRC0D04OiAxjCqEHbgW5Ahv+k/jL8hTuJer95jDk7eES4ODdBN1J3nPgQONC5bnmEujp6GrpG+oc7IXuQPIL9or5xfyk/gEA5QEVBLAGZQk/DKgPwRM6GPwdOySqKLErLiwoK9MqpSrKKRIo5CQEIrkfwh08Gz0XghHOCj4EIP9F+gf1bO9+6ofmF+OM4Njd7NvB2vLao9u03E7dPd5a4OvjcOge7ejxGvZa+n7+3QKqBk0JNQudDYwQIRTiF5Aa6xsuHI8ckxzoG0MaghchFKkR5g60C1kIHQTH/237//aA8m7txeic5Hrgt91x2xfaddrR2g3bXdvC2ujZpdrA3JHfL+OB53bs+PFT9zP84wD5BN0IigwgELISLBSbFOUV+xd/GWMY9xREEfAN6Av3Cd0HSwaRBBADMgLIAAgAX/+W/hb+rP0d/vL+CwF9BNMHlguzDoYQjBIvFTIYDRuWHKEemyFlJI8mxSdlKWMsFTAGM+c0nDU7Nbw0ODTSM5QzZTOqMqswHC3xKHskxx73FuALk/8Q9QXt1OWB3vrVBM0sxaW+tLiDsy+uGqiYo6SgsKDzok6lX6iYqyGw6bTRuCO9JsH4xbLL9dAy16/d4+Lj52XsdfF+9x39bgAzAV8CuwTlB+MKeAuBChgJkQfLBwEImQbsBHMDLQPvA/ID+ALOA8kGbwnNC8UNKxBPFCAZPh2jIJIkVCkXMOs2gDpWPIU9dz4QQbVEHkfjRz9GlUN3QfY/CT3lN4Uxpyr4I1wd0RZJEnsOXQmyA6b8s/U08E3rLOiB5gHkROFp3jjbv9iS11zY39k72r/amdrQ27zeAuGL4gHlGOhL6zruFfGL9hz9wQPfBiEG/ASBBcwIbQ1FEMUQ5g8FDj0ONw/GD1YOewmnBAsCgwKiBCEFGgSjAQL+xPll9TTyyvHp9BH39Pjz+Ej3zfZs9mf4KftZ+9n4ePXu8qjz5fS8863xoe6p6w/pX+XM4T3fLNyK2jDaudlo2brY5th124TfauMA58npb+yG8Bv2Uf1aBjgOJBOmFTwVpRVTGpMh1yotMUoyrjBfLpQsZCoMKDQmnSUgJZUjbyDIHAcY/xIKD5wLYwi9BtgF3gN0AXf+Uvy/+1T9Tv/W/8n+7fx6+g35f/hV+cX7MP6r/Ub8fvsC+8n6UfmV97X3JPjU9833T/jQ+t794P9UAEQBCQOVBVUH5AjnCdkKDg38D6cS8hPPEiIRoBCYEI0PQw17C20KyQo5Cg8I0ATNANn8/PjR9pL2fPbD9HXvguhZ48zggODU4NPeYdry1mPVUNYa2ZbbP96d4JTh5OEY4q/jvuW46a7vXvV6+mD8hvxy/isD/AjEDJ4N5gwRDdAPahQ9GAIa4BmeGOoXthcxFs0TMRETDzUOLQwpCVQGJQObAPD+yfuw+O71y/Mf813ztPNc9ML0FfRi86Tx4fCm8THzJ/Y5+cv7Z/4EAa4DhQU6BmEH/wnYDskTEBdpGaca1hv5Hd8g8yKwJBwlKCWAJTAnJCn8KCEnNiQgIrsggB/CHQcccho/GO8UJhGrDMoHvQJf/rD5DfVe8KrsJ+oh5xjjvd1M2STWg9Tf03PT8NJx02LVZdeP2TTbZtzs3sDi2OU46P/peOvR7ZrwwPIo9LDzxPGP8M3vXfBC8r7yj/Bt7cjqY+tx7q7wAvIC85z0uvah+Ar6IfxaAPAEcQmEDcMQ8xPqFk
QZ9xvFHnYgUSIKJM4kryTWI2wiyyHXIUQhPR8+HMQakxrfGt8ZfxZfE5sRARI7E7UT1BPAE9oTehQtFPcSrBASDXkJOwXJAIP8NfhW9GPvIurm5aHhft2o16DQY8tZx1rFtMRGw6bDq8V7yCrMs8/H0sDW59tS4sLpOfC+9Rz8PwKoB7YNJBNMGH8dlCIoJ14qQiwhLScuvi8fMG8vaC3gKQ8mSiGyG9sVlg5LB2EB+Ptu9lrx3OwU6JTjit5X2lDZ7Ngl2Xnbt+DJ6QvzwfoVAfIGAQ4FFkkdnSOoKEwsYDAeM3Q0+jQJM+ouNilzId0X3w10BIn88vb48Qrss+VH30DaEteB1DTSodCvzyfSitat22TisOh17zr2A/xBAC8EIAgIDeYS/RiwHvkj4yhGLb0wxDKLMi8wYi6VLKIquyhbJaAg2hsRF30S/Q2+CJMDKP4q+Nzx9esp5ynjB+Cv3dPaRteN00zPw8wCy9TJeskuynbM6M+/04LXettI33biUuTf5X/ofOtP70TzKvdX+03+vAAVAxAFOQZQBmoEDQIC/xb8MPlN9pzzF/Fh71zuYe6R7rDufO6Y7i7vD/B38Lbx2vNN9pj6pP/oBOgKMhE2GMoeSSRkKFgstTD2NLw5jj0gQbZDFkUCRrBG8UZeRatBQj3sOAE2DzRoMlIx9jAvMV8xlTD/LX4p6iNRHgsZxhMxDd8Ftf7M+HTybuwN57nhct1P2WHVw9JU0NHNe8rzxTXCib9mvei73brbui68QL1AvhHAUsLPxbHJIM5A1KnageGC6LPvA/dL/WECwwUcBzIHVgd9CCgLgg7JEJUR0xDED/AOeQ6DDlsOmQ25C9oI3AXuA6IDUgRSBNUCIf+e+lD28PKk8Ajvq+2G6wrqt+jB6IPq0+tT7KvsLu4L8VH0Xff6+Vj8bv+mAu4GuwuXEFMVyBmeHsYjzCkVMCs2ATvvPThAn0NsR2JLyE0fTttM7Up3SH9FYEJ6PW83NjHMKoIkPR4IF8MPUQg4AX/6+vMT7p3o2OOj31XbOtbh0GvM8shHxqLDDsEhv2O9w7xbvCK80LvOuzK8Lr40wWLEa8gxzV7TFNoF4cjn2e2Z8/f4Of1mATEFmgivC80NaA+REZUT2BUbF7kWIBa6FD0T2RDIDbsL1ApqCkIJAwdVBGoCRwEbAXgBTgExAZ8AtP/L/1gBLARqB7wJogu4DckQahSoF2obgB8aI6QlQiZbJfgkHCSsIlAgWx1mGzsZZxaSEuAOFwvlBwAFCAHu/Lf4d/Sd8cDvLu4Y7BLqxOio5wPnAuaq46bg+d0q3FDbCtoL2J3WltZ21wbZkNpA3LDeqOKY5xzsRPBu9YT7LAJQCIcNOBItFpgZ6B0vIpQl0ybgJYglhiVqJJMiWh50GMMTBA+kCi8GnABD+8n3APXt8nPx9O/q7wPxafLY9Ab4YfygAf8GTAwdEaIV3Ro5IGQl4ymsLd0xWTX2Nlo39DYiNQozHTAvK0MlZCB2GxQXWxF1CeUBIPnU73Xm19ya1BjO4cYewKO6SbZStIGyhLAHsI+wxbLKtSa5Wr67xU/Pddlh4tDp2O8q90v/ZAbeDPIQpxM5F2Ib8x6CIAMgrR3gGjcXuhFyC0QFR//v+r/2YPKI7x7sueoa657rx+3V703xwPSI+A7+1QWTDGESqxguH98lrSv9LBAruyleKZQqZiySLD8rgShwI1AeRhhpEY4LOgeKBdgDOAFt/R/6L/hx9gT0lfA67dDrfeuz6i7qBuvU7OXv7fLv82HzDfNc82PzNPIr8WrxTfTx94n57Pu//mEChQZtCA4J6QmOC5cOdhL9FZcYUxqoG9MeHiJnIqAf0hu3GlccAx0kG9cWqxH1DuIPABO/ExUR3QugBTMAX/sW9kvy6PDl7iXr1+YO4pzbUNc21HvRc9BVzuXMgsxRy3nKscmbyIfHQcdFyIbLZNFJ1yjbvtxk3UDek+CH5GjpsfAS+Kn9qQFfBY
MKDxHhFmMaBR3lIBEnQiwKMss1MzdsNwk2fzQPNPkxzCyDKDslrSTtIl4edRiEEogOsQuhB2oDRf6P+Q74Vfm8+1L7Jfb+7knrCe2F8sX3Zvrk+fb3aPa/9/n8CwTwCS0MNwurCaoKVQ1QEDcUtxfTGdwZcRcvFJ0R0w9LDo0OcQ9pEBoQ5A15C3sIUQU7AQD/pv7k/U/9E/1g/aT+Gf/Y/HH4NvSO8LjuZu6+7Y/tAezf6pbq/+kJ6fLnpOeE5wLnFua/5cznBO1+8rT2Lvi39iD1yfVU+QD/sAODBbYETAMPA7ACHQKDAQQBdgElAP37+PYg8+LxwfL88jHxx+0N6j/pw+qi7QLw8vCR8O3vu+5O7kXvJ/DV8AjwBfBN8pH0WfXm82fxdvCI8N3yI/Uj94j5VPub/fsAaQVvC8sRtxUIGDoaYB3GIYUm4ipiLsgwkTIzM6sz0zTMNqY3bzbrMlEu9yoOKE0llCEVHZsYSBOsDbEIZAT8ALP9qfnb9VjyLO9P7KrolOao5snnE+m66c7pWOrb64Lte+9B8gb0u/ST9fT2UfnR/OMAnQMfBZQGawgUCs4KqwhyBcEChwE7Ad8AZv+q/Lz5nfYJ9OXxxO9G7ePsxO458Dvwv+4r7hnx5/Qq92v3WfaS97H7Xf8fAvUCjAJeA0cE6wMxA18BS/+Q/uL9of1K/fD8R/yk+6D6wfiQ9rzz2vCi7rPt8ezh69jqWul66TnpwOf05mnl+uMS41DiSuO/5HHlhebW6KPsNvCQ8mf0pvf1+3//7wPICMwOvhccIJEnVy6wMwI6B0A/RF5ILkwvUZ5WPlqJW3Vcy1yNXN1anVV/TctEwjzHNSkveifjHRQTqAhV/mP0GusA4oXZVdETytzDM7/mu5y5YLmAuLG4TbkyuZ66l73TwVPH68ss0B3VGdsu4ezmBev47F3vXPIS9Zf2+/Sn8T/vye0w7KHo8eJa3MjWv9LQzn3KVcfcxGHEcsXExt7JJs4H1OzcsuaJ8GP6kAKTC6cWqCFzLUw40kGKS+FTsVnmXhZkNmmPbZJvOG8EbGtmD1/WVwFRIUrjQUM4VC60I84YOQ5tBIj7IfTz7B3nfON04ADectzh2rvZ3Nm32rjc59/+4+voqO6m88z2CfpM/T8ASQJVAwAESwN+AlYCFQPHAz0DlQDj/U/8PfrJ+C33ivZW95f4wPpP/Rj/lf8k/yf9G/on9gby5e6F66/oveZ35GfiKeCj3QLcoNvM2hvav9jI1wDY/Nhs2jjbZtwd3xji6uNW5S/mJ+hC6ozsje8Z88H3sv1qBK4KLg9EEv4T5BXxGCMc1h/yId4iuCO6JMAkfSW+JXQlcyY+KHcrdy9WMtwzgjSFMhwuOyhZIuYclxdcEX8KiwNO/Yr45PIL7EHkzNuS0/TMe8hYxa/DKsPxwwXHqMw/1PHcOOQl67nySPucBSIPuxa/HJsiDimxL9I1+DmZO5A7Sjo2OMY16jIiLyMrcSeOI94fKxvOFMQO1Ql9BX4BNP0i+fv1fPPA8Y7wXPBT8Dfv++4S7gfsWOle5Mzeitpc1zbW7dY12dzb593336ziHOeG683uAPFr8xD3p/vMAOEEhwjqCvALqAzNC9EKwQk0CLgHVQhmByQGiQRdA3sDcwRmB1QKnAvoC/cKiArjCkEKrAjNBDkA1PsI+Wv3NPaL9EvxLexA59PiFt4u2oDWq9NI0ozS8dPU1VfXR9gV2tLc7t+P4hbkYeb46lPy8/ntAAsH2wxpEyAaGSCFJVkq6i53Mko1pzjTOl08wjw0O1M4lzT6L/YrQijdJH4hxRyNFzYTSA9sCqAEaf6B9+Pxp+316Nbl1uKQ4Ing8+H/4ynm0uc46Xvrt+7k8h/4vv2XAzIJcg5eE+YYXB9/JAoo2CeiJeckTCV9JoomPCQxIPwaFxbrEPUKfAQS/UD2NPDf6YrkyeB63sDcw9rK2FzY+NgH2jXc4d464Xfkved56z7wx/Oo97
H7S/61/8QADwKEAw0ETgM3ArEAkv7Y+wX5k/aD80rvVupw5YfhJ95A2wPZBteP1nzWnNY810PY9tpj373kqOov8MH1NPuoAPwFlgsBEQ4V6RduGTQbxR44Ih0lYyhrK5IuqzDZMVYy5TOSNns4TDlAObY4uzclOFU51TqGO1M57TU9MrEv1SzmKCYk6R5XGQITIgz/A8j7U/SG7qXp5uNU3DvU0sx0x8bDX8DFvtm+Xb/uv9O/JMAlwqXEKshMzEfQk9Vg2pbe1eIo5u3o+esi7k/vDvBm8CLxZvFd8ebw+u937r/soOtq6qDoNOe757TpXuxZ8Hb0gfn6/m8DRAecC1MROBhtH0sm6ixUM1c58z1NQrlGCEtNTg9P401hTC1L2EpIS8FKkkc8QdA4SjD0KMkh7RhFD0YGfP8Q+0f27u+P6djjqN9u3PXYFtff1tzW0NZa1qDVw9Yq2QXcJeCO4pHi/uEl4XviZedP7f7yRPeK+Dr57PrV/Nn9c/ys+qr7pf5tA9MF1QXkBE4CW//5/E76ePhf+Bn4IPhy9k/znPDu7+7xk/RA9Ovws+wf6ifsrvD29Jr3Cvms+Wv6mvuY/Fj+JQFrAz8FugX5BAgFgQT1A4QD0QLLAED9Qfm494H4JvpM+/X5uvbg8rHvhu787wDzYvcc+yf9/PyE+436vft2AFYFQQg1Cg4L0g2vESYUUBWDFZQWjBh6GjobDh0FID8jOyY3KN8orilYKmUraix9LLgsaC00Lw8xLTFwMKwvai4DLh8s8ijFJFUfPBnoEvEMwAYaACH5RPKm7O/mHeEY3G3XK9M+z3XM7MrjyvrLo8wazv/PzdE71ZfZLd3W3qXfy+BJ5Brp0ut06+XqGu0W8w36Rf64/zUAQgF/AnUDQwRHBZwGeQiWCUQL/gw0DdYLUwjrA8v/P/zu+NL0AvCZ6tXklN/92YXTeswpxr/CQcPuxVjIaskqyUrJIswo0gzaseH159Hu/vcxA9EOHhn4IIonrC6fNXM8vUKmR5lLK0+9UWtSTVJGUQFPDUyTR3BCRT3+NyYyDyvQIrUZDBK3C84F//84+sD0I+9i6TnkbuCM3hzeYt7O3qrelN484LbkG+tA8cf0uvYl+ej8NgK2B/MMAxJKFY0WrxadFvAWSxfQFdoTDRKHDxwMfweTAiv/7Pxy+4T5IfaL8rrvcu6T7uTu6u0U7efsHO7I77Tw7vBQ8SXyI/Pn89vzb/M+9Gf1jvZE9lf0kPJx8q3zifSz8y7w7+sj6UbnG+eC5+DmSebM5ALkveSl5jzp7+tW7u7wwvPK9zb8swC8BEEH0AlxDAwP2hFDFXYZQB5bIZAiSCLJIbAiGSRSJYQmICdGJ40mRiRFIvkfUR7NHFgaLBdME0kPzgsvCFwFowMXAsYA//3n+vH3M/Xo89jzafMf8jjvIetm6MvmSOZS5t3kLeMS4vLgHeC830Xg1uEq49LjauT25Nvluej56xbvd/L59Cb4fvwXAQYFkAedCHkJkwpMDI8OFRHrEoIS3hCDD+kOCg9kD68QpxLTE/YTJBP8EvwU3RcTGr4byxuEGskYIRfHFoMXyRg/GSwZlxjKF6MWwRM7D9AJEQXRANf9i/vL+af4EvZc8oLuFOt856zkxuGK37veEt3t2rvZK9pW21PcE92r3azeMuCe4TzjUeY76Yjsw++X8tP1xPfr+Er5t/gX+ZH6p/qp+QD4UvZg9gj4nfkX+yb8zPvO/Cv/zgEzBVwI1wtZD/YSbBb2GX4dKiI3JzYrXy1hLAIrvyjEJtQl0CO5IesgCyEkIhojlCCNHDkZeRWdEsoPaQvHBm0BRfyY+IX2/PXn86Tuo+kW5h7jfN+t22TayduQ3mjh8+K05E3mqecj6Y/o5+Zg5dnlR+nk7hzzsvbg+Tb+ZQLmBaEIzwelB4AJDw7AFM8aah5uIp4lKicaJ04l+iNrI5sixyHWIJseuhv9F0gU+hAUDSsIZwNO/cf33PNx73
PsTuop6FLnZ+bf4jze2Nkb13LXEdmm2vfandk92EzXM9a/04LREdCVzjTOMc0KzDTMm8xbzpXPb8+Mz7TQr9Xs3Yjmbe6i96ACtw0ZGZ8j/S0OOcFCW0luTTNPc0/VT0BPEk1mSZJDITxnNKYsOyWIHSMUhQpDAQr3Hu5k52Hk7+M85FDmD+hB6iTsWu3x8Rn4Xv6OBUAOHBkVJfYvwDdGPcdBhEWDSTdMp02DTyBQSE6bSQREwj4+OXUznCrRH0UX7w8QCUgBb/c866nf3tXmyxjD3Lres9quxakapoekFKZyqc+smLANtPm2ILrKvdjDQs3I1nvfyebr7Pfz0Prc/qsCLQQsBFcDTAAr/Sb7Ivqi+SD4ZPQN7nHm9d93227ZKtiY1ozV+tXn1iHZqdzY4VboL++S9Mf5wgDCCQgSCBn3He0h0SaTLJcyvDcEOjI6Rjs4PilDWkXXQ48/OTt4OWM5nTizNrUyCi3nJkshuhwKGA0TXw56CtoHAwbbAmX+2/iW82Dwde9G7wvtSerL6I7piutI7LTqUOhZ5gnlluV45/zpqe3s7zDwyvHo8/P2rfub/qIA4wGuAYkDrwb1Ca8N2Q/bEDYSYRNeE0US5RAvEZwShRP/ErAR/w8PDsoLuAePAjf8k/bs8QPuGunG4n/cvdYY03bR7dD20DvRzNCM0HXRIdM21afXuNp930/lmOq77qbxXfRx91v8JgLSBhQKuQwJEDEUKxmhHTsicSYRKVsqeioFK7sr1ywaLqEtAysZJmIhax+CH5Mf9R0oGmkW8hJjD1EMWgiWBPcAD/3l+Vv4EveU9XPzDvCq7CvpPuUC4h/g396G3lPepdzq20Lcttw93lHf2t/T4IviruTY6Jbu3/Tz+jMAfAQfCHoLAQ1vDbANuQ4tEWoTJxW3FjUXtxfvF48XWBcRFksTkRA4D7UOSQ6pDS0MQworCHQGXgUqBC4DegLHAdj/z/yC+Yj3Vvd6+Iv5Lfmu+P73Aflk+3H9c//ZAFgCtASgB7kKfgyEDbcNqQyLDJMN+g1TDZALNwrcCYgIUQbCA+ABmwAO/0P9ZftE+Cb0sPBQ7XzqaefT4hrf0Nzo2qfYTtVQ0o/RsdHo0fTR5tDA0UbVxtow4mvpH/B89sX8wwILCbEOrROiGLgc8iCKJNAntip9LY8v+jDLMeAxaDGbMGowhzDkMIgwgS/ZLesrSikdJiQisB3PGdkWXBXvFBEU0hEYD5MLhgfhAir+Gfoe96z0k/Hn7GDmXN8w2b7TYs7jyDzDyL4nu1a40LWktC6167Wttni3/Ldzus++cMQCzCzUSt1b56TxJ/uYAu4HBAwmEOwVfBuOHx0i7iKAI+okKybNJTkj7B5OGugVQBLLDocLtgkHCdcJrgqgCusKhwujDVYRmhUWGg8f/SN1KdQvEjYRPIRAvkKQQ9RB7j7ZOwo4ZjSVL3wqeCTsHCgVlg1aBv3/k/p+9bjw0ut05pzhjN4Z3UHds9333FPbhdnr2ALZLNli2SHa99vL3lPiLua06izvzvLw9f33/vna/DMABASOB8wK5g3jD7QQ7BCMEQoSZhGbD7QMhwlFBU0Ahfon9LPsZeSk28nTSc3Wx+fCEr6xua6257Wotui3trhguqu90MKsyVHRJtkU4qrrLPWQ/WwExgrREA0XyxzPICQkoyaGKcAsZy9FMVMxjDAhL+AshyqKJ30k5yB7HDAYOBW3FJUV8hVwFOMRjA+WDYwMkAyZDCcN+gz/C3ULswu0DeAQkRNoFrEYwxr/HLweECASIawh0SLLJIQmdSj5KNMoCilOKPol6iFQHP0WxRGuDMQHfwIp/db3ePL17NfnqeK83WrYENN9zvvKT8iixQvDp8Eowd3Ba8MkxgTKMc4+0grWqdoI4SXoDO8K9WL6y/9eBUwK1A0nENcRxROtFF8VsBWyFTcV6xNAEa0OyguFCOsEbgBS+zf2M/E37HPoruSy4TDfIN1Q2wLaSNmf2Y
7bON7f4dPlSOqx7irz1Pe1/JYBxQVrCfcMOhEdFR0ZCR2wH6sivCUEKT8s2i1mLskuJC+5L84vpC54LEoqvihkJwwndCZnJRQkrCGzHjkbUhgAF78WahbIFHERaw25CUsGPAJM/KT1re9+6g/mJ+H03NrZ39bW0xPQ0cv4xwjF28PDxPrGa8mey0fOFdJA1kraT96a4qfnf+y88K/1b/t2AS4HuQuJD3ITPheDGr8dmSAcI80kKyX8JPojHiErHdwXRhKRDXQJ2AXZApYAaf/r/nT9QfsX+e739/j/+/z+NgF3AocCpgN+BTUGKwdIB20GcgX8A1IDzgPRBAcFtQT8AjgAMP5K/Ij7NPxV/UL+nv5K/eb76/te/NT8Uf02/Gf67Pgr94r26fa592/4hfir+Bf5E/nq+C/4jPcq+Mj4ufj/+Pb4YPih97z2+fRO88Dxe+9X7qftnu1j70/xTvIz8zfzrvMZ9mH6wv9dBk4O6hRWGhoeKCLYJwIubjMLN0c5qDt7PrhB1UNiQlw+eDn1NCwxdC2GKKciIByZFloR3AvKBcD9g/TK6z7lHOCL29TWYdJL0EPPo80IzA/KksdJxizGBccrySLJuMi3yWjM7NAw1SrZzNyf3ybiEeTz5OTmj+rG79L1yfji90v2pPWL9yH7wPvW+Of1c/RR9oD6u/2a/73/y/8AAZMCcgTkBmwK+Q7OFLkaKB9lIX8jEieqLAs0XzmLOrg5tDkmOok6iTt7Oxc8jTvyN2gzyS1PKAwjxhxAFcAMTgU0Afj+J/xp94zwYOpP5xXlFeH/28TWatTW087UL9fM2gng2ePy5XLnTulX7APytvhGAaAJPxDfFQ4ayh0CIqYkwCblKDkq8iv3LK0tcyy8J2kgohcLEUkOjgutBrD+ifRS7L7mceIk4FPeoNq61dLQf8xGy0jNKtHB1STaLd0s33DgKeEr4mrka+ee6i7syOs47BPssesR6kjpfegZ56LluePr47DkjeOI4Zjg/uHB5Yjp3uzb76HzxPiV/dUA6QKdBJ8H8ArQDsATwhedGoYbqRpJGi8adBmjGO8WXxX+FKMVKxjsGnUbvBrKGbEZtRtIHv8gVSOQJO8lgycqKTQqCCsOLKQsCCznKQQnmiMaIPUbxBeqFBMRzg2pCcMEggCm/MT4wPVq86jxA+/86xnpceZe5TDlEefe6QPt/e5i7zHv2O677svuF++l8NLxOPJT89D0/fYq+Z35S/mJ+JL4D/la+vb7R/1m/nr/ZwGJAicC+QAtASsEdggoCt8HZwO1AK8A2AEYApYBjADL/sz8vvk99xT23fSK853wkevp5Qvh7d6S3tvd2NvC2CvWPtad18LZ1Ns63FDcut1K4ZHmVOyN8VD3H/7YBT0NbROwF9EbwiBvJlIsEzECNTo4Hjo3OoI4lzRVL9Qp3iQZIa4e7xufFygS6AuFBVL/Kfkf9M3vSutV557lRObZ6K/rAO0V7Rnt1+0M7+TxwfVz+lIA1QXOCt0O8REDFSAYUxrHG9kcOx3SHbMdPx0SHVQduhwRGl4WYRFHDHkHoQIe/v35VfYa8kfta+gb5PDgVd6w22HYNNXA0prR5NK/1FzWstiW2ovcSN7/31/iZeUq6QHtG/H69Pv2vfjK+vz8QwCNA4cF2gZzCKwKIA4gEY0SVxP4E3sUrRYuGXwbPR7lH4ohXSPsI48kLyQ4It8hrCE5IVEg7B4lHT0bAhnoFVoTyxBODVgKSwiLBpkFsgNYAbj9bPmZ9Vfyqu/Z7DLp2eVk47fhwODc3/rexN6o3tLddt2P3Ube6d/84eTjy+Vk577p2uxR8OvyhvNs8kvwNu+77sbt7uuf6SfokOeu5gXmXuZq58zpnO0B83P5LgBoBuoNpxauH8wnSC3lMEA0JTgaPN0+fT8GP/g98T35Pnc/ZD7IOzA4TjQZMCkqDiMQHIEW7xBuClwCZvlL8sbrquXd4MLb59f11rrXTdoN3n/hQuYz7H
vyBPnn/l4EfQmWDr4TORmKHjYjCSZgJ0Um/yJhHlMZMhXqDp0Gp/3o8/Xq1+GH2IHP8sWyu7qxrqlEo6ue/JrUmCOYqplzncehiKYQrCOyyrm4wpzLXNWW3zHqyPRi/vkGMw5CEwkYhB0MJAsruDD4M5Q1zjbbOfY97UBZQntCV0LxQRtBNj9VPa072zkrOMk1IzMTMPQr6ya/IXUcjRc7E84OYguuB4UEKQHR/fj6kPcl9aLzcfIG8i/yQ/NK9jf5c/tP/cj+7v9KAPX/TADB/03/AwAIAOkAaALkAsICrgJAAVn/B/6I/Gz7efra+oD8kv+PA2gHsgoKDXcPoxIHFvoYLxqbGZUXmBWQEzQRHQ/+CuQFmQAM/GP4EfS37SHmW9711mvQmsslyMHFPsRgwhvB1sFTw4vFycfxyQXMq81Tz7/R7NVE20nhh+a264nxtPfB/a4C5gViCacMmQ8/EQMSHBPJFNwWbxiSGc4ZARqrGKUV7BG/DaoJbgasA5sAZv0I+8n5H/kD+R75cPgv9731Fvb5+H7+SgUkDLoSghhVHhokySkqL8Uzzjf3OxU/q0GvQ9dE2USEQic+hjj7MegqrCLCGRcRugds/oH2mO9O6WrjJN2a14DTn9CYzpbNlM0ZzvHPENJA1CzYG92p4W/m/urj7tnyZvbG+fv99gEgBfAHFgqiC88NYg+dEOIRYREaEC0PDQ8HENARSxKZEcYPGw5xDbgMJAvcCOwFrgPOAmABd//n+573+POl727qSuRi3ljaDNjE1dnSxM8czSfMUcwFzczOAdHS077WgNnt3EzgzOMF6Cnt6vHR9bf5Dv7MA64KqhBMFgMbKx8UJBMqiS+DM141HzXNNOM0jzTmMoAwni1rKzApJSacImAehxpFGLIXWxi6FysVMxKUDrgLLgmKB+IGgwXTA6cB7f/u/mz9nvsg+aX28fQW8zPxc++m7oruWu9T8DrwIu+x7dHsku3h7oTwY/L78/X1G/e59rH1OvTG8sHxrPBk71Hu1+2E7ZntG+6P7ovvQ/Er81P1wPcC+n38Ff/BAUMFdQlKDaQPZRCFD7oNGAxOC/gKkwrcCJMGVAUCBB0C6P7o+kv4m/f894n4N/jg90b56vvj/tIA6QC8AG8B9ANqCFoN6xH7FGUXERrjHBEgfSLPI04ltiZPJz8nSCaaJBQiMx+XGlQU6Q0pB0IBvvvd9Z/wLewf50jiGd052HTUztDgzSvLBcl9x5fGzMZ9yD/KBMvXyvPK6csNzq7QFNOY1tTZptxS31jhv+Ow5tHpNu3/8Pn0BvhK+iD8sf/dBGEKOg9AE9sXSB3QI6IqUzFTN8w890HERfZGDEfdRjpIuUrKS/1KhEd9QrA8ojbbMEQrHSXiHkYYSBHqCjIFlQDH/CP5ofWg8gnwxu607VDs9uv569Ts3e4I8aHzcva1+Pn6mfzN/RH/PP9q/7P/IQAIAWUB7gB7AOT/Zf+a/vL7J/gC9MbvLewq6VnnouVt48Dg5d1O3ITbk9oc2SLX+tQk1PXVM9kX3RvgcOFN4mnjY+b46inxQvc6/QoC6wX5CY8OlxOSGFAcbx54H5EfJh+3Hhoe/RzTGvoW4xLhDnELigfYAk79b/fS8SDtl+kM5xTl+OJ44JDeUd2q3FDcVNxP3UvfQeLV5ffpje9n9TD7EAFSBjAMdxKPGLMdHiIoJs4qMC+9Mr40oTWFNug3QTnzOdA5+Dc8NXUytC/pLNIp1CUEIQ0bBxQFDfEG4gFc/Rn4OvN07xvsoekL6NLmgeZz5mbmCOdD6BPqouzS717zVPfF+q/9EQBZAgYEPARFA8ABcQBK//b99/uh+db23/Py737roOe45HzimeAu35rdAtzG2tPZK9lV2eDZDdvj3HLfMeKu5FPmn+dw6BvpuOqY7dXxAvZS+TX8PP+rAiAGCQkCDDoPaxKpFAYWzBYFGK4ZfhueHIQcMhxFHJ4cdRxzG1kZBxfuFD0TXxEfDn
AKdwcdBcUDJwJTAL7+Tf18/F/7C/oQ+d/46/mY+5L8Y/2f/cv9U/6Y/xgBjgIjBBYFiwa/CDELtA2ED30QQBFPEtgTCRUcFqQXWRmuGhYbERpBGb4Ychj/F7sWcxVUFI4TjROwE/YSdRFwDwgNYwpSBuMB9f2G+tP36/T+8LPsZeg15OLgON352MTUTdEuz6LOFc/Uz4LRC9Q118fa09253+3gH+L24//l2OeF6YTrQO7+8GDzAPXb9Tb2vfYc+NX56/um/iwBUAR6B50JagyRD30SVRWgFsAWyRZ/F+EYzhpNHCUd8BzHGw8afBc9FNMQvg2ICssHvQQGAaX9Wvpf97D1HfVq9QH1ZPPJ8qHzn/b5+iT/UwKxA+gDgAROB5YLSRDyEiMUWxUwFocW9xQhEp8PTA5oDfwLLwl5BVQCuwDi/zr/Zv2S+oj4Ffeb9hv3xvdQ+KH4fPid+NH5XPtI+8r6MPoa+jH6jPkF+E/24PRR8pfwTe9f7abraOmI5/Xnyui96a3q2ev27d3wYvSJ92D6qP01AuAHlA3LEQMWoBs+Ir0oCy0lL3owADPlNmQ79D5CQEA/ATy2N+sy9S2oKFIjgx4jGWQTmg1RB5IBLfvj9PPuWukK5DveHdlR1RPUddRr1Q3VCNSU0svQXc+tzrLOmM8i0avROtPv1XXYdtoa3ObbmNtx3DTc1dvc25Lcp99G48bltufY6NjpVuof6i/qiOyg8av3nv1TAtcGxgyzEjgYQR3pIeUlESnsK4ouDjCEMKAwyi8mLy4urCsCKg0nNiQoIv8dIBoQFlARjA1vCvUGJAUtBCADIgITAfYAPgDa/q/+Y/9UAdsDkgY/CR8M+g5JEaASpBIQEuESKxU/FjQUPBCKDAQK8Qm3CIUF2wDU+sX1e/Kr8Hjv7+zS6IzkZOCB3S3bPdqt2RzYYdYQ1lLYYd1T47LohOw+7ujv7PGg9Zf7SwPGC34SJhdyG5AetCCkIl0jwSMjJTYlLiR2Ih0g0xvPFagO5AYqAO34FPKX7Pfn5+Ji3KbVO9Fjz43Og80gzLrLtsztzrrRh9Ws2RXeL+Oj54zrNu+f8T/0UfkCAEMHCw2sEBITHBY4Ga0cuiAJI3cjWyM2IsUi6SPNI0IjISHpHrccTxpWGBcW7xJeD44KBAbyAhsCVQJLA7oCUwGxAHIAFAJBBXUIOQsXDh4SAheKGlIcxBxSHp4hISRsJJsiEh+pG2EY4xVfEq8MrQZ0Aeb8yPhn9JLvjeqI5bvgm9yL2UjXJtYy1UPUB9MU0VzPb84izpnN0MxhzJnNNNH/1a3aEN553z7gPuK45J/on+1u8x75Rf4DAhUEuAUqB48J/wy/EM4TXRXLFaQWLBjzGbsathkDGHYW+BWgFWwUExOGER0R4xE2EmUQAA3bCLsFLwQ+A/sCywJBAnQBXQCp/6j+//0S/nr+ov5p/R/8SfyY/sYBsAQSB4kISgqODDIOYg8sEWMTbBXQFuIWrBZ2F6AZeRvxG0oabxeVFfsUNBXQFD8TYxBYDY4KUwjfBYgCB/89+9z3kPVa9Pvy5fHQ8MrvK++t7Y/qpefB5NHiZuK04gTk+eUd5+/n0ejx6IfqAezJ7Lvtce4o8AjzyvYq+3X/pwOzB0MLMQ6VD1IQfBFmEykWOBjdF2YVAROnEO8OWQ07CZcDH/4Y+aT00+9O6lfmreS549Tiq+GQ4Nbg9+F345HlT+jx6rDsge7H8H/06fj7+/b92f8XAQ0CdgG+/zP9fPoT+V/5ufmt+dP5Q/py/CP+Ff+//gf+pv1d/n4A/gOUCKoMOBBdE8cWnRkjHIweICDnITYjbiTLJcUlISUbJP4i7iF/H38bdxY1EsYOIgsDCFEFGQQXBNEDlALrAZ8B/QBWAXcBDAE4AUABtQE/A3QEHQU/BE8Czv92/Rr8zvpZ+f73c/Yi9hL3PPgf+Xr58/kw+tb5xPiU+V779Pyh/jT/x/87APr/8f8aAdACnwQmBi
gGogZRB+kHJAgMBgUDL/9S+/X3afTA8HXt++hH5NLf49pA137UYtF4zjzMJcqlyOPHGcnnywTPlNGw0p/TmdUx2dzdieJd59Pr/vDZ9/v+gAVmCxoQpBRjGR0eWSNAJwIqsiuKLDYtXixCKqsnJyTKH+0ahhXrDwoKIwPU/Mz4Yfcu+O/5cPyw/+YDawg+Dr0UNxupId0nXi4+Nbk7CUKQR9RLM0/yUWVTi1OnUu9PXEuGRPw8gzUrLQ0jQxfNCoP/WPV46yzhudbczMbDkrtntPitG6iGo4CgaJ56nTGdUJ3PnuehQqbUqsOvwrRKulTBCcmo0S/bcOTk7dH3vgBKCQYQvhSiGKAbJR+dIowl2ifwKHopjCrbKz8ttS0ZLRcsnysvK/opdygQJ8cl2SP1IP8ckRlmFlQTxxCbDWAKUgiWBtEE7AI0APn8M/pX+OX2ufUO9GbymfEv8onzMvRj9Cj1x/Xw9m74EfqY/AL/kACTAbQCLQNXAz8EfQVcBpcHnAcxCC4JnwkXCtcJ5ggDCHoH7Qb3BXoEBwIe/+/8OPqx92T0IPAT7Jvo7eU+46/eg9ln1XPS3dDszxPPwM4oz2vQ8dL41TrYotrl3Rnixue77Rf0z/qrAScJ1RA8GFkeqCOsKNIsZjBkMtoy5TLzMqgyzDEuLzIrfibWIaUdZBmHFFUP+Ql4BWwCyv9j/X36RvdD9af05vRL9VP1ZfWi9dH2zffe+F35B/oc+1X8YP6EAY0EzQeUCj4MTw3RDaMO0g5GDnQNMQzLCncJewdCBZQDjgLEAXkAG/7l+hH4VPZz9pz4AftE/eb+igDbAsUFagh7C/0Njw9VEYASTRNEE6wSqhEjELoNPwpzBosCKv6j+Sz17fDm7KToHuU64mzfytxn2izY2da61v7Wzddp2C3ZodrV3ErgAuPJ5TzoReqA7N7t9u4L8QnzH/R39Rr2g/Z09g723/WM9v33kPkV/K//2gM0B6wIiQkKC5cN4g8SEYYRhREoEhAUvRYZGRsaDhluFwAVMBIQEcYQkxHSEl8TTxMHE9sSBRMTFJoVCxd6F0IXNBeiFw0Z6xkXGlQa7xkoGX8YDBhjGKEY0RdMFokUXxKQEKEPWA4JDEcIWgSOAHj9RvpM9p3yXvAz7/Tt9Osr6hzpUOnT6czpzekP6Wboved/52/n6OcN6Knon+k+6uDqwOug7Jztbe6p7lDvLfBZ8hH1gfj1+2T+oAAqAjUDjQQZBGMBD/6r+n/5+/lq+Xb3rvSA8Uvu7OqF5w7l5ONy40rjaOOE5LTnfOyY8Uv1hveK+a37Y//kAy8I9gubDqIQehOvFroZGBzPGwsaIhdzE6IPwQziCiUKUwldB7QEmQFF/439tfxK/SL/CgGYAokE2ganCaEN7RE5FlwaJx6iIcYlUyqYLt8yIjaiOKU5TjkvOPY1BzSoMmIxuS95LEEnQiCbGFcRpwqqA/b7t/QA7qHo2uLE2xXVOs/oyuvHwcSnwbm/TL5Nvfy85rxRvXq+NsCqwnjGMcsy0AbVJNny3CHhWOYD7cnzbfnz/igE0AkZEPsUXRnyHJgfkCLuJJkmJigDKRQpwCieJ1smCSXKIwgh7RziF2QTbBC0DasKfQYZAmn+lvt9+Kz0lPAq7eDr3ew47tLvgfB98ITw4fCW8pL0RPZj9yz4sfn6+rD7ZPx5/KT8rvyf/C79oP0E/Tv8APtr+tr66vo4+jb5Z/i0+Bn59vg8+PT2Tvam9uX3PfmG+mz74/vY+7P7Cfxq/JL9eP8AAcMBuQEHAiEDCQU3BsoGpAcPCRYLGA3bDr8QzhIuFb4WTRhiGRkaqhsMHSAenx6+HogeUB79Hfcduh0ZHeUb6xk0F7ATzg+tC3UHDAPD/pf6APZB8Qrt3Omj59jljOTC47XjAORB5JbkVOWY5njou+qC7Xnw7/IS9Wn3H/p+/Ij+NQCFAXoDqAXnBoUH9wYsBncGOQckCAIIhQYkBKkBXgC8/9
X+sv0t/H37cfsy+hH4pvVG9D70aPTY88XxNO8O7u7tqe5e7+juLu6H7UztxO1678Xxa/SA9+T6H/5pAJ4BawJEBDkHqQpvDaMPGhEaE94U0hWiFYAU9RIMEREPrgzwCT0HEQVAAgH/wPuf+Ev2gvRb8p7wcu+g7hjvr+9A7yfv0e8u8mn1Ovhp+Vb67Ptb/gIBNAK9AnED/gTYB0QLaA7bENsRkhJWE1AUQhaUF6EYxhnfGmscRh2bHa0dMh3xGzca4xdsFbETlhKYEXsRzRCDDxsN3QmyB1gGSgX0A7ECAALCAbQBVwHHAP3/Av9//Vr7EPlK9vHzB/Jb8J7unezT6aPmpeP54EPfVN6p3dDc7NtF243bUtzf3IXdT95z39bgeuL945DluOdK6sPsFe+c8JHxpfK985v1O/jk+sP9lwAFA5oFCwjTCjgOYxHTEwYV5BWIFh8XMBjOGPUYChk+GSsaMRs9G8sZpRctFgAVMhSGE9QRmQ7XCYwESgG+AKkA8//A/fD6a/gV94L2I/f5+Jf6+/vT/Gn+vQFiBhUL/g9QE7EUfBSGFBEWjBgNG2AczBscGogY8RcmGOYWaxNUDncJqgSSABH98fka91r0cvDb7KrpdObF5ObjS+M04yTjLeJc4gvkieei7OzwBfQQ99r5WPwR/lX+0P3d/Q7+sf1R/MP5DvfH9Kzy0PCQ7zrukeyb6WHmheNI4A/dbtpc2V7ZA9vF3F3eeuHW5IfoAu0R8dz07vge/nIE2Qo0EesXxR0XI7QmICiXKHAo1SgiKXgotyZKIxge/BmRFpwTJhAzCxkFEwB0/Jz50fdc9kX1WfQW9Ej0DvWr9jL5tfxNAMQC3ATjBwUNnRLsF64cFCJSJxcrGy4vMFAxvTHdMdwx5TIJNSo1QzLULd8nuSHlGwYWPBCoCnsEn/5V+Sn0Q+765vDfctod1qXT+dDnzHbKsMhDxvjFHsY/x2LIAMfNxYzGs8ihyxzPEdJ71HvWVNho2Rfc7t5j4mrm2eqW8er31PzrAAUFpgjWDJQR0hWAGXkclx7mIfAmTyv6LXouBi2mK58qlCk5J50i1x7kGwobyRmRFVIRkQwOCAUEVP+K+xP6PPrj+kz7VPov+o36T/oj+oz3s/S09Pb04PSn8zvyKvLc8jzzZ/J/8LzuGO6y7tzws/J38xD08PVq+I37i/3G/lgAsAG8A90EbQRgAtkA1ADFAMcAo/6Q+0n4e/Uz9JTyk/Di7YHsh+3+7gnwr/Jx9lT8bgIYBoIKtw+bFCIaCh7iILIkfSebK+MvLTMvNZ80UzKRL4gsYSrkKOomSSWnIxoiqx+YG3cUzAs9BZIApP23+hf2CvGB7YHrl+kp58XjPeB23dfaaNnX2VvaBtvd29Tdnd9O4RPjJ+QH5bnlGuao6Azt+/EU9i74yvky++T8E/5b/qX+BQDOAqoFRgewBwwHiAZjBuEGKwd7BlcFVQQPBK8DxAKMAYn/L/5//Rz8bPqO+Av3r/Zb9+z3FfeF9S302/R39wT7Rf0v/kr9dvvS+9r9pf8GAH0AsQH1BDEIBAlGCFMHNwbdBdEGOQdsB0oHLAUzAxYCDwAe/qT8VPtR+wb7q/ms+Ln3Ivc1+JL5J/sn/hsBUwRFBzUKsQzqDiwRWxOGFigaVx1YH1EgayGKI+IkwyQmI1EgLx1lGcwVuhP1EuoRZA8yCwwGzgGF/4v+bv0M+mH16PDk7UrtEe4B71bvQ+0w6gvnIuUZ5ZTlQebT5cTk2ePp45zk/ubc6S3sT+047TvtLu6B8I7z+fYn+gz8Dv0N/m//gQFUAx4E5QPUA7gDOwQKBJACEwFVAOMAMAEpACj+AvwH+qv4//b59EPzVfJ28THxzfEX85706vW599n65v6TAtAEUAZMCL8KRw3lDksQYhGmEZISxBOgE7wTpRI+EBMOYAvJCLsHcwdpB6sHNwdRBrUFywWHBwwKrgs/DJMM+gySDY0PrRGrFPwYWh24IJ
4ibSMwJDsmCCjAKAMoASYiI4gfzRvdFz4T2Q3PB5oAMPqF9GbuxehR47venNtD2NrUx9HrzkzNtsxkzHHMGcxOzK/NhM8b0TvS2NNM1qTaIuCk5sPsa/L79zH9gwG3A/AElwXNBhgJmwpqC3YLlgoPChAJIgeWBIcAlfxF+b32hPQp8gPwcu6M7XPsvOxO7jfwW/KX9KH2sfhJ+m781/4aAZMDAAaACH4LWw5lErMWKhpDHQ4fSyC9IMEfRhz2F64UIBKZDysNygoMCacHpgYABjIGOQdSCNQJ0AyUESEY0x8fJ2cuwzT7OfE+UUMWRndH7EelSNtJhkrCSVJGnECnORoxzSdzHf0RagW7+NDsB+Kn2D/PcMWZu++yoqvipIefJJsvmPuXYZksnDGg0aRPqkmwsbV9u2zBGchcz9HVEdyS4qfpTPGd+MH+egSwCVUOrhFuE8IUyhWUF6IZ4xpQHKMdWh/SIJ4hESIJIkYiJyKiIeUfwxxlGaEWkRQiEmcOlwrrBm4DyP8o/B355fdc+D75E/tq/G79GQC9AywIowxoDz4SARUWGPsbHB9DIUAigCKqIiEijiAhHtkaeBcjFLwQeg12CXwE3v5X+Wr0oO9i60bnz+Jf3kTZiNRK0BjMa8huxVzDV8JgwpnC28LtwsXDdsW2yODMJtFz1o/cZuNi62r1OAAUC7gUPx3yJDcsIzNNOQ4/U0MORvJH20juSMBHdURPQYI+STt4NwcymSyuJ+Aihx4XGiQW/hK8DxMMkwhBBdUB4v1I+cX0M/E07lrsdOsR6+LpnOdu5evjZuOz4g7iCeJp4hXj4uNI5RjnWen66zTuT/Cd8jj1wfi+/IgABwP4A+EDfQNdAwUD9QIuAi4B3f9D/h/9+PtY+iX57ffB9Qv0S/Lu8MLwfPCP8D3xp/H/8b/x0/Aj8enxYPP59LT2vPiw+oD8C//zAUQEnQaMBzsHWgYpBTcDjAEVAJT+bf0y/A77ofoL++P7Jvz5+uT4XfcE+F/6Mf20/wwBaAKbBKIHdgqRDAIO+Q7XEH4ScRMvFLQUaxbOGIwaIhvfGikbNBzYHdse8B7THuceXh/HH7YeRRzIGBwVNRI2D+sL+QczBPAAD/8K/vP7zvcT8+zupexD66LpoecQ5UfjSuIl4mni3+Ip4+PjzOTq5WvnaOm7623vcPTo+AP9TwBfAh0EFwUUBpAH2AefBrgEFAIqAJ3+RvzJ+Qb3GfSI8Vfv+ewj6/3oveei5wHpKuvZ7GvsY+va653tevBI8jPzsvTU9un5xv26AfgFZQmxC0oMpQzFDf0O/w/mEJoQnBBxELoOngxvChQJFAjgBrgEMAOuAzgG8AhECp4K7wo+DM0NPhDYEpgV8xd9GYMa3BzoH+whNiPZI3skHya1J64n0ybjI7Ygeh6rHJYaaBhFFc8Rjw0bCXQFawFV/Gn39vIh7yXrt+VF38PYS9X11D3W6dYt1jDU+NJ6067UxNaK2UbbO94l45Poqe5A8+H04PXK9s34j/wJ/9MBywWgCSwLkwoaCFoE4gJNAUL+AfxO+AH2kff++K/4U/Z+8VXthuvq6gjrp+pP61DtIvA98+j1j/hR/NUA9QMBBqAGrgfBChwO7BGpFGgVShULE0YQrA1oCVMEw/1095PzJvIH8qjwJu2k6aLn++XB5ZXmZurQ8Uj5dv9lBPgJpBFdG8Akby2sNAs67j4pRaBL4U44T7VNZEuPSrtIGUXbQFc8hTYKLwAoeSGLG/AUNQyTAoj7rvZo86PwluwE6czlYeJe34XbMteV033RddEW0/fU/tXB1KLTcNRT1j7ZK92F4VXkBuVO5KLkiubA6ULt8PCS9Gj32fmM+9H8Nvwe+kv4Ufjp+Vn5Ovfy9NDyzfL/8svyHvKf76nsEemv5U7jT+Pu5frpQu3Z7UTuxe1T7+PyB/bP+mr/rgMyB7UJowxgEXwYXiCsJQYodCjSKawtkzJPNx46bDr5OPU2OTSKMS
Aw0y54KxElphyZE1sK7gFO+tT0IfBo6X3gEtbDzIrFJ8Gbv52+/LyEuhq6o71Ow6DIccxq0MXWEd655bXtf/XW/t0HABACFw4coSCTJSkqEC7wMH0yuzOQNas3Ajg5N3M1jzP9Ma0vmiy8KXIo4igyKqMpGiWFHYQWmhLUEWoSqhDJC3kGTwLB/27+r/zj+Kz0zvCF7prtfeyF6gzouuUD5BLi1uB34M7gwuFu4NHdbdvo2ZjaHNyT3DPdV90G3dLd3d5Y4ATixuKU42fl5eZQ6DbrnO4W8tn0sfUO9qH3KfrM/PH+9ACXA+gFRgfpBwgIBwlZClELOAz1DOoNIg+TD/wP+BD3EWcTuhR+FfkVkBXcFHYUABSPFLwVghaFGGQaTxsiGzAZ/xXQE/kRuhAnEFIPHw1cChkIfgVzA0UAu/wg+T72ePQl9Rn2bfaG9nj2Rvf0+AP6jPrL+yP8SPwM/EL8tv0ZAPEBCAISAQkA1v7G/UP9uf1l/0oACQAe/4r90fxx/HP7nfnI9nD0o/Ov87bz0vKL8Ivt8Oou6XnpvOkz6UDo0OYE5yLpWete7dHuC+/670Lx1/Ji9Z74gvsZ/sD/ewF4BDYHIwloC/cN+BEvFhYYjRgfGbAaFB35HcwcmRrqGMcYlhlzGqkabxqCGRcYQRY2FE4R6w03CuEHAge3B2MIdAgdCNYHQwgACCwGeARiBG0GxAljC1YLpAnfB/IF3QPbASf/I/wq+cL2hfVV9KbyRPAr7SrraupQ6SDo+eYs5nbmhudQ6f3rnu6X77XwIvLD81n2efgW+pr7ufwT/jf/H//N/m39LPph90T1JPJc7ufpjeVV42ni2+AX36PdQtxn273bcNwR3a3en+D94tzlPulf7GnuOvAK8jH0xPe4+y7/WwNcB7QLrBC+FS8bJyFSJooq5C3jL3kxYjKFMiwyIzGQL0cuBS7CLnUvWzDjMcEyiTKxMcsvMS37Ka0miiODIMYdHhpGFgcSIg5bC4EJ/AiACJMGawOC//H8h/yC/Xb/LwBf/9X+d/4L/+b/P/91/uz8d/rx9kvyB+076PbiYd0W1zbQQ8rKxAHBVr5Uu8y4O7f/tvy4N7vbvffCjcmd0WDa4uJU6xz0XvwiBKgLchFaFfUWDhfmFv0W6RUqExgQTwwjBxwBfvkF8MDmBt5K2D3WcNWu1ZnXeNtB4TDoRe4F9LX5kv+GB1AQlRlXIlYqFDOIO/9D8Eo6UA5UFFZiV0BYlFiSWL5XAFZuUoFMf0QUO2EwGyUiGGEJKPqN6sXd/tKXyUPAvbZar8epWKVeoXmdU5tSmmWb1J5OpPmr4bRIv4vK3NVp4ErqOvRX/mEImROtHgUpLjJUOr1CtkrzUHtUl1WfVJNTLVLhT+BL5UXzPw86FzX0L1cpyyKbHFAX5RIwDjsJjwWHApb/kvyS+OXzeO9j68rmKOOV30zcBdmS1UPS8M0myqzHp8ZEx+zHWMlwy5POLtOc2O/eDeUa6u3tWPF49XH6F/89AuMESwdTCfcKFAu1CnUKngnHCCsIlAZbBXoDaAFkABP/zf3H/G77tvqt+rv6tfrB+mP6rPmH+un8W/8SAZsCpwM/BegGkAi+CpsMoQ57EFsSVRQuFr0XIRiJFlQVqRQbFYAWqRaDFjwX3BejFy8WhRNiESkQ2w/zD0UPyQ1NDDgL3woOC+gK+QmZCPIG8wTJAhEBnP8q/9T+2P6w/sX8yfqE+Yf5IPoM+rz43Pdh+M/5PPtR/OP7GvpU+JH2IvXP87fxAu817c/rfeor6VnnDuUq40zizuGC4RHhmeBB4LfgQeGo4a7haOHn4kTlp+df6ubsm+9p84X2oPnO/Lj/JAJvAygEBAWNBQYFzwOjAkQC1gJoA2kDLQNiAqoBmAGrAQYDvgTSBTsH3Qj4C2oPtBGLE/YUFRZyGFMa2hseHVsddx2hHTce/x6JHr8chxoeGcgYVhgLFhsTchD9DbALxwcCBDcAf/1j/C
/8dPwH/qUA6QNZBksHEAiKCSgM8Q5oEpAV6RhzHFMfOyEXIoQhnB9iHVobBBp+GJwWbBT8EaQO/AkTBDL+IvoN9u3wJ+vB5Fvf+9u12FrVrNG7ziTMK8ojyLXFH8Qiw6fDrMVVyMbJoMkTyYzKkM7h1OHamt5p4cHjg+Z+6krvjfTg+BD7yvoB+nD6cfx3/t3/EQDE/lH9jfsr+qf5HfiT9Sn1sva++c39agBbA/MHhAxZEXkVxRhEHRoj1Sm7MLA1wDmnPRFBRURQRvlGRkejR89HxUdWRs9DeT8YOn40cC2RJTYelhcdEboKMwX+/y76ZfPr6rXiwtvF1YbQsMzgytTJwshmxy/GVMYqyJLLrdDV1a3Z+ds43r/iVOq982v8TgESAioBcwELBA8JLQ9UE+IVRBY0FRMV7BSaFc0XFRnTGd0ZKhgEFmIU0hJzESIQwA0DDJwKDwlnB2IEtP/a+eL0yvD57VfsHeod6WfowOa05FriaeE34gzk3eTq5RboIux98S/3P/xcAGgDaQacCeEM4g/VESATHhPSEpkTwhXyFuEVwREKDe4JcweSA/r+GPq79nv2FfZN9KrwVOwS6pjpLOoC61frf+sL7FHtLe978Sf0T/fu+TD7Yftp/PP++AIUB+sIBwq5Ck4KSgrSCm0LYAwADdENHA9qEAgRNBHZEWgTaBV5FsIWihedF8AX2hc0F7UWkhQxEXgORw23DHoKNwZhATb9sPot+OT08vDN7O3pdOk76R7og+VP4jLh0+GR4jfjWOOT5J/mqulR7XLv5PDS8f/zL/if/QcCUQWkBzQKzQz0Di4Q3BEhFIsVJBXnEvkRrxLLFbsYzhjdFn4TixA5D58Ocw3QC+UJ8QZjAzz/zvtG+hP5lva48pPuX+pQ583kHePm4tfj7eTh5BLkjeNP5KnlM+bH5l7nOOiZ6gvu9/Bd8kvyNfEE8mb0HPfR+bH7zPw4/tr/TwECBE4IsgxhEa0VXRmnHbghEyXFJ6sqfi1nMOEyeTMEM2Ix7y75KxUpESa/IvMekhuOGOwV4xJFD1wMnAmoBw8G1QQNBN0DIwRuBM4DkQJpAM3+6f6s/87/Yf54/Bz7F/tD+lf36vIV7jLqpufl5aXjGOBW3BHZMdcW17fVjdI/z8XN18870xnWU9gE20Df8OPY56vrvO4l8uX1pPhR+xL+DwEEBBsGtwaNBpMFhQQhBOED8APKA9wCOQEh/3P9ufv6+gH8z/1d/00AJABPANQAZAGCAioDzgTcBhkIuwmuC4INcQ+AEeoSLhNeEcUOEw0lDe8N3w1jDEMJSQafBEgFxAaEBs4D//+d/B/7VPpe+pH70f1iABED9QUqCNAJkAqhC+wM+A5rEWQTUhbqGTQdJCDAIcEidiPiIpIhOx8vHYsc2hurGlgYeRQnEJYL6QViAe/9BPsk+Yb3TfZM9dHzIPK/8FLvWu5g7QbsQez47GLu8/B08qDz+vSV9cj1mPYo92H5L/sQ/OH96/9dAlcEHQWSBloHNwZZA07/L/wK+tv3XPRc77ToquH72rzUtM4Tyd3Dp79Cu62237NQs6K0AbjhuyTAgcXFyrTQ6NgZ4abqF/Vg/XwGsg/XFx0hJyt8M4E68T7fQctD60MdQoU+dzl+My8urSgWI64dixihE10PmwukBwwEIgAw/I/6L/s5/oIDJQobEdQXFR7mIwIodSq5K5As7S5iMkQ2UjlWO607KTuPOpw41jXPMkgttCUdHdoUiw6VCCcCqPqj8v3qHeM62/7SyMlNwZm5uLMfr/+qhKflpPOiN6HEn7yexp80o+yoUq+utee7wsFNyI7Qb9nO4QTqd/Lo+gwDgQs6E+gb+CRtLUI1mDq7PkVBokIUQxNC2T/HPCs5cjbQMwQxii6TK/Uo2CapJEoh0xysFxYSvw0wCkgGiwHX+/z0le686Cnjud6C22DaDNrz2o7d2OCq5W7qDe/b8wv4Nf34AXsHtQ6XFV4bwR73Hx
gh1CI4JN0kQSQGI5EirSLAIhoirx/wG0gXkBFqC2oEvP2/9+7xAOzI5jDixd482/fWP9LczIrITsU9xOTEUcf9ysPOnNHN01rWqNmy3afhZOXA6GfsB/Ac9HD3QPqE/dH/aAHBASABKwG4AQsD9QQ+BjIHowecCLsK7Q03EXMUtxeHG7AfBCT/JiUo6Sg6Km0sLC30Kx8pdCbQJJgi0h/BHMMaBhusG48cPxxgGs8ZuxmFGKIW8RPPEHQOtwtOCdAHYQZ4BIUB6/1e+sj3YvUZ83LwKe5T6yzoJOUB4uTfbt7a3Knbn9qI2fXYUNjz1/nXW9jd2PbYbdiQ2E/ZoNo83ELesuHy5WTqJ+3S7Vbude+18Efya/R192n7jwCeBYoJ+wtlDcEObxDhEckREhFxEEkRKhT2F+wZMRmEFowSxw7IC/EIHQaXA/QBPgH7AYcDvgVqCOIKFQ1fD24STxWyF5MZfhvRHRggXyEUIsEhxSA+H7gc7RhbFF8PsQn/A+D9MPjN8yvweu0R60Pp+OfJ5dLj7uGF4TrjueUC6b3sPPGO9jH8jwIzCEwNdRLQFhoaLBzJHBgdth32HQ0e5R1HHCoZ1RXREh4PFArABAoAe/1F/Bb6DvgG9tbzWvJR8Nnub+5f7vLuG/CQ8DnxpfF38k7ynPGT8N7u8+y466TrxOwb7+PvE/C97xDvuu+p7ybv6u0K6u7ld+Pv463lxeaZ5hfn5Ojd6oTqLujo5BnjaOT95xbsUPCy80n15Pa1+HX6Mvzg/Vr+PgDhA8QJKxCRFGYXYBnUGuEd1iGFJVco9CiQKFwn2yVOJQ4lLSUBJlQkGB/iFzkRxA46EAMQYAxzBiwAC/x/+p35IPtO/nL/Pf8P/qz95v/sAuYEagjsC0YPdhNsFsMamSBkJJAmBic4JVMiix44HREeGh1pGboS5gr1BL/9N/VL7r3n5eEh3aTXhNPf0D3ONs2Tzv7RAteO3LThP+aZ69vxb/kwAfAF/Qn9DdEU0R1IJGUmIiMvHgobWBusHNsbuBcYEaUIy/499WPsquOF2qrSw8xMyTDHrMRswm7BrMC2wHzDfMghzvHT2djl3QDlUe139sH9EAL+AyIGmglrDggVKRrKHckfryCdHzkbHxVEEGMOoQ6zDmYMLgi6Ahb+S/wo+rb20vLx7DXoHebs5BnlR+ZA58LoLuta7aTu/e8v8ffzkPk7/yADrgh9ECsaSSWeLJAxcjWfOVpBBUrPUHZUa1bgWOxb6126W5NXGVMnULVOC0qdQ2o8OTQOLS0mBx2+ErgJGgN6/B319eog3yXX49HbzYbJMsM3u9m04K/XrJeqgqfWpGOiK6HDoM6fvZ3knaOhXaexremypLeDvjbHxtDc2u7jUe7g+UYEXQ25FC8aAR+LIqQk+yXWJ0wqFivRKSgnwyOCISwgeh07G4UY6hUgFBYSyxKdFR8YwBnWGAUXLBZ0FUAWyhecGV4bJxyvHJEdSx46Hv4b5BeCFGsTJhXPFoYXiRcuGOsY/BZWEgYMpQcZBUAD+wDm/M/57Pea9bzyku4L6pXmE+Nm4Ivext2a3MfZPdlK26DeXuLy43PjtuM25eboV+6K8tT0avYw+MT55/vd/Zn/kAEyAi0DKgR7BX8GiwbaBpAHtAiKCNoHmAXCAzcDRQOsBPcFBwYSBVwEpwRVBacFrgWpBOEEXgZ4CHkLTA1KDC4JbQWoA2gFAggECSIHVwKz/a/6NPnD+gD85vuZ+sz3PvaR9d72qfls+xv7Sfnl9n/3uvpB/rgCiwYoCEMHWgTuAN4AwgSvCeYNaxBhESESshEIEHIOPg2TDI0MDQzNCs0JJgi8BlAG1gV3A7H+B/pZ9zf3yfjF+an6TvxJ/f78mP1q/x0BCQJj/8781PyK/hUBjgTbBpMHmQZzA7b/Zvyd+VT3cvTD8KrsfegR5TfjWuJt4OXcUdjT1BnTcdPX0mbSWdJE1InYz93m47Lqt/Dz9SX7mAAlBz
cNaxIpFzYbCCDwJXErGTGwNOk1tjbJNmY25jUUNLEysDBuLI8mPiAFG3AXWRXXEpUOmwj1AOP5D/Ti7pTpd+XL4yrjauIU4KLdf9yA3And6d1R3zPhCeMH5Z/n0us18Vb3Qf2nAcIF/wlADvcS+RcmHb4iBCiRLHcw9DK1M6oz9DLIMNgtgSmjJhEmcyR3IKAZ4BBLCdwCkPwA90vxe+zh6c7ocue45STitt5w3JbaC9vo3L3eBuCd4IPi2uUx6TvrvOpt6mXqtupo6+jqp+m455XmLeaD5FHiBd+b2z7aHNoe2vzaattl3KDfjOSO7BL2Tf9CBywPshZQHpYlLywMMkk3LTxmQBdEhEayRr5E9kABO0k0fSuPIeEWUgo3/cLvX+LJ1q/MbMN7u+CzdK1qqKykL6P+o/SlJqgirEqy+7vzyNDV7eHD7a35nAZQEyYeQSeTL8o3RUCwSOZPClU+WCNaalqCWelW6FEHTOxFPUAGO0I2YzJELqMpFyX5IJUcRxiKFNgPGwz4CCIGmARLA8gAGf7R/PL8ff7J/xwAhAC0Ae8DDgcyCWQJ8AepBf0DlALtAPH/gv8QAHUAAQCc/3v/av4v/PT4lPPv7Hnlud1g1YnM9MNuup2xgKoopLee6JlWlxGYm5oYnvmiYKhcrka1yrsPwo7HDsw00rjZr+Bf5kLq/e2u8oD4hv3WANgDCwhVDYIUFx35JOYszTMtOsRBpEj4TRZSqVSeVklYM1kVWARVdFGtTu1LpkegQT865jIpLFUlmh4iGKgR9AtaCPMF7wOHAVn+d/sD+cj34vcl+Z77Gf+2A0MI2AtPD04SpBRKFjsVGhIjDysNbg0lDeMKqQYkAcj8tfg589vs/OV13pzYL9SA0DDN4ckWx+PFX8XJw+bBr78iv63BYMVmyTnN4NAr1pHcuOIF5/rpfu0G8hL3dv1/A+UJGxBLFMEY/BzvH2MhwCBkH88eEx5THX0b+BmIGB8WMRMuD/8KvQd3BCkB9v38+Yb2JPRX8s7xwvIn9Pf1bPe595X3Wfhk+UL6ffov+kb7h/2IAFADCgUVB9MIcwk/CSIJgQnPCX4KAwvHC54MNg1tDjcQ0BFwEz0UEBXTFosYYRqEG2QboBstHDIc3RyNHIgbshqeGaQYoBfrFCkR+QwSCH8Dtv6S+e/02vAq7YPq+ee/5VXil97d2n7XqNWZ1EzUGdRd1BfVJte/2fXb/N3C36/hj+Ou5knqle6U82P4z/3bAm0HNQvgDbcQlhMiFkMZmhuqHGAdvB3+HesdIRx1GX8XNBbaFNMSag9zDEsJHAY2AoH+/Pv/+QL4HvYw9d/09PRF9ev1yvWb9aD1z/Z2+ED7wf4PAg0F+gZ5CAUKYQwRDuoN3AwmCqkH2AXrBEgFPgXLBOUDPgIXAb//2P5M/VL7ePpg+lj9ZAHWBP0HNgmuCRQMQg99EjoUbRWZFhoYhxg5F5AWNxYkFckRogyQBnQABfvR9JDueeiQ4cHa8tPnzAnGn7/zuRq3t7XztQy3nrj+u8u/KcS8yBDOxNQk3SLnk/Hr/OgIZxOhGyYhHSU7KoUw9TYpPWBBUkPQQqk/5jucOQ44ETaEMjksoCNoGvUSFw5XC0QIyQIj+5jzze1l6hvplejs547n+uf46Pjql+3z71rxd/G78UXzGvZH+Zb8eP8jArQEtgauCNIKPw27D90SjxXsF5QZTRoBG0EdBSHVJVgpxClsKD0mkSORIRkh0CBsH+Aa/RJOCwgHsATIAdP7pvRQ7x/sW+of6HnlKOOa4ubi6uIG4dreWd5M4A/kcOj760bt0O3i7Ifr6esG7pnwW/LF8XfwNO8j7fLpWeXK4D/d2Nus25vacNgv1ajRx9D30WfTutU+1yvZWd3Z4RvmXOoJ7sHycfh//U4BPgQmCJQOuBWaGywf8x+QIA4iTCWMKuAugzGNMnUytzKcMggx8C/BL4cv2y/8LsEsxir5KEknZSVDImMechqEFn
sSMw5/CQIFggHz/j39hfq79tryw+9+7cTrVOrU6afr6u1S7z/v7+3v7GPt1e247X/s5+k75+bluOZj6Z7r/+vK65PrOezF7f/vnPON97L62v0+AZgEiAdWCtIMHRBVE78UDhVlFmgYXho1G0kaaRliGe8ZdBqDGtAYsxV9EuYQng8VDtELNQnKB3EGHAXFA2IBlP4f/DX5yfbr9Bf0a/Sb9UX2aPae9TD0yPLC8dbwG+9H7UXrTOpX6mPqmumD6Bznl+QI4ZXc+tcg1TPULNSy0zfSZtD1z6/RLtSV1uvXPNkE3NzgJOcE7bbyivjl/YQD/Qi8DRcSqxaxGqEeJCKxJDgnPykOKy8sWizPK4krcSunK68rjyqQKRgpJSj0J80m/CSTJNcjXiN/ItAfCR2BGlgYURdBFbURwQ3XCTAImAf+BXYD6v6c+ir3XPR28xbzh/K78fXwdvB+8Fnw/O9l7x/vYe/i7zPx+PIi9WP3tvh5+eP5t/nm+UD6P/vv/ID+MP9A/yz/k//Y/+H+UPyj+P70tfM29TT2zfT08N3s5+rL6frnH+YD5UflAObu5PXi/+Gp4p/ju+NL4yTjzOQs6NPrge6V8DTyb/QA9yn6Wf3QAL4DlQUkCAMLRg76EHoSYBOCFK4VbBYSFnIVuBSvFLcV7hajF/gV8BFxDWgK1AnQCnMKNwmEB2sFnQMQAk8CTwRIBg4HugWjBCcFtgakCfIM+A+wERMTSRRJFhMZfRviHCUdixwZHMsbhxwnHhke+RthF8gRYQyfCOsFgAKi/kr50fI26xfjhttP1TXRBM/ozUjN+czUzLbOJtKY1ujaUt5+4Avknel/8ej6EASCDN4SLxfRGVcbGhz5G98a/Rj7Ft8U0BMbEwgQZwuJBdb/avot9Bnt0uYo4m7fid092xTYCtUx0sfPx82WzH/MmM0G0CnTstbt2TbdxeGe6LDvSvbD+5UBKghpDwAX1ByNIjQnxSvDMtg5BEAyRJlH+koBTwVSHFNDU0hRf07YSsJEMD0tNZEsVSUJHyQY4RBSCccBkvl88cXp4eIy3kXaV9lQ2YvZVtok2dzX3dgn3KzfdeGd4dDhyeLY5T7qWu/38p72Bvrf/A8BpAOoBB0HyApiD4MUfBczGsUcNx4BH10eehyjGVoUSw6zCBIDg/70+SD1lvH07q7sMOuT6Ybnj+X74q7hSOLc4rTj3eR35anm2+cp6R3qT+od6lrpv+np6kns7e1o7b3pgef85Zrk4OMN4QHendyA3Nfe7+LU5b/na+rb7abyNPib/NMAMwd+Dl4WDB5/I/InqywEME8zaDfnOSc7nzmcNf8xfi94LrEsbSr7JzAlgiGwG5sUvw4RCqUFlgFI/gD8sfro+uj72/wR/qYAXQPTBjEKggy3DngQ6RClEY4TBRXDFGsTERGZDfQJJAbEAXj9yPhf86Tuqulf5dnhMt4q2vPUeNA6zoDMkMvvy+vN3dFt1XbWANf32I7cfeIn6VjvfPSk+dv9MQMeCRANsw/NEYITehWtF/MYFxlyGHgX/xZzGCoZvxjdF/cW0BVVFZUUVBOdEtMQrw6RDPALMw2oD9oRfxODFN0TExMaE0MTLBQlFZQVzRVFFvsVRxReEccNewvZCfEHugPQ/L32wfHp7OHob+M13snZ9dQz0KTLCMblv2G6K7extjq2qrUOteS1sbnrvqHEW8qoz1rWdt6J5+LvtPaf/pII6BIvHVIlhSslMSU1TDgiOvY5aDnVONQ3tDUiMqEuVStTJ0QiKRxBFmAQUQo+BboBSwD2/uf7Ifgl9kv2Gffp99D3evgT+/X+xQSjCiQPyhI5FWgXQxndGTgbSx6TIh0m7iaAJcUksSMCIgoeaxYUEOIK4wY0BEMBX/70+i/2FfFc7IPnTuPQ4FHfoN1S3X/dGt++4cTinOIk4Aber9283rXgTOFl4BXfGt8V4cHiMuOf4kHiAeLK4a3ghOAj4hTluel67u7ysPb7+U
v9cQIxCLoN0BFPE/QTwBQLFt8XSRn+GWQZ8BbvExIR+g5wDsENuQtUCfYGdwVfBU8GHwhVCWAJwwiACCII8QkcDdoPQhI7Ek4RHhFvEhMVdRjuGSwaHxqSGusaqBqvGcIYthh4GEcYTBa9EvUOawvKCMMFxQHa/YP53PXy8szvQu3t60DqBemN6Dvow+jF6Wzrie5o8VfyBvL98Wn0Q/iZ/AgADAM8Bn8IXAulDm8RHRMxEiAQAw/VDrkOYQ2VCogHPAQtANP6ufQq7sPoIuVG4ljged7m3IrbitrW2d7YtdYC1R/UTdSO1ZvWn9dF2K/YfdjP167XI9fk1oLXE9kq2z3dVeAP5MLnBeze8GH2YfyQAoMJWxCZFg8cWSK5KIwvZDY5PPpA1ETDRrVHGEmxSVRK1Uq9SqtKz0nNR2xF7kKOQMU9qjnCMw8t4iY7IdIbahanEGsLjAY3ArT9OPg78tDsPeiK5Bfh39092mnXAdVA0lzQ7c5bzjHO183+zd3PVNKw1WnZ0t3d45nqQfHV9+f9xgMsCdcMjQ/BEVsTsRSXFVgVshSlE7MRZg4fCooEwf7E+uX1pfCO62zlvuBz3f3aQ9rM2enYYNiD2VncmOAg5aPpN+6c88f5hP+2BP4IEgwwD5QSaRXrFzoZnhmoGTkagRv0HHkdkBzXGaIWBBTZESIQaQ5+DOQJKQe2BJYB9/0i+nr2YfRK86nymfGu77TtEezU6sfqbuu57BTvH/Et81711ffm+vP9mgAFAykFeAfyCEIJFglpCZwKdAwhDnkOQg6IDgIPHA8hDtIL8Ag8BjwDzP97/bX8FP21/Tn9p/s3+mn54Pir+Nv42fhe+Dn4h/gC+mb8dv7SAM8D6gbbCaUMfw7GD7sQNRHiEeYSqhP0E/gSGhEdDwMNkwunCdsGegOO/zb8NPni9uj0LvPk8XHxYfHH8Y/z2fQr9Qz2JPcv+W37yfxo/fT9+v50AM4BIQJJAQIAFP8i/lf9PPwO+7H6hfqF+o/6fvn/9/r1NvSc82PzoPI28Tnvje2W7JrrherZ6NDm5eVg5Q3lMOWM5b7mfehx6hPsse3n7x3zKPb1+av+SgSpC1ATDBtWInco/S0zM4Y3ZjsoPthAy0NbRa9FqUTUQotAFz0pODcy4ivxJXggCxsWFQcPCAl3Ayj+OviE8Ujrv+Un4H/bwNdA1p/W6da71bDU/NTk1r7ZJdwa3ojfYuEl5NnnFeth7fjuH/Cn8Fzw1e+878fvSvDF8CnxEPKf8vnynvM+9Kr0z/T39Bv1P/ap9yb5Xfqj+jP71vvA/O/8t/y0/EX9qf7kAMsCeAMTAwwCbgHZASkDdARHBDQDaQKFAnwDdgOkAn4BWACY/0b/of8SAbsDpQZrCXsLXg0QED4Udxj7GyAewR7aH34h1SMtJhwohyhkJyckLSDWHO4Z6BadEq8NgAjmA8P/d/xM+lz4nvXA8s7v0u257LXriuze7gPyO/Um9+H5uvwEAJYFJAsbELwTbBV+F4Abpx53IHYgFiCjIDIh2x8pHHQXKRJlDZQI8QFj+0/1GvCh7LboD+WP4Fnb49Yn1OHRJM+5zCnMJ86Z0O/RBNFCz7PPxtFE1N3WQNm83JXgzuOs53nq0e1i8Vf0aPe7+dv7z/3c/wQBXALAAtQBmwDR/zIAXAFkAXsAOf87/QP8hvvS+2D8l/yQ/Nz8af2E/2sCywTTB+EJYQy0EAEU3hdAHNIfmCRFKQMtEDBfMhczCzQwNcQ1gTZdN403+TYwNVszWTF/Logr/Se7JBkiYx+yGucVfA82CisGkQFM/L30++yx5wrlLuNZ4P3aNdSJznzLxcqCy3zLvsppyk3Kx8sOz6zSFtYc2Eva7d2x4Y7kkugW8Zj7PgQRCWUIRwelBzwJGQ2REC4UExf+F00Y0haXFUUVGxVsFXQV4xTBEzUTphJhEtwRfRG2ETcShhMrFPkSdRCJDW8LOwonCCQFRgId/8P7ZPhS9K
7vj+zm6fTm3eR24sHe0tsG2hLaVNwJ3pLfneH14xPnWeto78Pz5PeS+sf7vv0rAQIEVQenCloNOhD8EYERyBBfELYQ2hG4EqoSQBB+DdAM6w2/D2gPDA3nCkwJIwgiB2UG5wWrBIgBuv2y+pH5hPls+Dv33fUR9QP1cfQY9Tb2Bfed+LT5QfjZ9yT5JPvC/RH/UP+DAF4CagO7AwADFwM4BjsKEg2XDe8LDQpcCWoK2AvkDMYMZwzjDaAP0w9PDcEIBgbGBTcGgQV2A9sAjv+p/j79Dvwp+xn51PXd8vjw4PE89Pb2F/hL+Nn3y/Un9Zv2x/d5+Kz3B/a09Vr21vdv+bz5APh19dryLvLu8Tjx1u/87rrw1vQU+u3+ZgFiAtoCPwJQA/gFqAiZC5cNGw9WEc4ShxKNEIANrQrnCGoIIAhDBwgHUwc3CMoJtQptCnYJoAc8BnUFaAXdBsYJAg2BDmYNNQuwCWUJ+AnWCNcGsQROAf/9Mfwq+2H7lfoY9krx++3R7O3tlO4s7mPt6Oyo7fXuCvAj8dfzj/dM+o/7LvzU/mwETQkZC00Kuwg/CAMJbAnHCfkIjAZ6A7kAx/1s+475uffY9pz0APHJ7YTrSOua7GDsKuzC7HruZfC+8vD0RveH+zMANQQuB3gI3AmgDMUPeBKfFFoVfRWoFt4Xjxh3F10VYxJRD3oMLgnLBusFawVuBAkDpAHR/4f9TPs++aT46vin+Xz6Vfwd/wsChwTgBT8HswhgC/oM4A0LD6MQCxMcFjEXjRZjFHIQ0QxSCp8IeAZcA2r+jfmW9Q7yHu6l6tDozOgh6TLpKeij5tDlqeZx6QvtU/HX9Z/6m/8EBEMIZwsxDVYOnw9IEsUVRBejFrwUrBJpEZAPiAx0CMcDHf5s+F3ySu1l6QjlBeD52QPTaM4ZzP7HpMPyvky8Ur6LwbzEvMY1yKXJUM0d027ZD+Ch5kTuKPZy/bkBDASwB+QMCBQQHO0h3CaaK8AvETXDOfc9QECkP+09/jyXPZg95TzjOe81NjLOLBAogCQWIG4cChhCFIATVBPUE2IUHhTAFAcV2xMQEwcSvRJ5FQ0YORvIHaQeYh8GH/cdlR5SINUg2x5TGz8Y7xWKERoMIAbZ/uP4xfJl7MjlId3S01HKYsLUui207604qHukdaI/ogOhF59mnRCdDZ88ooul6Kq1sbW5iMOEzYTXYeCa6Pbw8fi5AaMJFxHvGKsfNCbPK5wxBzdkOi8+8kGmQpBAIzx8NzE0dDGELkYrVijqJEkggRmmEaIJbgQGAV7+rvzM+hj7HftS+tX4Kfff9nr2Ifah9e3zV/O/8230vfWP9Xv0sfJ18Fjvuu797DbryOuL7nHygPU9+ED7cv47A+8JqQ+GFIoY9RqdHvIhRyT7Jmsqgi3PL04wOi2fKEomjSVLJZIjJB04FKILgQRs/7v5APMB7Z3lfeBD3qvco9zj3QveI9+G4Mnh+eOE5SrpEu/b9cP62v33/28COAcCDLQN2gx6CaIFYATsA6QCBgJ9ARABfgDc/sr88vuy/O/8fPyK+zn6Gvkf+BH4rfnM+9T7X/ps+RP4uPZf9Enxq+/m70PwgfFG86XyzfAc8AHy0PYb/GT+bv38+5j8fP9cA7IGRAjgB3oH6AdqCQ8J0gZJBcsEJwQTAjP+tvlO99j1Q/T48+f0V/cZ+qr82/1C/x4CXATcB2EL6g3UD4ARqBN2GNscQB4JHZoaXxmMGeoZYhlYFxoVoRKpEI4RrRJAEeAOewuYCKIHDgXfAqoBW//8+0z3e/Kn8F/wI/CK7vDrs+nd5zvo3ep+7n7xiPMU9WD2+/aG+B77Lf67AQ4FAgfaCYoLHAu+C+EL4grzCTgHvwQuBncJFg1UD5kOTAzOCl8JywflBCoC4wDqADYAGP5M/Gb7pftC+TX0we1M5zDiBN8d3OzYlNZX1JHSD9Ghz0LOM862zg/Q/NF31I/YjN806L3xX/qBAYwI+A45Ft
kckyKxJ9sr/y+WMz433zrwPBw9vDk+NHYtyib4IVQcBhatDyYJygNE/hr35O675lHfz9qE2ZXZhtm92D/Y6dmg3f/im+nX8M/3PP00A0MKpxHQGM8d9iEnJoAqXy6yML0xtjGhMaIwri5cK8ImaiHwGwwWlhByCzQF8P7x+SP2JfMY7pjmLeD6247a4tru2jzb6Ntx3PDd8uBK5PrnMevS7SnxmvVp+zUAvgGpADj+KPzU+Rv3xfWA9MryX/At7gzs+OpC6Rzm2OPH4mbjJeWc5ZPluOYo6S3tO/CC8QDz0PU0+wMCqAhZDgkUuhlrHp0iCCQeJAskgiTzJW8oySl9Kj0r5iobKvsnNiXmIXEe4BtEGUcWnBT1E6ATShSqFHsU2BQPFXIV6BULFvwWbxipGhIcrhz1HGob4RmbF8gUBRJWDfUHdQKb/Cz3NfFA61zmxeHX3AbXps97x5y/ybg6s+2tu6hso8ae6psnmpOYxJZ9lc6V45iznZijX6r8sZu78MXS0NLbTOZ18fT89QhFFIAdEiU+LAw0qTzHRcdNOVUjXJdhz2U1aPFnIGbtY8Jgb11pWERSLkvoQkY6iDE7KR4imhrVEnsMUwbsAYv9rfgh9ZTyTvFH8cvwMvBM78fuae9v8PDwFPHz8ZHz5vXh91P5vfsS/30CNQVABp0HtwkhDS8RLBRkFv4WiBXcE64SsxI1E+MSuBGrDvQK5AZqAq39KPkW9PLtOOiW477fm9wV2rXX9dXZ1BPUa9TU01/Sd9Gq0V/TNtaH2VXdX+H45Jbo1Ov97gXxVvIZ8xz0XPU99Qz0wfLu8Q7ynPLF8iPyy/CB79vuFu9x70DwmfHW88D2Gfnn+mf97wDaBJkIbQsNDloQ9hFJFJwWkheeF3MWBhWlFJAUahRPEwsRdw6yDKMLqQp7Cs0KswvqDWIQTRI/FPkVTRhwGygeUx82HxMemByGG/YalRrfGDYVMBD7CqUGVASUAksAvf0V++n5OfoS+yv9iwBABOgH0wqbDTcRyBS7GAYdeCCEIhgjriJkIUYeChmgE6EOcQomBnMAZPpy9ATuSuhq5PHhUN8e2xTVls+nzPTLYswhzBDMpczxzQDQL9Ip1K7Wq9ld3b/g8ePr5hrpd+w/8Eb0cPdZ+Sr76/3AAQQGzgk1DakQ0BIGE6kS8BLSE+MULBXcEzASoA/8DLQLCAvHCbwHrwStAV3/b/0e/Zb98P2N/O35JPck9lX20fWL89bwJe5P7Lvr6uv17Tbxs/UO+or9DQGiBagLjRIDGSUeTSLXJXwpCy7QMbQ0VTZTNgA2zDTkMpIwPyzeJigiah1wGPsRSgqDA2T9I/hI8+Pt9ugb5C/fB9uw1+nUKNR61DvWAdnK21PfiuMe6UzvxfWT/GoCVAdmC7UOqRKfFvUYmRmjGAgX5hUwFJoQ5gqjA1L8gfbi8f7tjOlv5LPfCdtb1yDURdBjzWzLzcoMzJTO1dE91jvba+AR5bnpJO7a84H6RQHcCPoPpRY7HDcg/yODKLcsDjG5M4U1TzYWNvI1TjZmNlw1ozLaLnMrrihbJxQm4SPzIdggFh/6HLgZZhXlEYcOPAreBSICTv7K+uf2wvKh73ntyepe5+Dip92p2OnTD8+Tyv7HoMYyxoDG4MXIxDzE2sKGwdLA3MAXwgDFFchfzLjRA9cq3XTiDueT66DxBvid/+oIDhJ3HIYmCi9xNtU7ID/DQYFEjUesSvVLlUtZSzVKGkjnRIlATTsvNB8szSOYHJcWGxG1C9kFNgD8+eHyMu2P6Z7ncuZ55bzjTuLt4orkuuav6IfqB+0/8XP1F/iI+SD5mfg1+Ub6Dvz0/Jj8yPvi+nH78fvh+tn3vPMJ8YDvNO3J6v7oFei96DfpE+mW57fkMuLq4BThK+Fv4UfjZuaL6v7tcO+j7sHuU/B78nb0KPWP9br4Jv0PAVcF/gcOC1gP9BPbFkwY5xntHEEhECUaJlMkNSHnHn4doh30HL
YYvBJpDUQKHQlgB4ED4f9y/YH7N/ny9gf2sPcX+tr8Nf86ANoA3wEeA+kEfAeCCiQO9hCnEkAUKhaJGTYczhs5GWkVlxP/EzIUvhRFFEoT5BNPE1YS1A9MC3gHUAQWA0wENgaOBpYEMwGp/aX6RPgn9ib0RvKC8frwqfB174Ds+eiY5o7nheqI7UDvje8+7xHvVe8h8cjzAPYA+PP4v/jV95T3mPf/94P3afWs8wPzsvPI86jx/+056tvmceQ34+/jCuWA5Irgb9na04vR/NGw0s7R9c+jzozOzc9P0hDVitiX3GnhOeZr6xLxB/hMAUsLsBXdHe8jqSk8L9U1+jwxQ9NID0zBS0lKGUnxSK5Jv0jlRYRCcT0tNxQwFCmwI6EekBghEpIL7QUFAnf+E/tw95L0rfIc8x70sfPQ8jTxQPFM8/z1evmG+7r74Pul+xT84/0B/8v/+v9I/jj8rft5/If+ewCTAPMA9AEuA7MEugWWBRgFqQOuARkAVf/Q/2EB/wLuAoD/xvkn9RP09vbf+ar5rfZ58h3v4u2T7tvv+fAP8WTvU+3j7LfulPEK9Kf1bPbf9ob23fV/9rz37vjq+R/6y/qC+x76qffc9F7y6vAT7qrp0uWO41PjkeQO5WXkceKY4Mvfj95P3VfcxtyB3wjjMefF6+Xwv/aW/JYBLAYNC0cQBxYTHEgjwCsdNI86pT5fQYFEvUZ+Rw9H40QdQjBArT2MOsU1Hy9LJ08ffBcfENIJFwRA/5/6NvYr8iru7ert6Rnq6Ooq60brYeu67G3uxO948YjzafVo94n4o/ds9uj0O/Qs9GrzGvHM7UTraeoD6+/r1+t+67/rAux07JPs5+su7KXtCfE59R74+PkX/Ov+fQLFBWEHFQlFC8ANJhAKElESXxElEDUPHw/PDjwNHAqmB8EFmwO2AAP8pvew9GnyDfFp737tTuzr69vsLe5G7nLtpOzo7TnxWvSc9075Qfr4+yT+2AAZBEAHSgpIDK0NPw81ETcTWBTXFO0UjRSTEm0QLw40DcwM9gotCNcDTP9f+9v3CvT58I7ubuxL6pbmCuOi4Enf696O3vXeTOLp5tPsD/KU9r37MwFZB8UN8BQfHAAjCSmtLnAzpTfBO6g9iz2vPAE73TlrNyo0UjFNLh8rpyY0IVEbmBXIDg0I2QL9/mz62vVf8Untp+oV6D/l3+OI42rjROQp5TfmseYz587lkuMc4vjg8d8H32LdMtz83IPeW9/U3vTbStil1h3XpNix2MbX/tVh1UbYFNyE3tjfueAR48jmP+um79vyL/a1+vD+7AJdBasGFAhKCYILWA4QEU8TrhRPFQUVBhVVFT0VDxbeFEsSGxE1Dw4N0QtSCwIMGAw+DE4NLQ//EY8U2xdJG0wdFiBSI28n0itQLsYuyDCxNL04UT3iPzlBskGuQDRAKkDNPi48YjjHM08vzCrfJVIgXRlTEGEGa/tD8FHmmdz90+3LYcR0vEOzHKqpoiOegpucmcuY1ZmnnVWjxKhIr9y2vL/MypnWHOI27gb6owTNDXAUiRitG4Yd5x4hIf0hGCIpIS4fBxzFGEUVjhJnEYIP6g3cDI8LSArsCkkNghCUE4ETPhEuDekHmAKD/rz7jfmv+PP4o/r++1n86fv7+kT6bfjt9Cbyi/D28FzzZfXf9yX6OPvJ/If9bvzm+/z6Mfv6/U0BcAQLB4IIPQqIDOIMXwwgC9IIFQamA58BowAqAGf/Yv8cANcAvABx/rH8UP0j/7AC0wVkCKQKPQyrDUgPihDaD+MOvA3NDHcNFw5NDloOoQ3eCx4K6weUBXsC5f0F+Rr17PEA73Dsdeky5rPiVd5e2CjRyMuOyLHHDci6x3jI5sr5ztDTTNi43MninerD8zj+xAgTFNQfKivrNvhBh0tkVFRcmWKpZjpn72XcYyNh9F3CWWlTIEu5QY43TC3gIrwXBAxlACn1geqZ4BzY+tA6zMrJO8kwyc7ILshjyR
vMhc+e0zTXUdoH3ZjfuuJi5zTs7e8+8h/0HvUi9Qf1X/Vt9SP29vWZ82bxP/BO8M7wlu9a7TnrnukB6c7nV+bT5dXl/+bZ53HnwuUE5HzkZeeF7J7wQfLe8gn0H/YP+Wb82/+7ArgESgauCMMMPRB8EogTrhLBEkcUahf2G6YftSHHIiMjxiNJJdMm1yfyJ3QnPyeuJ84nwCcfJ7glECRqIY8dLRk/Fh8VMxWSFN0RzQ5HDCAKVQf5Ax4A4/z4+mD5Cfin9uD0zfGs7hHt4+za7czt4uv06WToxudE58/lF+Wu5BvlpOYV6UrsFPHQ9f/5lf6ZAk4GUggaCvkMihCjFA0XUxdmF80WLhbBFQUUwBB8DPsHPAR+AI781vjl9K/w0etR5i3hCt0z2gXZUdi42A3ZX9kO2u/aCdzk3QThjuV862DwgvRT9xL6c/6xAxsIcgsuDeMOURFvEsYRwg+kDAMKrwfJBAEC2P7C+9f35PS+8n3wDu9S7YXrwerA6eXpGes37cjvDfL69E74cfxkAasFSAlqDI4PwBOdGDMdsCAoI4wkSybBKAIreyxjLP8qBinDJggmaybYJ0oqECyMLEUsACtMKs0qdCzSLUot/imQJZ8irCCKHiUbzBZ7Eg0PQgvlBt0Bffx295/yOu1i5/bghdu515TU6NF1zqjKjMd+xbzD78Iww3XE6sWox4vKlM4F0xrWZth32ibdOuH15Grn2uiU6S7ryO1G8Afy1vJe86D0L/WA9MLyHPGZ8IfwX/GC8onyiPHY7wzu4+1a7rHupO9O8QH0wveA/KYBewe/DFgRwxWWGhEgliWpKuku+DJENkQ4iDm6Okc8Zz44QPdAckD+Ppw8vjkiNwU0pDAALVIoMCN8HZEXBRJ7DDgH+AAQ+QXwdeaJ3ojYqNNZz0rLzscDxsjF+cUDx4LIXcuUzxTVfdub4u7pN/Er+WsAYAbyCv8NERDIEY8SeBODFOcVLRe2F7cWlxSlErgQZw/VDfMLFgqiCFMHBwbrBEYETgNDAdz+6PvO+Iv2APWn9Ez1p/UM9WXzgPAX7m/sdeqk6PrmPuaQ5x/q3uzT7lzveu757YDuwe/w8cbzMPYN+U/9XwKEB9ILWw7OD9AQ5hGxEdYQ0A9oDxMQRRHGEhUUkhQaFbkV6xVNFlUWBhdxGIIaPR5OI7soOy1LLzswaDEtMukyhTKbMHYtFSkSJLoeLBjFEBsJKgF7+OfuMOT+2GvPA8fRv465GbNrraKoOKVmpAOlo6ZpqD+qIq2UsQ637byOwwPLQ9Mm3FjkxOsJ82f6HQJjCSwP2RPEF5kbPB93Ii4lxyeWKVAqsCr0KlIrliufKnEolyU2IoIe9Br1F/QUqRGFDaEJjgYKBQME1AIaAXz/rv5Y/u7+mwClAgEFAAfvCCULqA1VECwTjxUBF74YLRpmG8kcCh2gHIYcsBytHUIeKh7SHUIdJBxkGtMXIxUOEnwOaQrHBeYAO/zq977zs+/g6+XoIeaV40Thgt993vTdWtxy2ijZ/9g+2u3bUt2j3lTgduJo5dfnPOpE7cjwivT799/57voh/CX+7QA7A5cEzwRbBOoDhwMQA3QCdAF4AT4CIAK+AJv+b/zf+r35b/mD+ZD5qfmb+Vf5/PhJ+HX3Vvix+Tz7KfwQ/FL8Rv1w/vv+gv6J/RT9svwH/QP9GPwb++v5oPi79zr3jfcf+cf66PsQ/bX+iwGUBSUJ+wtWDqoQ4hNaFwcbpB4KIYQioyJPITMg+R/WH9sfRx73GnEXhBMoEMkMKgkOBsUDUgJDAOL9kPvX+d356vlP+qz6R/oW+8L8dwBcBSkKxg4rEmwV/hghHBMekx9kIOQgECGGH+wc5BmPFjkTqA+tCxkHjAE4+0j0NO345cPdqtUszvDHOMJauxO1w7GQsbiyfbOxs2u0Jbf2u3nB5cbjzevWQOBW6rzzt/o/AXsGrQpPEGwVDRo/Hikh2iOZJOIiZB8/GzgYchXZEp
MQyw2ZCEkCdfyh9wT1EPOt8dXxrvHg8PTvGO+w8DvzOvX+9sv4Rvt6/yUFkgodD2QSkhT4Fd0YrxyJIGElUimELJ4vCDKdM5kyXDH4L1MtvyuDKu4ouSZZIikdchoZGekXZRV5EEAMvQh/BTwD7QECA9kDdwMBAtP+fvvn+Fj3X/Wf8lfuHerC5q3jg+Bp3sbdc9zs2lTXNNRf05HUwtal2KbYgtde11/Zp95Q5Urq/+x37gjxZ/Xr+YT8r/1j/6cAhAF+A00GNQozD6sRFROvEw8TMxLgEdEQ9BBBEjUU1RfXG1se6x0+HLYYfhXbFOcWoBkDG04a0hixF9kVABLGDJwGhQD+/CP6FvbN8N7q/uUs5IHkBuQm4WLbJtWD06TVH9lB3a7fWOFH4kbhiuAi4UHiUuSR5v/m2+e16JXqk+4p85T2efgh+XL4ivnI/B0CjwiADi8ShhThFuYarh9FI0kkQiMCIn8i1STQJjkovieCJb0i6x9YHHUYPhTyEAkPOw5sDKcJKgZjAcX+if63/2AB+v8L/Zb8gv7fAZMETAWjBpcIRQrsCgcLSgtyCyILCwqACQkJTAghB0cFGgJS/478T/qd+En2bvPo72PsJep+6W7q1uoJ6jLpueg06V3pkegg6KroiOsL8EvzxPTx9LL1//Yh+c/65/st/RT/ggE9AxIE6wPhAxQETgUlB9cIoAnZCJoIhgpmDvURFhQkFfkVQBffF7gWqRRYEi0RfRDpDh0M8wfzA/AA2f1J+S3zT+yR5hHieN6p2gLXutTs0yLTqdHLz2DO9M1Mzp/Oe8+b0DTTgte+3GLjUOrI8M72l/xCAvkHdQ0RFLYbTCNfKpgwsDVuOiA90T2vPcU8zzuiOr843zX1MUMtVigZI54eWhslGDUVIhIsDqcKCQe7AyoBnf41/ZT7Z/pn+hL7A/wE/KT6Afpl+lD60Pl2+Pv2m/a79rD2X/bH9Yv1qfXN9fD1/fTH89/zI/Ti9O70DvQN85/ydvOK9CP1LvQd8qPv9+y96uzom+dT55fmk+V65A/kK+XK5hDpw+o57CTun+/28e70evgo/CD/2AI5BrwJ0gx8DsoPdxD9EBwSzhKMEiIRpg4nCz0IwAWDAtb+ivv6+H72SvNG75rrNekC6IznVegG6gDtD/Fz9WX67/5cAzwHKAp1DS0RehWVGrIffCRNKlwv1TMPOIU63DvmO6A6bTl0OEw3LTUmMSss5SbZIPAbfRebEg4N4wRK/NT1H/Fn7tLs8eqK6XbpG+pf6y7tku7i77jxE/Sb9sz4Jft5/cL+6v7Q/cf7MvnD9uHzVfBt65nlUN+f2ETSQ8wixuO/MbqJtT+yKLGdsPuw07PTt/m8ocLcyDfQTdms4ejoMPBV95X/Ewj8D5EWUhtyH5EjzSdaKxMtyi2/Lb8sfiqBJ2QjVh6GGRoVXRGCDiIMAgk+BfMAOv26+sz44vcm99L2Dfg5+kr9rgAWAygFJQdSCZkMkg+kEv8U+RdqHUwj0im0MG83Lj3AQftEQkiRS3xNr0yDSvtH3UQ3QcU7MDVlLZIkRxqED28E1PqL88PtFOgp4XnZa9E3y8TGDMSzwdu+Trz1upu7yL0wwCDCw8MJxUPGb8eHyGfJYcpfzQjS0tZ62zbflONf6Grt4vNU+9QA3AQ8CO8KRw99E64WthmBHIof4SL3JXQo0Sq9LOUtny7LLv0tuiz0KnQoSSV1IEEadRPpCzgEJP2/9QnvM+kE5MvftdxC2u7XONaO1bXVIddU2MTZzdxn4NzkMOqO78D0zPkB/v8CpQfgCm0Nxw/bEu4V2RciGekYVhc1FV4S3Q/uDZ0LQAkAB9cE2AI4AWP/qvyN+QD2gPJd8MDvN/CJ8SLyDfPr80v0B/Xm9IL1YviB++X+NAKzBREKYA9aFI8YaxydIHYldSqjLoUxnDM7NSc31zbVNJIxZS6gK5AoqiRCH6wZHhRBDq8JYAbkAtz+gvnY9MnxNu+E7M
TpZeYT5E/ih+A93pvbq9hj1qvUN9JDz6XMhcpdyVjJlskuy/nMgs8L0s3T3NXs1wHaw93v4V/mN+pE7dvxyPc1/h0EjgihC20OpRFsFW0ZJhxcHicgwSHfIhQigCB/H5weDB2MGrcWJhMqEPcNAQ0HDIYK5AiYBm8EcwO7AvACwQOUBHQFewYuBzgI8An6CrcLcAwADdQMJg11DZINSg2rC1gJHAevBEACDwAF/c75dfbM8obwHPAv8aDy8/K48nLySPPS9I324fhx+y7/wQP3CBUOvxKRFjoZAhzhHeUeeB+2H2ggjiFCIl0hvx6XG4oXThJDDHsFJf8E+WXys+v/5Sni3d+c3dfaMdjy1ZPTYtEZz0TNHcyZyzzM2M3gz03RBtIV02fVUtkQ3kfiBeZV6dfsPfKX+AT/QwXZCr4QdBZ9HLMheiWOKLwq/SwBLxkwFDBdL14u6y0SListVivsKKolDiKCHqoa7xY4E40PGg2FC4UKRQm/Bz4HzAbVBuIGpgVKBHcDLgNmA3MDEAMRA8MCBAJeAFz9h/nY9bPy7+9l7PPoTuZa5K7iGeCr2+zW5tKtzyDO38wyzOvLn8viy7fNntBQ1JTYydy44Brloen47eXytvie/tEDeQjGDOUQShSqFnEX8BdPGYEa7xp6GoYYEBYwFAkSiw/zDFsICQP+/qH7sPn39272wvUz9kv39feZ+cb7Of8TBNQJDBCqFeQalx8KJBIoWyoLLPkt5i/GMT8y/TBULo4qgCb6I7Ui3yEiILUawxIEC10FdwNZAwcC+P5L+oz1AvL874jv5+5s7f3qxOiK5/TmnuZG5hnmaeYb5z3ooulX627tI+/R8P3yvvTi9Xz2LPjj+tv9UADFAW0CYgPXBPEG8AggChkLgwtMC3UJbAayAuD/9f3p/C370/it9cDx5e4R7NDoKeYI5OrjIOVI5dDkmeNZ42bkxuX/5rbnD+nE64LvFPPN9cz3qvm3+hX7UPvV+6D9CACMAwMHqwnmCisLLgufCxkMxAvlC1EM0g1aEPoS2RSGFXoUcRLQD7MNRQ0cDb4M+gtkCvoImAcaBbYBh/7u/M38+Py//B/8ePzX/q4BYAS0BTYHYgn3C2MP5BLnFqYacB3EHqofXiDzIOggjSBkH1UdcRvnGUMYaRUAEqUPuA6fDS4LRQcrBDMCUABR/oz7CPk090v1AvNR8KftKOtK6UPnZ+Va5Kviy+HV4CzfVt533mTfPeFA4oriWuN+5GfmVehO6arpX+om69LsXO9P8nP1n/hM++b8PP2S/DT8BP2S/m8AcQH7AM3/O/7t/B78gvvb+oL52vcA9tPzJPI08cPxg/NW9MfzHfID8dbyBvbM+WL9KgDaAzUIhAwEEeYUjBi8HEQg/iJ4JbEn7CqhLhwx2DFDMAEuGyzIKqspQScGI4kdFxhdFDwS7Q9RDIsHdwLF/dr5U/aB82PxpO8f7lfs+Or16YfpMenv6KPoIOli6nDs/e5W8l72BPos/fz+zgAvA6kGwgmgC7sMjg6eDzcQ3w9RDrwMNwsCCV8GhQOD/5z79ffm9FnzkPLB8Druous56V3n+eUs5WLlqOYb6KnofOm36r/shO+z8QXzf/SX9Yn3OPrV/GH/PAEFA78EzQb+B40IzwhOCb8KCgyfDIIMDQw+DDUM/QsLCxoJcwejBVME3gPkAqgBYgCi/rD89PrG+e74svgS+AH2lvOU8bDwbvHz8SHyi/HV8B/xn/LY9Ib31vn9+7j/iwSLCc0N2RAIEwcVrxcxG24eJyAiIBoghyC/IYYiGCKmIK4eGh3GG7cZwBYoE74OJQotBmwCa/6o+T30ZO/w6hzoMeb+42XhxN4k3Zjdc98R4W3iA+Nm4yHkyORv5Rbmt+V95Qvmw+Zg5/jmiOXT5MzkIuVO5kfnpedS6NfoDekV6q/sE/LX+CMAOQd+DQMT2BiNH4ImCi3dMjU4hD0jQ0VIwEsPTr1PalHZUj5Sh0+WSj
lEej3rN0kzyS1mJ2UgRxm8EhoMYgQl/Un29+4O6CnhfNui2JrWedNA0NLN78zBzSzOLM6bzqbOc8+x0VDVctkO3ZnfGOHy4vbktOVX5j7oNetr7RLue+657xrzOfca+17+oADzAGcAIgDM/xX+5vre9yr17vM99JHzjvHp7sLrm+pz6+zsH++Z8DfxDfIC9Vv5TP/KBZYLkBHNF/cc2yByI8QkGyYAJpUmuyazJ5Yn8iXxI38ghhymF/AR2AxgCSYHxgN1AM78CPhM9mD2lvYP9YvzUfOd9EP3b/gS+iz90P/pAbADWgXCB70JHwv/DLUNXQ6WDgAP7xB1ESkRuA9HDKcIcQXmAl8CmgJmAtgB/P9q/en6HPk89y72kPXJ9DD0B/JC8JzwWvKT9cL4kPn3+Sz8Z/+gAyQG9AXaBPsEWAa0BWEE1wJuAeMBsQEH///66/RF7ynsbulT6EnnwuYm6bDrIO0y71/wF/F183f1ePnL/vkCcgipDhAUdRnpHFgdzhzgGg8ZXhjcF0AYgRiXF2gT8gsqAyD64PGE63nmlOI732bbbNgX1XfSMtF10I7QmtBE0MDRH9a33N7kTO2z9VH8rgCABDMJFBBCF7QckSDcI+wn1yw8MdY0xzdSObk5WDjZNBgwJitBKHknmyXKIdMbQxTpDQUINgFd+r/zfu006YLltuIy4fngHeLz4nPj1+LQ42fnOewi8T71fPiv+2P/uAK9BqYLMQ/fEJgQcxDdEWQUxRVzFeQUABNAETgP7QuBCgQKqAhsBuUB8/wV+Yn3yvaf9eTzg/AM7Qjr0ukd6SXn5eJl3ivb09rs2pra8Njx1rPWuthD26XcDt7j35fi9+V86RztIPG79e36Kf9cA9gHkAvsDnsRmRMjFZ4VoBWBFmkYEBovGn0ZYRjrF6gYlRjJGIkXvBWGFOoS7RCWD1UOuw1UDVQMtQo4CXEIQQhWBygF+AJUArUDUAXLBkwHxgcyCCoJ+AnCCgwMzAxLDHsLuwrBCRMIawaGBToFywTvAxIDMQKtAP/+Yf6x/wsCBgPmAeb/if/LAKICwAPSA3MCLQETAXcCTgWXB/IHgAhtCiANyA8MEG4Otgy4C3ELJwtMCYwGqwNaALT8dfnj9Z3xnOxI54PiWt0714zQWct+xxzEUcH3vne9o7y2u8S6f7qQu/W9BcJ8xjTKg81C0PHSedbl2sne6OKh5+nr8O839CL4b/z/AKoFfgsIErcYHR+YIxUnCyuYLxc1VDrNPZo/P0GYQ1pGMEnJSuhKn0r/SXFJTkiCRS5BODxeN/8yBi7yKFYjcB1oGKIU9hBeDcYJNAasAxQBYv+R/kH+j/69/or/7wD3AbwCjwLSAakAFv/K/ND5GfaR8EfqEOSC3gXaB9YH0pbONsuByC3HpcVTxI7DWcQuxpbIdMv7zVPRddVu2t/fF+Xd6mTwh/bQ/HcCIwfuCjMOCRJ9Ft0ZCRxUHAIbIRpvGlQaTRptGEUVaBPCEv4SYxOfEtQQlQ2kCrwIwQZYBWwEpANzA+YCHQHX/9D+j/7u/TP8MPmg9YXyovBZ7xTu9OyL66HrKu1876/xJ/R09rb5jv0LAe4FgAspEsMYiR3oIXklwijtK0wtjizjKQEmpiHdHLIW6g+zCgsGzgGb/aL4KfR67+rpw+R74OXcuNlL1mHT9tGc0cvR4dJh1OjVQteP2bbc+OCZ5Tjq5O/Z9QH8aQJ0CHQOWhQvGfcc/x+HIpklPykALHct/C3ALT8tEyxkKdElqiFXHR4Z8RPJDQMINAIT/S74TPPP73Ltw+sO6mjoq+a35KLjTeR35hrqzO268PLyw/OK9D71GvY69xr4zvnC+9H9ff+kAKIBxgIqA0MD5AKFAtsDewZNChwO+xBrEgsS9w/6DZMMVgyBDCYM7AuOC6ELIwwYDFML8gkjB1QEFQH4/Qn8C/tY+6j7FftJ+hH5gPYI893uheuV6W3nS+XC43viIuJM4tbizOOC5X
3naOk+67rsIu7J8Ar1Efr7/w8FiwkODp0S+RfaHdQjFSpFLy00RjgCOyU9ez00PN04MDQiLn0n7yCcGXERLAkiAKr2Oe374n7ZatH1ynrGU8Ndwe7AocG3w/jGI8oNzlTSV9ed3IviUul08Ab4Kv8+Bs0MFxPBGEIcox48IEYhSCF4H+ccjBk+FjMTcg8lC58GRAKP/sD6zfbf8gjwC+777AXsrerD6cjpyOqs7HLuyu+y8Onx2vOM9uP5Mv2nAZgGtQtNEQwW4RkrHh4iEiZIKbQq5Sq+KicrVCwcLZQsCivxKOwmtyQsIhkftxs/GNYVwBR0EwgR5Q2XChUItgbYBeUD4wB5/TL66/dT9j/0rvHd72vti+s16SzmgePX4Nfeld1X3CHbV9ry2djZNton2vrY4dYe1H/RX89kzR3Lz8gJx37GUcdFyTvLVcxJzafOztBR1P3XF9ye4BvluenO7kjzfvjv/okFVAvID5QTQBhcHb8hkyUGKaUthDIaN3Q6pT3UQMRDDUYcR55H30cORzhF4UNwQqlA2z0ZOkM2BTL/LJ8mYx8RGUUUYxAaDN4GcQEc/Vv61vdz9W7yd+7t6XDl1+Kq4UPi4uMp5cvlUOax5gboLuvY7b7v5fAq8gn1GPgR+iz7Y/t5+zz8Lf3A/T395/yM/af/fwL2AyUEogTLBKEEhAQhBNIEEwb9BkwGOwRHAhkBDAAd/4b9wfvo+ff4Gvnw+KH2f/If78Ds8+zM7cTsn+sW62Dq/epk66HrBO3l7Sbty+s66xzrUOv460LsE+347vzvOu887OjncuXQ5Yrn8ei+6F/nmea95/npFO3/7jPvhe5272zzefnB/+cD3gYIC7EPjRRUGfccCSGfJjMt1DM/OXk7sjxtPf09GD8ZP1k9BTojNnQyYi5QKjcmjiEKG3IScAkiA9X/gPyy+GP02+8t61bla+Aa3/Tf9+Br4BPeBN2a3VPg5+S76cjuVfE/9O34mP7yBfIL7xAOFboYYBzZIGIliCcEJvcgUhzsGRwZ2BhVF1gS0ApbAJH0TOot4ZLbPNpf2JbU39BQzDvJeckxyvfLIc6K0J/UHdqM4DHnIe3N8o35AP+0Ah0F7QY/CqMO1xCAEEQO6QscDNIMNAx+ChgIaAUWBPUD1wSrBeIFcgYfBygJsgqtC20NYQ8/ERsTixMcE9kTQhUnF70Yixc+FGoRMw/3DVsLWgeeAvL+//1V/g//8/4P/ED4rvRu8urxJPJ88g/zCvWT+B/8Df63/0EBGgN1AxoC+wDRAd8FGAvXDV4O6guvCAEHSQXEAsr/Hv6F/fz8Qfow9tbxu+/O76Tvwe7a67/naeP+3xveId3I3VXfo9874HDg9uC84qrjvuUX6ePrdO5073PwRPRv+bz/GwWcCBILRA3LD/QTohguHKAfHCP/Jkkr2C3tLeAtiC5BMKExdzGWMDswJDCoLuUrNygEI4od0BedEYgMSgiEBO0ALvxa9jPw6uq85r/ipN7l2tfXL9bg1brXBNuo3/nkxen47VLxqvRD+UH+TAOBCCoNDhHlE5IVqhY9GH4ZFBq+GbUX5BTKEYIOiAxxC24K4AdvBBoB8f2F+8X4dfY89ePz5PHJ7rrqYOa15FrkGOXo5V/lN+Wn5Z/mvueJ6Hboi+gt6evrve/488D2WPhj+aP5Y/rG+o/6z/nO+LT3m/f99rf1X/SM8uDvJu2B6kXoxOY/5aXkm+Uj6G3ri+0k7+HxQ/YJ++v+0gF5BJYIOg5uFbEckyKYJuApeC5MMxE3pTlKOs07fj4gQZtCs0F1Pmc77ji8NoQ0QTHVLOAm9CBoGx8WkRGZDPkFYv+O+Q71NfKT7wzs3+j+5Obgkt1O2gPZVtmU2TbandsL3gnhs+Iy42fk0+Y66cfqaOwb7wzySPQy9cr0GPVO9t32Lve89lP2s/bn9hf3E/cC97D3DPnD+p/7bfrA+Pv32vgs+z39a/7l/p/+D//g/8sAsQGWAW
kBbQE+AisDPAMDAkUAzv4i/nz+8f6c//P+z/3x/XH/CwEjAWv/RP2J/Zr/bgKvBQIH4gZuBo0FZAUMBswGiAa6BYwFQwXaBCwEfwPDArYAwf0I+6L4Pfch97H3o/lx+1v93P7h/2wAjgAWAND+if6A/h7/CABqAGQAwQC9AZ0DsgTzA9ABegDp/wMAmgBHAYkDHgZBCBcJwQg5CEwIvQkJC+sLOg21DnURhhQ/FiQWtxUpFccV+xZnFzIWUBSVE7wUahhsHFwgPSP1JIYmiyibKlQs+iwsLdYtBC4mLZ4qjyYAIu0cexaGDXkDq/kB8O/lZNuZ0IHFYrshsROomqAlmomV/JJPkk6TxZUxmTGdN6FzpTaq2a8mtoa8DMQwzInTItup42PstPUv/9cHexCCGN8fkCikMdI5WECOQ29FeEYFRc5CXEDePH44+jLnLJwm5CDoGjkW4xHWDIgI/ALQ/TH7hvg59kr0r/FZ8Ovwq/OE9+T7VwD7A3IGsgmMDXUSCxlCH+Ak6ynCLZswwTKGM0oyQS8rK7ol3B5/F64PagiSAd/6MvV28MTsRurH6Bbof+cF5mDkzONa5CHmVejQ6r/tHPF69Zj5zfzT/lL/Sv/y/mH9B/tH+Pv1ivOj8IXtKupy56DkYeEJ3zjdCNzY24TcF95Q4D7jyOYh62Xw6vVS/IQD0QpoEn8ZUx/JJIsoaCqdLM8ulTAPMe4vDy1LKcYkCR+8FzwQ3gihAUH5JfB75+bfstpx1hjT29Bez7nOq8+h0dHU/djp3NHhBuhV7jX1Y/wcBIgLJxIXGIEdDCN9KEstmjHONDs3/jitOs07iju6Opo53DeXNR0zYy98K0AnDiIKHewWaQ8sCHIBeft09fnuXuft38/YR9KSzRTKoMfoxWbE1sL9wnXE8cWByMbKO80A0N3RSdRY2J3c0OHw5kjrBe9v8DjxhvE78qLz5/Rp9vj31/jg+Tz7u/to/NX8Xf2p/kL/E//2/8kBWgWkCRsMsw3BDo8PcRAOESgR9BH5E2kWdRlMHGcfHyN8JvsoJyl+KJoohSmdKwQu6y9lMCkvOC6bLUktXisvJ1Qihh3fGB8VVBDhCt8FggFg/jj7HPho9AjxS+/D7u7uUe7A7FHs0uzM7qLwsfFX8ujyzPPy9H31r/XL9bj12PbN+JP7mvzd+3D6RPkB+YX5m/mG+Lr2yfSj80zzNPMz8tDw2O6A7HjqJOmr50DmkuS/4rngqd5X3I3aSNnl13HXMtfF147Y+dhM2TXa2ty64FPk0OZw6KnqBO5h8lL3Wf1qBPQKrA+IEo4VkxkgHvMh5yR1J/MpRiwwLngvfy84LhgrjCiLKK8prCo1KgcoMiafJbYlayUeJO4hIx/2HL0bSxuxG04brBkIGJsVeROoEUoPsgwXCYAFMQJJ/3b8efny9kL1UfP+73vrZObA4g/g3d2/3Avcztp32YPXl9bl12zYC9hN15jW+dcN2/zex+Ov6DLtw/K1+An+rgJlBmQKxw65EnYWzxmnHOAfJyE4IREhdiDPH6MeLhy+F44SdQ1mCWYG4AOmAVL/7fs/96jxZOxE6Qfomud65rnkZ+Mv4tnhc+Fg4cDhjuLB4/TkhuYE6BjqIuyC7czuIfHj9CX5vvxnALcDfwfbCsgN3g+9EJERXhKDE0MU7BNbEnwQiw4CDeoK4gdJBH0AYfzw+FD23/S19HT0VfOU8Znwp/Co8fTyVfTW9qr6pP+UBDkJsA11Eg4Y2B0XI1InPytGL8UzHzeIOMU3lTWfM0Qy1i+ULDMoACQeIQ8fnxxNGM4SFg3GBzADFP9U+lz2LvOX8HrulusW6KjkyuEC38nbNdjw1BLT/9JU1PbVj9er2MDZK9ux3GjeTuBQ4j3lyOeP6n7tfe+E8czzB/Zk+Pj6x/1aATQF8Aj9C/4NtA6PD14RmxOGFXAWYxWlE8gR0g+ZDsAN5Qz8DO4M9As2Ct4HrwWQBNkEhAWoBR8EgQEu//
T97v3//br87fpZ+JX1vfPT8kryO/JH8iry2/Eu8fzwDPL98/72w/mo+8H8Df1H/sv/+gGwA6UEbAXdBTUG3wUaBToEEQMGAU397PdY8hruxeuE6hjpJ+a14ifg89483xPg0OHg5Fzpve5M9AD6Rf/pBBALAhHsFhUciCErKM8v2TbLOyw9RTzHOko5UTgLN/00ojG8LHcn8yEvHOAWvRG1DDcHPQGp+5f3rPT58V/uQ+vC6ZDpCOrA6croMukB66TtzfAC8wH1jfcq+or8v/7BAM4CwQSIBW4FhgRTAyUCnwD0/n38tvkF9zr0hvH57hDs+OkH6cXocek56hvqLOkl6FTnzOdb6fnqLet06t/pT+rl64XtuO7d78/wPPHY8VXycfNY9ZH3MPq+/AP/+gFABeQIjAxVDyIRYBKyE9AUJRZ/Fy0Y/BdZF9IW9BUtFMURkQ53C80I0wY6Bc4DQQJSAE7+6/za/Jr9Wv5Z/mf+Mv5V/rf+g/8LAVACUAORAyYEWQViB8IJlgxMD2QRyRLdErESNBMaFJ8VCBeqF7oYTRlYGb8YthcuFv4TqhGnD1wN0Ar4B1cFcwPkAFP97vns9kj0MPK6787sbuqf6bHpNeoz6n3oIOY75CTjeuNW5P7kjeWH5Tzmd+fU6F7ptei0597mhuak5nPmJecB6EXoYulj6ljrH+w97JXsZ+6T8Sj1cfiW+/z+RQPsBokJ5AtiDnEROxQRFgsY2hndG6Adih72H50gXiCeH80d/RtaGusXURYPFXUTthJXEiUSOxHdD0IOLg0DDLoIpgU6A8AA7/98/h397fwj+xL5U/fz9ar1MvVw9Mn0ufWu9xr6v/tv/hsCTgWZCCwMxw6nESUUrRWqGIEbBh6wHx4gsB8kHvwb0hnPF8kUaRHtDUsKXQbzAUj9RPgC8iTro+Z247Hfz9q51UrSUNC/zvrL2cjvxvbFOsZHyBjLs83wzyjR8dBj0T/ST9Nk1VvXF9rh3v7kUurh7sbylfaC+sH+nwJqBQYJngxfETEWoxqjH+EjaSewKZApESggKDAodyj1KQ8sXi50Lzgv1C76Lpkujy3bKjkodifAKBQq3yrDKocoxyWpI8MgWh0nGAIQYAnUAxf/9fmW8xzs2OQD36Tau9dT1sfUztMe003TnNV31zXZoNta4WXpaPFj9tH51fzU/wEFBgpKDq8RIBNrFDEX5hmIG/wZ7xV/EvMPkw3TCeIEW/+s+ODxAeu85bfjX+PW4QXfqNov10XXJ9oL3rTicOdk7DHyn/cB/esB5QZ9DD0T5Bo8IkkpqS4UMdswPC+8LS4siitmKOwjwh5dF9UPVQjSAOT5bPMp7DPjYNifza3FHcJPwrPCbsL7wRrBwMK0xQzJ085r1EjZzN2P4Azk6Ok78Z35SAFtBZMH6gioCwAQlBM7FjEXcRgDHGQfXyEGIp8ilCT/JaokKyCrHBEdKCATIwMj/R4wGCUSDQ8ZDvwN+AsGCSkHcAaVBg0GiwRTApb/kv1H/FH8R/ya/Kb9lP9DAokDqwOeAwAF7wfzC0gPPRGcEtsTWBN/EeoOrg26DnQQsBDJDscK9wYVBPwANP3h+Nr0dfFz79fuVe/S79rvBe/I7THsw+rS6X/pkel06rbrUe1B7yDyvvSm9SL2W/bE9kX4/vpn/d7/TgFjAh0EsQVFB/YIBQn3BooEAgIgAi8DDwM6Ao8AFf4m/H76Gfkj+M32FPUn9P/zlvR69cT0OPP98Qry6vJN87nyZ/G98N/xP/M39GL0cfQr9A3zjPHO78TvN/G18pP0xPVK9yr5DPpD/JT/6QK0Bp8JTgxVELIUhhhTG+UcmB2kHXodrxwWHNsbUxtrG8UbXBytHHUcnhs8G00bkRsCHHUc0xwyHTAdihzHHN8cNB0zHrYdcBwZGvIWixT5EcwOwAsRCRMGagKQ/Sf4KvNn757rued05Obgdd603JvbfNpZ2PvVS9Oy0LrP8s5BzpjOr853zw
XRidLq0yrUjdMh0w3U7NYz2p7d6eAC5HLnkOsY8ED0uPcy+nr83/+EA68H8Qt1ED4VAxmsGw8e9R41H+ceMB6DHo8feiDHH68eyhyXGiEYdhQ+EdAOsAseCYYHFgYsBQAFTwXkBZEGsAZABrsF0QX2BskHZQgDCSsJjAliCVkKKQz6DacPkhDCEbcTCBVJFjEY6BkLHI0eJSGtIu4jTCVkJ5cqFS3hLYot+yvKKbMmzyK4H4gcyBdXEr0MsQaFAPP4MvBc51Te0dUbzh3Gyb79uFC0MrHtrr+s7KpDqkSq36qFreSwYrREuNW7pL8/xc/LtNKA2Izc+9+I47HmTerM7rjzQPkt/7kFqgzlEpUXLhyJIMck7SfOKXYr4S1/MJ8yoDTZNaQ2tDazNAIx8iwTKeQlECLOHaUaSRjsFm8WBBb+FdcVqRSME+wRlw88DukNMQ/rEJQQzw0tCsMGMwPh/tf48/FR6zjm3+Ga3vPay9bR00bS/9Iz1SnYf9rV3UXjUuo68fr2+fvMAEEHWw0jEr8WzBobHoog9yGuImoiVyENIAsfXB62G9YW+BBQCnEDQfyf9CrsUeNm2z7VctH2zWHK7MjRyu3PZdaN3JjiUupC83D9jgjgEfQZfiEHKYkwrjdPPc9AlUOTRd1GDUhxR5FDVj0WNBoqpiCtFu8MzwJy997smuKA2RvSHsqswkq8C7d9sgivV62MrlmxKLYavZbEgM1f1n3fZum68pf75ALBCSsRkRm6IZUo4CzwLVIumy3qLNIsUCs+KJQj+h3dGOEU/RHeDwsOKwsdB4ABV/zN9yLzGfDH7Wvr8Okw5yHlc+Ra43Liy+IF5NfmCev07hLzNPeZ+6v+3wBEAn4DPwa+CSwNMhF7FPAWSRmwGlUc2R0tHnsc7Bi2FNYRORBRDnELmAZWAZn8ovjb9fLylu+56/bm0eI437XcXtvz2lDaA9kd2TvaLNww3kvfnd+r38vfEeFB47rmo+rb7hn08fphAuoI2A5CE3kXvxwIIu4nfizgL0EziDarOW87dDuYOmY5lDY2NB4yPDGXMXkx4C+2LoEtYyuIKBMl0yH7HgMc1xfyEqMOeAtPCfoGowIX/Tr27e/26k/nTuSz4M/ctdkX14fUJNIb0OzPodAk0RTRKdGn0oXVg9gw2jba2Nop3ebfReKU48jj+eMH5Q7mfOYU5l/lxuSw5N7kzuWh5ubmQ+hs6pvt1PBk8+T1kvj1+1D/nAFaA7QFUAnVDWkRPxTLFRYXWRlyG+wcOhyLGUEXyRaGF/gXZBfzFR0T6A85DQQLFQqfCXQJOwrgCtYLCQ3mDo0RsxP5FKQVCBaoFioX9BfKGPkZ6BvnHCcdWRzeGuYYsRZDFf8TthI1EScP1A3cDT8O/g5oDp4MmgoMCIcFZQOHAeb/VP/R/jz+9PxF+u72lfP38FLvqO1f6w/peubc5L3k8ORk5aPlKeXu5IblBueE6XXsk+/e8g72DfiK+UH6/Plh+ar4i/dY9tn0h/NV8irxdO9i7VvrC+kr56jlUOQR4y/i9eFr4lTiUOKU4qLjmuSh5czle+U/5jnnb+hP6tzryu0b8D3y0fSH9uL3AvoW/ecACwXvCHcNxhNyG1EjTSprL280yjkdP29E2ElOTzhUIFj2WWlZNVjAVu5T/U/nSgVG+EBSOloxHyfKHXQVyA57CIQBRPoJ8kTpk+Hu2nfUS84ayJ/Cor5pvJ676LtYvNG8fr0KvzHC08VryuDPFNYw3Qnk2enf7l7zUPj0/VoDOwhiDE4PPxFWEtgSlhOZFAMWXxejGHwYGBdCFQkTvhAtDm0LVwh3BSICMP+b/E/6Hfmc+LD4Jfi49S/yCfCj7+/wCvN89TL5ov3eAfwEMQdWCWQLEQ2IDnwPlg9OD/0OMw5YDeAKJAZnAWX9tvrc+Hn2BfPA7wDtB+u46TvoYed95uTlaeZr59Dphe2l8RX2zvmN/ND+cAFMBLkHQQuYDoMRDBONE2
ATwxM1E/cQow29CdgGQQXvA4cCzf/e+5H4OPYw9ZL18vXO9Sv1HfQ59Jr10fc1+tH8ev/+Ab4DugS1BYEHPQrADeQQWRJvESEPPg0lDb4OHw+2De4KeQh2B/0HeQi0CFgIxwYrBTMEyAN7BGgFegZMB3kHTwdnBswFDAYvBn0G2AZUBvMFCgYZBuoFtAQDAqv/WP3W+/T6qvk8+Jz2dfX38/7x6O+i7YDsfOzr7KftQe4J70nwufG28pbzQPSF9Lv04PSJ9cz2l/hB+j37xftl+yr7fPtd+/P6g/o8++38nf5oAC8ClwSmB5wKfQ1cEAwTEhbcGacdziHpI+0joiOzI0kk6yN9Ibwd+BmSFh8T5w6YCtEFEwE8/M72ffER7C3nhOPe4KzevNym2w/cS92J3ofgJePS5qDqlu2u717x1vNE9yb7P/73/2sAegAOAPj/w//S/3j/Z/7m/OL6I/mj9xr2z/Ov8Ynv8u0G7bvskew+7MXr0+tT7L3sEO0y7cjuZPLq9mb7Kf8rAqsFaAmTDJIPERL4FMoYmxyjIMUk1Sf4KqUtXi/3MCQxJzAMLwoutS2VLTothSybKpUnEySHIGgc2hf2EqUNuQipBAcBV/6++0D50vbV85zwf+6c7XftEO1o62rpqegf6ffpFerp6OHm8+TO4zvjYePs48/kAeaw5+noIuq162rtQO9u8hr2GPr0/ZYAJgOKBVIHuwg/CbEIMAiEBmEDpv6g+I7za/AE743tq+pP51nkduKY4fng1OAW4GHfdeCm4p3lvOhL653tY/B681P2Q/mr/P4AYweyDa8T+xhjHagh3iUtKrQtrC9xMHgw9S86L5UtQyxHKtonPiXSIDIbkRSJDSIHZwGy+6L1ifC17LLpyOao41DgNt6P3uHfIOFk4Abf7d+x44Tq9/J/+8ED6wsLFA4cXCNvKRAukDFXNRA6Kz9xQ3BGW0cQR85FckOWQEs8DzdoMPknlx4+FBYLBgM9+ofyqupd4iba0NHNyavDb77oudW2qrUntk+4a7uQvb7A9MMAx6rIQcpYzC7PMtPm1WLXZ9in2KHYAdmv2D3YuNhA2vnb/9xQ3WveeuDG42joGO238ab13Pnb/mwENQrvEKwYMyFSKc8vCjQ5Nmo3ZjjKO4dA40N5REND+kBLP40+mTwaOfozJi2WJ4AkYyPfIiUhfB5HG3oWihGkDNcH0ASDAgMBcQAWAEf/XP1u+cz0yfCI75fwevGg8PHwD/Kn82b11/R/9Nb0tvVM9rf2kffA+ML5W/pF+2/7hfv6+nX5S/mp+QX65vms9xP1vfUB+O74A/kD93r1rPWN9VT2j/cU+JP5DPtd+0j7mfqF+cv6+P33/1oAef6P/AP8Vvxq/MH8Lv9vATMCpAGw/y3/xv9/ANAB1QGJ/0X82vi5+HL6YPxF/cr7Cfty+vX4w/eI9wD3uPXZ86PyO/Ke88b1SvYX9TryAfAE8kn2w/qy/Cj7Wvu6/UgAoALeA0MEbgRKA/QA8/+uACcCuQQcBi0GVwTtAF39zfpJ+sf61Pvk/P79IP/X/zMAfQAcAawBTQG9AWQDpQYoC2UOJxA2EHMO4gxJC7wKtQuaDSAPQw8MDzIPBRDOEUoTqxNjE2wSohExERERZRGxEvUTthMOEbMMmggnB68HSQi1ByAFVAL1/7L+6/3a/DX7Ovky91D1YPRR9Mn0p/Qr9NfzSPQC9Qb2RfY09Qr0JvM+84vzJvOQ8u3xYfLH8pXyxvFv8MnuYu1I7Mns4e4K8UbzMvXi9sL4F/ro+mn8q/07AKMC0gOjBJUEcATyBAQFUARbA9IBUQAM/2P+d/6l/sv+//5v/3z/G/+m/kb/qQALAgIDXAR+BgAJ9wvrDgkQCBD5D/MP/BAfEkwTCBS0FDcVqRWyFccUzhHzDnAMewplCYcHaAWiAwgCjwATAHv+5vwU/MD7FfzK+4z6MvnJ+G/5F/od+qH41vZy9dz0XvSo8tjvouwP6n
Pp/OoO7LPr0uoh6tLrVPC78xv1QPU39dD2dvnk+9n9rf+MAYoDYgVfBmsGPQbqBioImgoCDXoNBA2DDIwMDQ3eDD4LdwkFCPIF0gOAAVL+xPrN9qry5+4u67fnw+QL4iXgb9814KHifObR64Xyl/knAQwJMhHVGPkfiCYpLPQx5DdDPi5EHEg1SUBIiUXnQHU6QzNwKp8hHxiJDa0ClfeM7r7m6d4m1izMu8K4up60MbFaryOvMrGbtdS8Osaizh3Xt99w6Lnyg/1kB7AQyxnsIsAsuzUEPbpB8EPmQ71CNUEyP9Q8kDhTMxEufSipIaQYgg6UBDn7d/H+5pncJ9Nry33FmsBEvT27NrqCunq61bl3uR25/bkcu6K9EcHpxLbJwM0x0nbWY9rZ3mPkdOoy8Uz3w/wHAt0GTQx/EhYZFR9wJAQpxC10Mq822jqvPt5BoUOVRLRFvEcgSmZMaU4UUJRQFVBWTlRLl0crQy4+wjgANEovTyt9J3ojgyBfHiId8hpFFyITGw7ACJYEbwDS+8P2Y/De6LfhW9uQ1ZvQNcwJyCTDvb7Ju0i6X7ocuyO8/r0rwGDDdsezy6rP+9KI1brYrNvJ3YTfbuD04bbjxOOC4/Pi7eH04cPhZeG44e3hk+Hr4cbiqOVD6SfsTO+N8tn1Hvm9+/P90AB6BEIISAsYDtsQIRSSF/sZUxwbHoUfPCF2IvMjXSRiJFokGSMFImUgFB/8Hs0fkyAfIY8hUiJcI9sjdiOgIm0hkx9CHnIe3B55Hs8ceRkjFgsTVRHyEAQQVw1jCeIFiANBAvAAwP6k/NH62fjC9uf1r/Uv9pH29/X99Xv2yfYs9y/3Svaf9Xb1l/Zr92z34vaZ9Vv1uPWh9Qr26fUd9o73bfnu+vr7a/wf/C/9Tv5Q/lj+Uv4o/68AHwAc/6P+GP5p/db7svhh9r/0mPJG8Nrtu+vZ6d3oR+gm6CLop+ei53npCOyb7oLxK/Ra9wj70P0+AFgBjAESA0MFcAdUCHAHmwamBoYHTwg1CEIHGAafBPoCzQE6AUwB2wL0BFwGXQiGCp0M2w3SDfAMsgsvC1wK3gh7BpcDqACU/l39jvxu+yL6f/lN+ev55vp1/Kn9Lf6f/nn/TQFUA3wEyQRrBGsDzQG9/wz+y/11/if/tP5L/Ur75flb+ZH54fmj+n77aPyq/fX+tgDwAb4CYgPrA44ECwZgCOwLGxAJFHcWkRdnGIsZaRtEHcsdVxxvGeUWAhWbE8ESURBfDdMJkwVaAR799/jz9L7xfu4563znvOMS4CbddttK2UfWMNOb0PLO5c6Dz8vQ1dKn1fnZuN8K5ajod+uH7/T1ZP47BzwPyhYvHgEmKS4xNTk6tj2AP6dABUGzQL4/RT2cOdc1LTEvKzojRxkTD8oE1fqE8qvq8uLJ21jVC9Ahy5LG4cHOvpq9ir4iwXfEjsiSzQ3U5NrP4cXoR+8Y9v381gREDQMV5xs3Ifsl+SkQLGMswSsCKrwo8SZNJIIhUB5MGkYVYg9NCcUDff4b+g32P/Oi8f3vYO657NjrMOx27SDvWfHM8/T2cvtoADsFCgk5CmUJYQjjB7EH+wdPCJUHcwYgBRsE1gNOBFIEwQMBAoQAfwDCARgE9AUsB/UIPAqDC7sM6wyUDcUNRw2DDMQLjQqLCfMIVQiMB3UGeQTsAQ3/sfsL+Df0nvD27F3pOebO5FXk5uOO4tDgtt9S35rfDODJ4D7iCuTG5Wjnq+k/7SXxePRM9m73G/i7+Gj6V/wP/5UBmQIlA+UDsAXWCDsLugx/DX0NrA2FDZYNwg3iDQwOxg6bD0MQcxDED7YODg4kDcMLbApGCBEHMQd4CJIJCgnUBvcDxwGaAJ8AXwHvAloEdQRXAxcCTQIuBKYG1ghGCu4K5AuMDCYNgA3JDfYNCw4YDoENaAxhChQIcwboBe8EIwMdAUj/AP6t/Qf+O/9WAJ8AjADRAK0BYwMwBWAHnwnxC58NxA1LDZ
oMYQzLDbcPXBCdD+kNqwwtDLcKFghlBTUC///9/er7+Plq9z/1t/Mu8qjwzO4Y7V/rNOhb46zdqti+1U7UTtLDz9fMO8oFydbIWchcx6TGQsY6yM7Kq8zlzo3QetPx16PcseGd5Y7ptu1L8on3WPyRABoF9AlQD6IUaBgPHP8fuiPbJrYorSl8K7QttS8UMT8xejEGMm4ySjNQM1MzADPxMt8yrjH1Lzkuuiy2K4gq8ijxJhQleyM/IUMfSRxQGf4WnRQREosPsQtqB4EDqv+b/Wr8kfpe+Ib15vI78QrwR+7s6w7p0eXi46biPOFN3zvdDNuL2VDYSNfc1lvWTNVe1MHTMdT11ZfXTdm72+Xeo+LK5pXqCu4T8lb21fpq/5kCngXFCKoLXQ5qDw8Ptw7MDuAPYhElErsRqhBRDy4Ofg2pDKsLLAoWCAUGCgRjApYBnQC4//v+4f0N/Qf8pfoY+Wf3HvbU9R/2Gvc195T2nfZY96D4tvnW+mr88f5bAfIC0wOmBOMFdgggDKMPVhLUErsRTBB6Dx0PAg/nDkAOfQ1lC40ITgWDAgQBaADL/9P+rP2h/Kn8+Pyw/Xb+gP4Z/nb95vxH/V7+BAAzAUUBzgB8AP8AQgKfA1kEPAVVBuAGAwbYA5oB6ABUAeEAAf9O/JP6tPqL+1P75Ple+K/3Avjz+Gr6FfxU/Z/9Uf3t/MD8ofzG+3r6ofiQ9hz19/P787P0S/Xu9UP2qPaB9135G/zF/4ED6gaVCooOCRMxF/UanR0hH2cf9B7dHkkeQx0IG9MYABe3FAgRbgvpBPn+fPmL9M3v7uqW5nbiX9/G3Ajbp9lo2NnXy9cC2C/Zctys4V/nKO2J8tv3gP0iA1cJNA/qFLAZAh5mIucm1CoiLR8tmCsQKQAmJSKRHTAZQRX8EagOGQuPBQP+bfWW7grq+uY05Cvhld5q3FvbIdsv3JfdzN4m36zf7+Ez5jLsLvKC9rD6q/4AAdcBMgF/AMMAGgIAAzcD3wJdAjoAU/0j+y35wvjY+Sz6Hvpa+vX7YgBSBQsK7gy/DYgNbw4/EbAU/RcTGgca0hgVGD0XUhWzEvkPBw0sClcGsgIhAJj+mvwz+R72CvTG8r3wtu2f7OPtq+5X78LwS/GI8hT0K/X/9oD46/n9+4v+2QHjBKAHQwv7DYgQ3BPcFV4YyRv7HTIgZSJjJEMmVSd6J+Ym2CQiIj4gqx0mGWISiQpwA+v9fvkL9r7ySPCH7RDq7uas4rDe59o416LT69G7017X/dyW4tjmJevC7jrx5vM698r7swExCKQNaRLbFW4YChxaH+QhgyIHIXof7R30G1EacBenE5kOdglFBrYDrABw+5/zk+vz5G3gstyp2e3W7tRZ1CjSHs9BzkbPv9Ff0+jRE9Gx0vXVCdtN4HvkUOfC6EHssPAH9lH68vySABAFUAkHDAgO9Q46Ee8UtRfPGI8Ydhf+F8AYwRmdGasXQhamFvwYNxt3G4EZSxjoGU8dACBuIQIj/CPSJjwpZyn1KjgsPS4TMlA1yDVkNSczxDDFLz4vWC5TLmwskScJIsoaIBRPEJgMawhxArz58vGP66Tlld+F2VXTNc6/yXPFesJ3vya8Trnvt8u3ubiluqW7Mb54wefDiMX/xcXGssq00CrXyNwH4ALjZuaE67bx4fZN+iT9pQAlBU8Khg0AD3cQxRKEFREZmxspHJIbDRoOGuYbKh1EHV0bnxj5Fp8V8xNzEpsPkw3xDEEMrAw0C80I2AZCBXgFFwccCBkKOwuDC6gLWArDCBUHRge8CAgKJgpXCDwGYwUZBuMHdwkHCvsI5QZrBVAFTQZlB/MGkQWgA2MBof/5/Uf9Yv3J/On6FflT+M74rfoP/Az8Avya/On+AgIaBIIE4gOEAxMD1gJUA5IErwZhCI8ILwiVCLoJBAr+CAoHsQUkBT4EQAOcAicDUgR8BOMCO/6N+LP0HvOS9Dr2GvYY9HXxne5Q6xDo6eXC5S
7mXuaI5Vrld+aP56voa+lz6T3pc+h16CbqGe2E8B/01vcn+9z9vP6v/tz+EgDiAAcB0QF3A8UF4ggSC5wMpQ83EzAWxxZVFcETZROsFJkWdxfMFxgX0RQqER0MSQflAyABD/4M+k713vKZ8pfyVvHN7obsfOs960/sNe8c84D3APuS/TgAqgKDBCoGzAewCbALpAz7DAEObw5RDjYOSA8UEbAQmQ72CycLnQyIDjMQNxENET4QRRDeEXoUWBa8FpEVjxMjElYRaxBgDvsKjQcoBPX/4Pte96vxtuwT6Enj1d9p3dnbo9p+2f/Z7dv13lbj2Oa76dDtB/MY+Uj/sATnCdkOwhJRF7obJx+tIWchkCCyIHMhciIeIdQe9Rz7GjkZVRZUEnYNdAcAAW/76/cN9mD04/Hr7inr6eaq4ijet9p82FbYHtsT3lXh6+T96GrtmO9B7zzui+337KDsKewb64PpLejE58rnieY35bDjJePB45/kguYQ6B/rmu8G9Xj7rQFtB78NlhRqHMMjdSkzLn0yMze5O3I+jT8SP948hDrbN8k0rTArKiAhsBieEAkIEP+y9CrpSd5f1ZPMzsQOvu24a7bAtBW0+rQet628LcT4zGTXyOCL6RTztf0cCrsXQSOkLEozXjgzPWNBOkV4SGBLcU18T6JPsE1NS2ZIpEQxPz44LTFqKk8kvR4jGOQQ6wlRAlP88fZG8KPqQ+VH4CXcvdiz1ajTatNL1T/Yatzb4AvlMOt48ZD2wvoQ/uEAKQN1BQ4GiQWVA7YAVf+Z/pL8PfcR8Dnop+A22abSocsHxf2+9bq9uFS2QbQFs0mzp7Mgtde3kbygw6vLJNSu3kXpK/QRAG8KuRPrGlYf9yMaKW8uBjKgNcs4nDvjPh4/LT5wPkg9DTxJO7c4mTWDM7AxHTD0LysuqSt9KHYktyEPINscChqWF28WfBecF0kXSBZTFOwSixGkDzIMTgiFBNkA3f2i+z753Pfn9lz16fS79Aj12fRz83rwvux26L/lQeTH4kDgANzI2APXSdY81SHUvNLP0PnPe9C/0UTUm9YO2GXajNzN3VffLOGi5HDpmOy47avtqO7m8Mj0ePj9+ur7h/uQ/Jz/igTMCJ4KPwynDlMSVxfrG7sfOyNPJhAoiimmKhYsli41MWMzATQ9M/EwXy6GK84nFySXHggYpRHyC68HBAQjAAv9avt9+t75afdW9HnyHPEa8aTyn/M09LD0X/We9+/5n/pP+nb6Ovz7/s4A1wEMAsUCjASeBfQF0gYJB8EG7gYHB9sGRgXsAST+lfu2+S/58vfs9ALyiu/Y7bLsYOuz6W/oVOjG6HToo+Y8453fA90A3E3bP9pq2APWLtTw0jPTldQR1rTX5NhQ283fT+X77HD1xv0wBjcOJhZeHewhPiREJt8pWy/BM6Y19DVLNR40OTIyLxIshCdrIVoaxxOPD6UMGwp+Ban/1Prw9bXxQ+7z6ifp+OiS6SLrDu0L7xfy3/SD9on3KfhZ+s7+MAOBBhQJ+ArZDLoOeBCJEUYSPhPdFAwXkBm1HFsftSF2JOEm4ShJKccnByfyJv8meCYWJJ4hxR5AGpkV5RH8DqoMaAmlA7H9ifjH9GjyAO8i7KTpyeeY5gzkheBG3MrY9NYI1SjSAs+8zHPLKcrwyOvINssjzmbPNs5zzLbMK8/30mrWW9m42/DeoOOE6Nbs3e/P8pT2JPst/3ECBgXmBz8MmBH0Fl0bHR9gIiAmhSkdK1krzCuCLL0tyi6+LWoq/yTRHr8Z1BXEEjUP/QrcBZ3/H/mT8X7qjOTp36bcYNq12HDYpdmr27zemOKq54rtBPOz+AT/ZgYGDzoW2xtLIHokZykULrExgTQVNnU1FzOgL+ErkyjdJSkjmR8YGq4TpAxlBZP+Ufi886DwJe6C7U/tGe5Z7yzwLfM19kX4Qflw+VD6ef1XASkFOAiUCZUJgQjvBjUG+QW4BFwD6gDO/Xn7ffkH+E
b3b/bK9ETxnewp6O7kY+PO4bffp90t2yTZW9cy1u7UyNI+0YrP086Iz5bQXtIl1YrX8NkX3Srh9uXn6h/w8fSy+pEBlwj3D9IWjBsnIFIkmiefKlktXzDlM8c25jiUOgk7gjrpOAI3LDUIM9UvZizsKdcnVCXvIKIbchWeDxMKaASe/wH8MPlM9mHzd+887N3oyeac5njmgeUJ5BnjvOMZ5IPkCuXi5CvlH+Xt5F/m9ejj7KvxePWA+Fv6Dvwj/sUAawMuBWQFdgXvBZwGAQgeCNIFbwJc/pL6FPgG9iD02PKU8HTuP+6L76PxDfRf9SD2f/f9+PX7SgAvBcEJ3g3sESIXdBzUIEIjvSSsJcwmKSjKKMsoeSjFJ00mpySFIWkdbRi+E0MQGQ6QC0gITAX2AvQB5gDz/r37ofeQ8wPx4e8V73Tubu0c7SPtuOzC67bqv+ki6pzqSOqe6d/oNukh6tPqlOq76eDosuin6SPraOyB7dntkO0K7a3r9emr6Q/qyOqF6o7p1ujb5zbnCuYH5JHivuFY4czhPuKc4sjjQuZ26Ofq+ez77bXwiPX2+yADzAgeDfwQwhXZG0ci/yg1L5o0gzmqPYdBpUXqSCZL7EsjSyRJgUYVQ/s/Jz1NObIzyywfJ4kjuCBpHAkW/Q4ZCvkH0AU3ATb6QvPr7unrE+ht43newNmQ1ozUqNN30/zSn9FQ0LfRFtUE2fTcGuE05nvs8vGY9hL7lv8TBFsIuAz/D5IRLxJSEhcTyxOHEgUQuAvVBrgCHgCv/n39vvqt9+j07/Id8UDuEOv46MLopOns6obrgOwc7iDwJPKH8zT0R/Uu9tP2DfYz9HrzW/Rq9rT3A/dl9OTw7+1T693pIenT6NzoEelE6n/s0O/l9Gf7eQHxBUEIEAqvC0YOChJoFssbgyGtJrYqKy5LMGIyQDQ8NMMz3jFrLpkrwifVI9wfJRukFjMR0gtcBgoBf/s09VDwi+xq6svq4+v27IvtfO297nbyEfem/KQASgSnCdAO0hQnGpMdWCL1JdcovSqMKdUnZSUkIjcfxxuDFX0OYAWf+hjx/uiD4fXaddQAzabFnr7OuFa1hbJfsU+yv7M2toy47brwvqzFYMy00mrYR9sV3urgp+QK6cjt+fP4+Ln8gQCCBSAMlxI1F78Z1ButHr0i5CZMLLswaDMCNVk0xTMvMlMxtzBmLf4oPSSjH7wcmxk5FgUT3xD5DsMMPwk1BVIDNAIOA0gDagHOAMUBGAScBh4GKAXiBDgEEgT3AwYFfQfGCjQLGAoyCOYDQgA8/Wj6PPj89iH1nPOv8NvrXumC6ajquutY6RjmWORU5MDkpOSt5nHqL+/W82f35Pgk+v/8JgDFA74FZwYtCYsL9w8gFv4X2Ba+FPwQfhHSE/4UchV9EwERlg5SDOwIGwaIBeQGdAdXBj0BgvuS+Kb1ifRu9eL1yPMw8J/sxuvK7XDtyOtN687ph+mb6qrr8+4M8b7vkO+o8GHyPPUT9ejzIPOa81T4D/1+//MB5AO0BB8E8AFcAnkEEAffCBYJaAcdB9wHMQd1B48IWAhvBhoEoAEvAYkC/AJsA2YF1QP1//D9mP3N/rv/+foA9QXz8/Nh90n5ZPn7+NP3nfZy9bT0zvY3+yIAXAWyByQH6gQiBF8FtwcpCs4KqgwLEJMT5RMuEuYOoQynC3AKwAoNCs8IeQllC1AN6Q6LC/IGhgZ1BzYHCwfhCJ8NDhPcFSsVsBScEwwQmw04DaUOLxEYEyMTKRJND20J8AHU/BT7h/vL+gf2ye49683tpfLN9Rfxe+fn35zev+K/5u7nuuZ75sLop+pG66jumPMJ+Rz6fPW98ZH0Q/t3AHECgv4m+yb6fPkZ+5L8Rv3+/Bz7qfzv/8ABggOvAu8DbwiUCyQNuAzrC+oM2Q9hFEUZGBv/GT0XLBT/E5UUpROIEXsNRwhnB3YHaAR0/7L47PMo8hDviOn64/7f292V3TjdZtq21z
rVUdPs1P7XptjW2DjYFNrk3ubiKeep6lfrdut87W7xDfir/d0B5wVPCA4MTw8PEesTvhZKHIghdSQJKJsp6yqOLrYweDM1Nbc02TTmNXM3+zfTOE04ZjfYNvU0ujEoLr8oNyNKHOcUtQ44ChoIVwUHApr8PfZ58VPu3esf6gLo0OS04x7k8eR45rXmVucI6W/qvOpK6snpW+pZ6zfsLO1+7oHuGu6K7S7rRehj5NPfKd3822ran9l42ejZityC3vneheAe4ALgQeFW4qHmu+vj8TL4RvwnADwCqgLZAbUATwDlAKYBTgGG/yj+kf0Z/mz/twAZAgMEmgMVAIj8J/vQ/b0CuAdEDTQSGhe5Gm4cXB+eJEYqjS+oNFU5yz00QEZCkkQZRhJHAEZ5QhU+pznTNfsx0y69J9QgbBsdFdcP4AknAqL5c/AQ5w/gBNcHzHHCvblmtP2wx6yyqYGofqotr4e1S7wfw8fLl9Yi4ULsYvmlBvYTpSCOLJY4tEIrSS5LKknKRaJCwz4GOaExvCeWHp8VMwxPA0/3T+2G5DbdHNkC0nfKOMRDvrm6DbqyuqO6w7pguy+9zb/ewUHG+MzB08zZXeCI523ydP4zCYcVYh9ZJc8pCCxCLW0v8DDMMn0wiSyFKRYm7SMSINQZGxR3Dd4ErP5Z+Ejylu4A6hvmTOXx43bjreQV5qroy+409WD7UQF9BwgUpSF4LBMzezlCP2hEdkg5SvZNTFJhVd1UBlH+SShB0zYoLscjmhhfDAD/dfdY8Wnobt7c1BfO9cpnyMrF5MQixlnHa8p4zZnOQtAi0/fSCdK70frP9tB81JTWm9la3djf1OK+5A/nSet17Nnrzuzh6kzsxO8l8gj4cv2oAoQL+xOxG0cilSbdK4cxTDT3NiE74z2uQP1BZ0BOPnw5fTHfKc4hshkrEecGjvr+8DTt/+sx6UvlGd/82y3eFuDA4hvleeQp4cvg++RQ7v34fwC9BUAJWQugD5kW6B3pJN4mXCT6IgUipiH5HowXJxMRERUNcwvGCOUEtwNqAjMBCAC3/d34zvJl79vsPes86q/lI+as5tzitOCE2pXVPdIyzQLLVspqx4HGRsngy+rNU801y4XJLMtez3/SfdQb1oTYP96U59rsIPEO9hH6PAGvB74Ljg9ZFJQaaiI7KsMv1DJSM/8ztjdLOio7UDm1NgY6QkA2R3FLTEugTLdQelWaVSZPV0ldRURDVkHPOw83czRiL+QnGx8JFRANIgbA/eD2sPJU8O/tHev954nmc+ZJ5dfiPuDw2jLWbdXh1pPZedwK3XTa8tfL0s/Nycmfx8jFgcEFwAy/Vb0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAA==\" type=\"audio/x-wav\" />\n",
       "                    Your browser does not support the audio element.\n",
       "                </audio>\n",
       "              "
      ],
      "text/plain": [
       "<IPython.lib.display.Audio object>"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "!tts --text 'It must be remembered, however, that most modern printing is done by machinery on soft paper' \\\n",
    "    --model_path $test_ckpt --config_path $test_config --out_path out3.wav\n",
    "\n",
    "import IPython\n",
    "\n",
    "IPython.display.Audio('out3.wav')"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "simple",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.14"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
