{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 请完成本文件内所有的TODO部分\n",
    "# 参考multihead attention相关源代码\n",
    "\n",
    "import math\n",
    "import torch\n",
    "from torch import nn\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "import collections, re\n",
    "import torch\n",
    "from torch import nn\n",
    "from torch.nn import functional as F\n",
    "import math\n",
    "def count_corpus(tokens):\n",
    "    \"\"\"统计词元的频率\"\"\"\n",
    "    if len(tokens) == 0 or isinstance(tokens[0], list):\n",
    "        tokens = [token for line in tokens for token in line]\n",
    "    return collections.Counter(tokens)\n",
    "\n",
    "class Vocab:\n",
    "    def __init__(self, tokens=None, min_freq=0, reserved_tokens=None):\n",
    "        if tokens is None:\n",
    "            tokens = []\n",
    "        if reserved_tokens is None:\n",
    "            reserved_tokens = []\n",
    "        counter = count_corpus(tokens)\n",
    "        self._token_freqs = sorted(counter.items(), key=lambda x: x[1],\n",
    "                                   reverse=True)\n",
    "        self.idx_to_token = ['<unk>'] + reserved_tokens\n",
    "        self.token_to_idx = {token: idx\n",
    "                             for idx, token in enumerate(self.idx_to_token)}\n",
    "        for token, freq in self._token_freqs:\n",
    "            if freq < min_freq:\n",
    "                break\n",
    "            if token not in self.token_to_idx:\n",
    "                self.idx_to_token.append(token)\n",
    "                self.token_to_idx[token] = len(self.idx_to_token) - 1\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.idx_to_token)\n",
    "\n",
    "    def __getitem__(self, tokens):\n",
    "        if not isinstance(tokens, (list, tuple)):\n",
    "            return self.token_to_idx.get(tokens, self.unk)\n",
    "        return [self.__getitem__(token) for token in tokens]\n",
    "\n",
    "    def to_tokens(self, indices):\n",
    "        if not isinstance(indices, (list, tuple)):\n",
    "            return self.idx_to_token[indices]\n",
    "        return [self.idx_to_token[index] for index in indices]\n",
    "\n",
    "    @property\n",
    "    def unk(self):  # 未知词元的索引为0\n",
    "        return 0\n",
    "\n",
    "    @property\n",
    "    def token_freqs(self):\n",
    "        return self._token_freqs\n",
    "\n",
    "\n",
    "def tokenize(lines): # char level\n",
    "    return [list(line) for line in lines]\n",
    "\n",
    "def read_time_machine():\n",
    "    with open('./data/timemachine.txt', 'r') as f:\n",
    "        lines = f.readlines()\n",
    "    return [re.sub('[^A-Za-z]+', ' ', line).strip().lower() for line in lines]\n",
    "\n",
    "def load_corpus_time_machine(max_tokens=-1): \n",
    "    \"\"\"返回时光机器数据集的词元索引列表和词表\"\"\"\n",
    "    lines = read_time_machine()\n",
    "    tokens = tokenize(lines)\n",
    "    vocab = Vocab(tokens)\n",
    "    # 因为时光机器数据集中的每个文本行不一定是一个句子或一个段落，\n",
    "    # 所以将所有文本行展平到一个列表中\n",
    "    corpus = [vocab[token] for line in tokens for token in line]\n",
    "    if max_tokens > 0:\n",
    "        corpus = corpus[:max_tokens]\n",
    "    return corpus, vocab\n",
    "\n",
    "corpus, vocab = load_corpus_time_machine()\n",
    "import torch, random\n",
    "def seq_data_iter_sequential(corpus, batch_size, num_steps):\n",
    "    \"\"\"使用顺序分区生成一个小批量子序列\"\"\"\n",
    "    # 从随机偏移量开始划分序列\n",
    "    offset = random.randint(0, num_steps)\n",
    "    num_tokens = ((len(corpus) - offset - 1) // batch_size) * batch_size\n",
    "    Xs = torch.tensor(corpus[offset: offset + num_tokens])\n",
    "    Ys = torch.tensor(corpus[offset + 1: offset + 1 + num_tokens])\n",
    "    Xs, Ys = Xs.reshape(batch_size, -1), Ys.reshape(batch_size, -1)\n",
    "    num_batches = Xs.shape[1] // num_steps\n",
    "    for i in range(0, num_steps * num_batches, num_steps):\n",
    "        X = Xs[:, i: i + num_steps]\n",
    "        Y = Ys[:, i: i + num_steps]\n",
    "        yield X, Y\n",
    "        \n",
    "class SeqDataLoader:  \n",
    "    \"\"\"加载序列数据的迭代器\"\"\"\n",
    "    def __init__(self, batch_size, num_steps, use_random_iter, max_tokens):\n",
    "        self.data_iter_fn = seq_data_iter_sequential\n",
    "        self.corpus, self.vocab = load_corpus_time_machine(max_tokens)\n",
    "        self.batch_size, self.num_steps = batch_size, num_steps\n",
    "\n",
    "    def __iter__(self):\n",
    "        return self.data_iter_fn(self.corpus, self.batch_size, self.num_steps)\n",
    "\n",
    "def load_data_time_machine(batch_size, num_steps,  \n",
    "                           use_random_iter=False, max_tokens=10000):\n",
    "    \"\"\"返回时光机器数据集的迭代器和词表\"\"\"\n",
    "    data_iter = SeqDataLoader(\n",
    "        batch_size, num_steps, use_random_iter, max_tokens)\n",
    "    return data_iter, data_iter.vocab\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "def sequence_mask(X, valid_len, value=0):\n",
    "    \"\"\"在序列中屏蔽不相关的项\"\"\"\n",
    "    maxlen = X.size(1)\n",
    "    mask = torch.arange((maxlen), dtype=torch.float32,\n",
    "                        device=X.device)[None, :] < valid_len[:, None]\n",
    "    X[~mask] = value\n",
    "    return X\n",
    "\n",
    "def masked_softmax(X, valid_lens):\n",
    "    \"\"\"通过在最后一个轴上掩蔽元素来执行softmax操作\"\"\"\n",
    "    if valid_lens is None:\n",
    "        return nn.functional.softmax(X, dim=-1)\n",
    "    else:\n",
    "        shape = X.shape\n",
    "        if valid_lens.dim() == 1:\n",
    "            valid_lens = torch.repeat_interleave(valid_lens, shape[1])\n",
    "        else:\n",
    "            valid_lens = valid_lens.reshape(-1)\n",
    "        X = sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-1e6)\n",
    "        return nn.functional.softmax(X.reshape(shape), dim=-1)\n",
    "\n",
    "class DotProductAttention(nn.Module):\n",
    "    def __init__(self, dropout, **kwargs):\n",
    "        super(DotProductAttention, self).__init__(**kwargs)\n",
    "        self.dropout = nn.Dropout(dropout)\n",
    "\n",
    "    def forward(self, queries, keys, values, valid_lens=None):\n",
    "        d = queries.shape[-1]\n",
    "        # 计算注意力分数，将结果除以 sqrt(d) 来标准化\n",
    "        # hint: 使用 torch.bmm 或者 @运算 来进行批量矩阵乘法\n",
    "        # TODO\n",
    "        scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)\n",
    "        # 掩蔽无效位置，对剩下位置计算softmax归一化后的注意力分数\n",
    "        # TODO   \n",
    "        attn_weights = masked_softmax(scores, valid_lens)\n",
    "        # 把注意力分数和对应的values相乘\n",
    "        # TODO\n",
    "        result = torch.bmm(attn_weights, values)\n",
    "        return result\n",
    "        \n",
    "class MultiHeadAttention(nn.Module):\n",
    "    \"\"\"多头注意力\"\"\"\n",
    "    def __init__(self, key_size, query_size, value_size, num_hiddens,\n",
    "                 num_heads, dropout, bias=False, **kwargs):\n",
    "        super(MultiHeadAttention, self).__init__(**kwargs)\n",
    "        self.num_heads = num_heads\n",
    "        self.attention = DotProductAttention(dropout)\n",
    "        self.W_q = nn.Linear(query_size, num_hiddens, bias=bias)\n",
    "        self.W_k = nn.Linear(key_size, num_hiddens, bias=bias)\n",
    "        self.W_v = nn.Linear(value_size, num_hiddens, bias=bias)\n",
    "        self.W_o = nn.Linear(num_hiddens, num_hiddens, bias=bias)\n",
    "\n",
    "\n",
    "    #@save\n",
    "    def forward(self, queries, keys, values, valid_lens):\n",
    "        # queries，keys，values一开始的形状: (batch_size，L，num_hiddens)\n",
    "        \n",
    "        # 将q, k, v分别通过对应W_q, W_k, W_v\n",
    "        # TODO\n",
    "        queries = self.W_q(queries)\n",
    "        keys = self.W_k(keys)\n",
    "        values = self.W_v(values)               \n",
    "        # 切出num_heads个头，维度变成 (batch_size, L, num_heads, num_hiddens / num_heads)\n",
    "        # TODO\n",
    "        queries = queries.reshape(queries.shape[0], queries.shape[1], self.num_heads, -1)\n",
    "        keys = keys.reshape(keys.shape[0], keys.shape[1], self.num_heads, -1)\n",
    "        values = values.reshape(values.shape[0], values.shape[1], self.num_heads, -1)\n",
    "        # 通过permute和reshape使维度变成 (batch_size * num_heads, L, num_hiddens / num_heads)\n",
    "        # 此时，queries[i], keys[i], values[i] 表示特定batch_id & head_id下的qkv序列\n",
    "        # TODO\n",
    "        queries = queries.permute(0, 2, 1, 3)\n",
    "        queries = queries.reshape(-1, queries.shape[2], queries.shape[3])  \n",
    "        \n",
    "        keys = keys.permute(0, 2, 1, 3)\n",
    "        keys = keys.reshape(-1, keys.shape[2], keys.shape[3])\n",
    "        \n",
    "        values = values.permute(0, 2, 1, 3)\n",
    "        values = values.reshape(-1, values.shape[2], values.shape[3])       \n",
    "        if valid_lens is not None:\n",
    "            valid_lens = torch.repeat_interleave(\n",
    "                valid_lens, repeats=self.num_heads, dim=0)\n",
    "        output = self.attention(queries, keys, values, valid_lens) \n",
    "        output = output.reshape(-1, self.num_heads, output.shape[1], output.shape[2])\n",
    "        output = output.permute(0, 2, 1, 3)\n",
    "        output = output.reshape(output.shape[0], output.shape[1], -1)        \n",
    "        # 此时output的形状是 (batch_size * num_heads, L, num_hiddens / num_heads)，把他恢复成 (batch_size，L，num_hiddens)\n",
    "\n",
    "        return self.W_o(output)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 4, 100])"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 测试一下效果\n",
    "num_hiddens, num_heads = 100, 5\n",
    "attention = MultiHeadAttention(num_hiddens, num_hiddens, num_hiddens,\n",
    "                               num_hiddens, num_heads, 0.5)\n",
    "attention.eval()\n",
    "batch_size, num_queries = 2, 4\n",
    "num_kvpairs, valid_lens = 6, torch.tensor([3, 2])\n",
    "X = torch.ones((batch_size, num_queries, num_hiddens))\n",
    "Y = torch.ones((batch_size, num_kvpairs, num_hiddens))\n",
    "attention(X, Y, Y, valid_lens).shape # it should be torch.Size([2, 4, 100])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "class PositionWiseFFN(nn.Module):\n",
    "    \"\"\"基于位置的前馈网络\"\"\"\n",
    "    def __init__(self, ffn_num_input, ffn_num_hiddens, ffn_num_outputs,\n",
    "                 **kwargs):\n",
    "        super(PositionWiseFFN, self).__init__(**kwargs)\n",
    "        self.dense1 = nn.Linear(ffn_num_input, ffn_num_hiddens)\n",
    "        self.relu = nn.ReLU()\n",
    "        self.dense2 = nn.Linear(ffn_num_hiddens, ffn_num_outputs)\n",
    "\n",
    "    def forward(self, X):\n",
    "        return self.dense2(self.relu(self.dense1(X)))\n",
    "    \n",
    "class AddNorm(nn.Module):\n",
    "    \"\"\"残差连接后进行层规范化\"\"\"\n",
    "    def __init__(self, normalized_shape, dropout, **kwargs):\n",
    "        super(AddNorm, self).__init__(**kwargs)\n",
    "        self.dropout = nn.Dropout(dropout)\n",
    "        self.ln = nn.LayerNorm(normalized_shape)\n",
    "\n",
    "    def forward(self, X, Y):\n",
    "        return self.ln(self.dropout(Y) + X)\n",
    "    \n",
    "class PositionalEncoding(nn.Module):\n",
    "    \"\"\"Positional encoding.\n",
    "    \"\"\"\n",
    "    def __init__(self, num_hiddens, dropout, max_len=1000):\n",
    "        super(PositionalEncoding, self).__init__()\n",
    "        self.dropout = nn.Dropout(dropout)\n",
    "        # Create a long enough `P`\n",
    "        self.P = torch.zeros((1, max_len, num_hiddens))\n",
    "        X = torch.arange(max_len, dtype=torch.float32).reshape(\n",
    "            -1, 1) / torch.pow(10000, torch.arange(\n",
    "            0, num_hiddens, 2, dtype=torch.float32) / num_hiddens)\n",
    "        self.P[:, :, 0::2] = torch.sin(X)\n",
    "        self.P[:, :, 1::2] = torch.cos(X)\n",
    "\n",
    "    def forward(self, X):\n",
    "        X = X + self.P[:, :X.shape[1], :].to(X.device)\n",
    "        return self.dropout(X)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "class DecoderBlock(nn.Module):\n",
    "    \"\"\"解码器块\"\"\"\n",
    "    def __init__(self, key_size, query_size, value_size, num_hiddens,\n",
    "                 norm_shape, ffn_num_input, ffn_num_hiddens, num_heads,\n",
    "                 dropout, i, **kwargs):\n",
    "        super(DecoderBlock, self).__init__(**kwargs)\n",
    "        self.i = i\n",
    "        self.attention = MultiHeadAttention(\n",
    "            key_size, query_size, value_size, num_hiddens, num_heads, dropout)\n",
    "        self.addnorm1 = AddNorm(norm_shape, dropout)\n",
    "        self.ffn = PositionWiseFFN(ffn_num_input, ffn_num_hiddens, num_hiddens)\n",
    "        self.addnorm2 = AddNorm(norm_shape, dropout)\n",
    "\n",
    "    def forward(self, X, state):\n",
    "        if state[self.i] is None:\n",
    "            key_values = X\n",
    "        else:\n",
    "            key_values = torch.cat((state[self.i], X), axis=1)\n",
    "            \n",
    "        state[self.i] = key_values \n",
    "        if self.training:\n",
    "            batch_size, num_steps, _ = X.shape\n",
    "            dec_valid_lens = torch.arange(\n",
    "                1, num_steps + 1, device=X.device).repeat(batch_size, 1)\n",
    "        else:\n",
    "            dec_valid_lens = None\n",
    "\n",
    "        # 自注意力\n",
    "        X = self.attention(X, key_values, key_values, dec_valid_lens)\n",
    "        Y = self.addnorm1(X, X)\n",
    "        \n",
    "        return self.addnorm2(Y, self.ffn(Y)), state"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "class TransformerDecoderOnly(nn.Module):\n",
    "    def __init__(self, vocab_size, key_size, query_size, value_size,\n",
    "                 num_hiddens, norm_shape, ffn_num_input, ffn_num_hiddens,\n",
    "                 num_heads, num_layers, dropout, **kwargs):\n",
    "        super(TransformerDecoderOnly, self).__init__(**kwargs)\n",
    "        self.num_hiddens = num_hiddens\n",
    "        self.num_layers = num_layers\n",
    "        self.embedding = nn.Embedding(vocab_size, num_hiddens)\n",
    "        self.pos_encoding = PositionalEncoding(num_hiddens, dropout)\n",
    "        self.blks = nn.Sequential()\n",
    "        for i in range(num_layers):\n",
    "            self.blks.add_module(\"block\"+str(i),\n",
    "                DecoderBlock(key_size, query_size, value_size, num_hiddens,\n",
    "                             norm_shape, ffn_num_input, ffn_num_hiddens,\n",
    "                             num_heads, dropout, i))\n",
    "        self.dense = nn.Linear(num_hiddens, vocab_size)\n",
    "                \n",
    "    def init_state(self, *args):\n",
    "        return [None] * self.num_layers\n",
    "    \n",
    "    def forward(self, X, state=None):\n",
    "        # 输入X形状: (batch_size, seq_length)\n",
    "        # 对每个token进行embedding并加上位置编码\n",
    "        X = self.pos_encoding(self.embedding(X) * math.sqrt(self.num_hiddens))\n",
    "\n",
    "        for i, blk in enumerate(self.blks):\n",
    "            X, state = blk(X, state)\n",
    "        \n",
    "        return self.dense(X), state\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "sz = 64\n",
    "num_hiddens, num_layers, dropout = sz, 2, 0.1\n",
    "ffn_num_input, ffn_num_hiddens, num_heads = sz, sz * 2, 16\n",
    "key_size, query_size, value_size = sz, sz, sz\n",
    "norm_shape = [sz]\n",
    "\n",
    "batch_size, num_steps = 32, 35\n",
    "train_iter, vocab = load_data_time_machine(batch_size, num_steps)\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "model = TransformerDecoderOnly(len(vocab), key_size, query_size, value_size, num_hiddens,\n",
    "    norm_shape, ffn_num_input, ffn_num_hiddens, num_heads,\n",
    "    num_layers, dropout).to(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "def predict(prefix, num_preds, net, vocab, device):\n",
    "    input_ids = [vocab[x] for x in prefix]\n",
    "    X = torch.tensor(input_ids, device=device).unsqueeze(0)\n",
    "    state = net.init_state()\n",
    "    output_seq = prefix\n",
    "    output_tokens = []\n",
    "    for _ in range(num_preds):\n",
    "        Y, state = net(X, state)\n",
    "        Y = Y[:, -1, :]\n",
    "        X = Y.argmax(dim=-1)\n",
    "        pred = X.squeeze(dim=0).type(torch.int32).item()\n",
    "        output_tokens.append(pred)\n",
    "        output_seq += vocab.to_tokens(pred)\n",
    "    print(output_tokens)\n",
    "    return output_seq"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\12743\\AppData\\Local\\Temp\\ipykernel_75796\\221368983.py:19: UserWarning: Converting a tensor with requires_grad=True to a scalar may lead to unexpected behavior.\n",
      "Consider using tensor.detach() first. (Triggered internally at C:\\actions-runner\\_work\\pytorch\\pytorch\\pytorch\\torch\\csrc\\autograd\\generated\\python_variable_methods.cpp:836.)\n",
      "  return math.exp(metric[0] / metric[1])\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 49, ppl: 10.146384677429436\n",
      "[1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3]\n",
      "The Time Traveller the the the the the the the the the the the the t\n",
      "epoch: 99, ppl: 8.927043322257841\n",
      "[5, 6, 1, 3, 9, 2, 10, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3]\n",
      "The Time Travellerin there the the the the the the the the the the t\n",
      "epoch: 149, ppl: 7.593424799326383\n",
      "[1, 4, 6, 11, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 8, 1, 3]\n",
      "The Time Traveller and the the the the the the the the the the tis t\n",
      "epoch: 199, ppl: 6.372593659458382\n",
      "[1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 8, 1, 4, 12, 12, 12, 2, 10, 4, 10, 5, 18, 9, 4, 10, 2, 1, 9, 4, 10, 2, 1, 9, 4, 6, 11, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1]\n",
      "The Time Traveller the the tis alllerarighare hare hand the the the \n",
      "epoch: 249, ppl: 6.670348959739874\n",
      "[1, 3, 9, 2, 10, 2, 1, 3, 9, 2, 1, 3, 5, 13, 2, 1, 3, 10, 4, 22, 2, 12, 12, 2, 10, 19, 1, 4, 6, 18, 9, 4, 6, 18, 1, 7, 1, 7, 1, 7, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3]\n",
      "The Time Traveller there the time travellery anghang o o o the the t\n",
      "epoch: 299, ppl: 5.9255641291814145\n",
      "[1, 4, 6, 7, 16, 1, 3, 9, 2, 1, 3, 5, 13, 2, 1, 3, 10, 4, 22, 2, 12, 12, 12, 2, 10, 5, 16, 5, 15, 4, 13, 2, 1, 3, 5, 15, 4, 13, 4, 13, 4, 13, 4, 13, 4, 13, 4, 13, 4, 13]\n",
      "The Time Traveller anof the time travelllerificame ticamamamamamamam\n",
      "epoch: 349, ppl: 5.491099216527042\n",
      "[1, 4, 6, 11, 1, 4, 6, 11, 5, 10, 2, 15, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 15, 4, 1, 3, 1, 3, 5, 15, 4, 1, 13, 2, 11, 5, 8, 1, 3, 9, 2, 1, 13, 2, 1, 3, 10, 2, 1, 3]\n",
      "The Time Traveller and andirecthe the tica t tica medis the me tre t\n",
      "epoch: 399, ppl: 5.316360769403887\n",
      "[1, 3, 9, 2, 10, 2, 1, 20, 8, 19, 15, 9, 7, 12, 7, 18, 5, 8, 1, 3, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 8, 1, 3, 9, 2, 1, 3, 5, 8, 1, 8, 1, 3, 9, 2, 1, 3, 5, 8]\n",
      "The Time Traveller there psychologis t the the tis the tis s the tis\n",
      "epoch: 449, ppl: 5.14583069187861\n",
      "[1, 4, 6, 7, 3, 9, 2, 10, 1, 7, 21, 26, 2, 15, 3, 1, 4, 21, 2, 15, 3, 1, 4, 21, 2, 15, 3, 1, 4, 21, 2, 11, 1, 4, 6, 11, 1, 3, 1, 3, 9, 2, 1, 3, 1, 3, 9, 2, 1, 13]\n",
      "The Time Traveller another object abect abect abed and t the t the m\n",
      "epoch: 499, ppl: 5.500637012062026\n",
      "[1, 4, 6, 7, 3, 1, 4, 15, 15, 2, 1, 7, 6, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 10, 2, 1, 7, 21, 19, 7, 14, 1, 3, 1, 3, 9, 5, 8, 1, 7, 14, 3, 1, 4, 5, 8, 1, 4]\n",
      "The Time Traveller anot acce onthe the the re obyou t this out ais a\n",
      "epoch: 549, ppl: 4.660627052085794\n",
      "[1, 4, 6, 7, 3, 9, 2, 10, 1, 5, 8, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 16, 1, 8, 20, 4, 15, 2, 1, 9, 2, 1, 9, 2, 1, 7, 13, 4, 22, 2, 1, 7]\n",
      "The Time Traveller another is o o o o o o o o of space he he omave o\n",
      "epoch: 599, ppl: 4.952674153824536\n",
      "[1, 4, 6, 11, 1, 8, 7, 17, 6, 1, 4, 1, 5, 11, 1, 4, 1, 3, 9, 2, 10, 2, 1, 4, 1, 4, 1, 4, 1, 18, 7, 6, 11, 1, 4, 6, 11, 1, 4, 6, 7, 6, 11, 1, 4, 1, 4, 1, 8, 7]\n",
      "The Time Traveller and sown a id a there a a a gond and anond a a so\n",
      "epoch: 649, ppl: 4.87547157504801\n",
      "[1, 4, 6, 11, 7, 1, 5, 13, 4, 8, 5, 12, 19, 1, 2, 1, 2, 4, 21, 7, 11, 19, 1, 2, 15, 23, 1, 2, 24, 5, 8, 3, 9, 5, 8, 3, 9, 2, 8, 3, 9, 2, 10, 5, 8, 1, 13, 2, 1, 13]\n",
      "The Time Traveller ando imasily e eabody eck existhisthestheris me m\n",
      "epoch: 699, ppl: 4.6201661818905455\n",
      "[2, 1, 4, 10, 18, 14, 20, 4, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 1, 5, 11, 7, 23, 1, 16, 4, 8, 7, 10, 1, 4, 8, 5, 11, 7, 10, 1, 4, 8, 5, 11, 1, 4, 8, 15, 9, 2, 11, 1]\n",
      "The Time Travellere argupasssssssssss idok fasor asidor asid asched \n",
      "epoch: 749, ppl: 5.119459017435923\n",
      "[23, 6, 1, 7, 16, 1, 4, 1, 3, 9, 2, 1, 4, 22, 2, 10, 2, 10, 2, 4, 11, 5, 13, 2, 10, 5, 6, 3, 1, 17, 2, 1, 4, 12, 19, 1, 9, 4, 6, 1, 4, 6, 1, 4, 6, 11, 5, 13, 4, 6]\n",
      "The Time Travellerkn of a the averereadimerint we aly han an andiman\n",
      "epoch: 799, ppl: 4.515537072496862\n",
      "[12, 2, 1, 20, 8, 9, 4, 20, 8, 2, 1, 3, 7, 20, 8, 1, 17, 4, 23, 1, 4, 8, 1, 4, 5, 11, 1, 3, 7, 14, 6, 1, 4, 6, 1, 8, 4, 6, 1, 8, 4, 6, 1, 8, 4, 6, 1, 8, 4, 6]\n",
      "The Time Travellerle pshapse tops wak as aid toun an san san san san\n",
      "epoch: 849, ppl: 4.919935082334234\n",
      "[1, 4, 6, 7, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 8, 1, 18, 2, 1, 18, 2, 2, 2, 2, 1, 7, 13, 2, 1, 7, 16, 1, 8, 2, 1, 8, 2, 5, 6, 19, 1, 8, 2, 2, 1, 8, 2, 2, 2, 1]\n",
      "The Time Traveller anothe the tis ge geeee ome of se seiny see seee \n",
      "epoch: 899, ppl: 4.390966177004239\n",
      "[1, 4, 6, 7, 3, 9, 2, 10, 1, 11, 1, 7, 1, 7, 3, 9, 2, 1, 7, 3, 9, 2, 1, 22, 2, 1, 3, 9, 2, 1, 22, 2, 1, 22, 2, 1, 22, 2, 1, 3, 9, 2, 1, 22, 2, 1, 22, 2, 10, 19]\n",
      "The Time Traveller another d o othe othe ve the ve ve ve the ve very\n",
      "epoch: 949, ppl: 5.213934549391714\n",
      "[1, 4, 6, 11, 1, 7, 1, 8, 7, 1, 3, 9, 2, 1, 13, 2, 1, 18, 10, 4, 12, 1, 4, 8, 7, 1, 4, 8, 7, 1, 9, 4, 6, 2, 10, 1, 4, 22, 2, 10, 1, 4, 3, 1, 4, 3, 1, 4, 3, 9]\n",
      "The Time Traveller and o so the me gral aso aso haner aver at at ath\n",
      "epoch: 999, ppl: 4.834040978009483\n",
      "[1, 4, 6, 11, 1, 8, 7, 1, 3, 9, 2, 1, 13, 2, 12, 4, 22, 2, 10, 1, 20, 10, 1, 20, 10, 19, 1, 7, 22, 2, 6, 2, 6, 1, 4, 6, 2, 6, 1, 4, 6, 2, 6, 1, 4, 6, 1, 4, 6, 2]\n",
      "The Time Traveller and so the melaver pr pry ovenen anen anen an ane\n",
      "epoch: 1049, ppl: 4.294176378605182\n",
      "[1, 4, 6, 11, 1, 8, 7, 1, 3, 9, 2, 1, 13, 2, 1, 3, 10, 2, 1, 22, 2, 12, 12, 12, 12, 19, 7, 14, 6, 18, 9, 1, 8, 3, 1, 8, 4, 10, 2, 4, 10, 1, 20, 12, 19, 1, 7, 6, 18, 10]\n",
      "The Time Traveller and so the me tre vellllyoungh st sarear ply ongr\n",
      "epoch: 1099, ppl: 4.383302325151059\n",
      "[23, 6, 7, 17, 1, 7, 17, 1, 7, 16, 1, 19, 7, 14, 1, 15, 7, 14, 12, 12, 7, 17, 1, 15, 4, 8, 4, 5, 12, 12, 1, 21, 2, 1, 21, 2, 1, 21, 2, 1, 15, 7, 14, 3, 9, 2, 1, 15, 7, 6]\n",
      "The Time Travellerknow ow of you coullow casaill be be be couthe con\n",
      "epoch: 1149, ppl: 4.948697013832402\n",
      "[1, 4, 6, 11, 1, 7, 3, 9, 2, 1, 13, 2, 12, 19, 7, 14, 1, 4, 10, 6, 1, 3, 9, 2, 1, 11, 5, 8, 1, 3, 9, 4, 3, 9, 4, 3, 1, 5, 16, 5, 11, 5, 11, 1, 3, 2, 1, 3, 5, 11]\n",
      "The Time Traveller and othe melyou arn the dis thathat ifidid te tid\n",
      "epoch: 1199, ppl: 4.759415535834227\n",
      "[1, 4, 6, 11, 1, 8, 7, 1, 5, 1, 6, 1, 8, 7, 1, 5, 1, 16, 5, 12, 7, 6, 18, 1, 4, 5, 3, 9, 2, 10, 2, 1, 3, 1, 3, 1, 3, 9, 2, 1, 3, 5, 3, 5, 3, 1, 3, 5, 16, 2]\n",
      "The Time Traveller and so i n so i filong aithere t t the titit tife\n",
      "epoch: 1249, ppl: 4.19821869679184\n",
      "[1, 4, 6, 11, 1, 7, 17, 2, 1, 18, 1, 3, 9, 5, 10, 11, 5, 16, 2, 1, 4, 1, 4, 1, 3, 1, 3, 9, 5, 13, 2, 4, 6, 11, 5, 13, 2, 4, 6, 11, 1, 9, 5, 15, 4, 6, 1, 9, 5, 8]\n",
      "The Time Traveller and owe g thirdife a a t thimeandimeand hican his\n",
      "epoch: 1299, ppl: 4.644625810849643\n",
      "[1, 18, 7, 6, 22, 2, 10, 19, 1, 4, 6, 2, 1, 4, 6, 1, 4, 6, 7, 3, 9, 7, 3, 9, 2, 1, 4, 3, 9, 2, 1, 4, 3, 9, 2, 1, 4, 3, 9, 2, 1, 17, 4, 3, 9, 2, 1, 21, 2, 1]\n",
      "The Time Traveller gonvery ane an anothothe athe athe athe wathe be \n",
      "epoch: 1349, ppl: 4.204590965399326\n",
      "[1, 4, 6, 11, 1, 7, 16, 1, 3, 9, 2, 10, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 5, 13, 2, 1, 21, 14, 10, 6, 3, 9, 2, 1, 3, 10, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9]\n",
      "The Time Traveller and of there the the thime burnthe tre the the th\n",
      "epoch: 1399, ppl: 3.7316199293563397\n",
      "[1, 4, 6, 7, 3, 9, 2, 10, 1, 11, 5, 8, 1, 16, 16, 16, 16, 16, 16, 3, 2, 15, 14, 10, 10, 2, 1, 3, 9, 2, 1, 4, 12, 12, 12, 12, 5, 6, 11, 5, 8, 15, 2, 1, 4, 10, 2, 1, 4, 10]\n",
      "The Time Traveller another dis fffffftecurre the allllindisce are ar\n",
      "epoch: 1449, ppl: 4.689187640612985\n",
      "[12, 2, 1, 5, 6, 4, 6, 11, 1, 9, 5, 8, 1, 9, 5, 8, 1, 4, 6, 11, 1, 4, 6, 11, 1, 9, 5, 8, 1, 16, 5, 12, 21, 19, 1, 4, 6, 11, 7, 7, 14, 17, 1, 16, 1, 4, 15, 4, 8, 1]\n",
      "The Time Travellerle inand his his and and his filby andoouw f acas \n",
      "epoch: 1499, ppl: 4.288036368127667\n",
      "[1, 4, 6, 7, 3, 9, 2, 1, 10, 2, 4, 11, 1, 3, 9, 2, 1, 3, 9, 2, 1, 20, 8, 19, 7, 14, 1, 4, 12, 1, 17, 7, 16, 1, 19, 7, 14, 10, 3, 9, 7, 7, 7, 7, 7, 16, 1, 11, 5, 8]\n",
      "The Time Traveller anothe read the the psyou al wof yourthooooof dis\n",
      "epoch: 1549, ppl: 4.04008023962874\n",
      "[1, 4, 6, 7, 3, 9, 2, 1, 18, 1, 8, 4, 19, 7, 14, 1, 8, 7, 1, 8, 4, 19, 7, 1, 15, 15, 15, 15, 15, 23, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 4, 6, 3, 1, 7, 1, 7]\n",
      "The Time Traveller anothe g sayou so sayo ccccck o o o o o o ant o o\n",
      "epoch: 1599, ppl: 4.484570186131531\n",
      "[1, 4, 6, 11, 1, 8, 7, 1, 18, 2, 3, 8, 7, 1, 3, 7, 1, 3, 7, 1, 3, 9, 2, 1, 3, 7, 13, 2, 3, 7, 13, 2, 3, 1, 8, 4, 12, 1, 20, 10, 4, 18, 7, 22, 2, 10, 19, 1, 9, 5]\n",
      "The Time Traveller and so getso to to the tometomet sal pragovery hi\n",
      "epoch: 1649, ppl: 4.183660897457866\n",
      "[1, 4, 6, 7, 3, 9, 2, 1, 8, 4, 3, 9, 2, 1, 18, 4, 21, 7, 12, 12, 12, 12, 12, 12, 12, 11, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 7, 16, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 13]\n",
      "The Time Traveller anothe sathe gabollllllld the the tof the the tim\n",
      "epoch: 1699, ppl: 4.443396568624179\n",
      "[1, 4, 6, 11, 7, 17, 6, 1, 21, 1, 21, 14, 1, 18, 1, 18, 1, 3, 9, 5, 15, 7, 1, 3, 9, 5, 8, 1, 4, 1, 5, 3, 1, 5, 6, 2, 1, 5, 3, 1, 5, 3, 9, 2, 1, 3, 9, 2, 1, 3]\n",
      "The Time Traveller andown b bu g g thico this a it ine it ithe the t\n",
      "epoch: 1749, ppl: 3.9169630813761014\n",
      "[1, 4, 6, 11, 7, 1, 8, 1, 5, 11, 1, 16, 5, 12, 21, 19, 18, 1, 17, 9, 4, 11, 1, 3, 1, 3, 9, 2, 6, 1, 3, 9, 2, 6, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 10, 2, 1, 3, 9]\n",
      "The Time Traveller ando s id filbyg whad t then then the the thre th\n",
      "epoch: 1799, ppl: 3.698254129089679\n",
      "[1, 4, 1, 18, 7, 6, 11, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 5, 13, 2, 1, 3, 10, 4, 22, 2, 12, 2, 10, 1, 22, 2, 12, 2, 10, 1, 15, 4, 13, 4, 6, 2, 12, 2, 10, 1, 15, 4, 13, 4]\n",
      "The Time Traveller a gond the the time traveler veler camaneler cama\n",
      "epoch: 1849, ppl: 3.9827215764795367\n",
      "[1, 4, 6, 11, 7, 17, 11, 1, 18, 10, 7, 14, 2, 1, 3, 9, 5, 11, 5, 16, 1, 7, 10, 2, 1, 7, 13, 2, 1, 7, 16, 1, 7, 13, 2, 1, 7, 16, 1, 7, 13, 2, 1, 7, 13, 2, 1, 7, 16, 1]\n",
      "The Time Traveller andowd groue thidif ore ome of ome of ome ome of \n",
      "epoch: 1899, ppl: 3.850424926341699\n",
      "[5, 20, 12, 19, 1, 3, 7, 6, 12, 7, 6, 12, 19, 1, 7, 3, 9, 2, 4, 1, 7, 3, 1, 7, 6, 18, 7, 3, 1, 7, 3, 1, 7, 3, 9, 2, 10, 7, 16, 1, 7, 14, 1, 17, 4, 23, 5, 13, 1, 3]\n",
      "The Time Travelleriply tonlonly othea ot ongot ot otherof ou wakim t\n",
      "epoch: 1949, ppl: 3.8521236246940833\n",
      "[1, 4, 6, 11, 1, 8, 7, 1, 8, 7, 1, 5, 1, 5, 1, 16, 5, 12, 7, 17, 9, 19, 1, 5, 3, 9, 4, 19, 1, 5, 8, 1, 5, 18, 1, 5, 18, 9, 3, 9, 3, 9, 3, 9, 3, 9, 3, 9, 4, 19]\n",
      "The Time Traveller and so so i i filowhy ithay is ig ighthththththay\n",
      "epoch: 1999, ppl: 3.8644920256107094\n",
      "[1, 9, 4, 22, 2, 1, 2, 24, 5, 8, 3, 2, 6, 15, 5, 7, 6, 1, 3, 1, 3, 9, 5, 8, 3, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1]\n",
      "The Time Traveller have existencion t thist the the the the the the \n",
      "epoch: 2049, ppl: 3.580170459777501\n",
      "[5, 1, 4, 1, 8, 14, 8, 14, 8, 9, 9, 9, 9, 9, 5, 4, 22, 2, 7, 18, 14, 18, 1, 2, 7, 6, 7, 17, 1, 5, 10, 1, 5, 10, 1, 5, 10, 1, 5, 10, 1, 5, 10, 1, 5, 10, 1, 5, 10, 2]\n",
      "The Time Travelleri a susushhhhhiaveogug eonow ir ir ir ir ir ir ire\n",
      "epoch: 2099, ppl: 3.8351008185449547\n",
      "[1, 9, 4, 22, 2, 1, 2, 24, 5, 8, 3, 2, 6, 15, 2, 6, 3, 1, 4, 6, 1, 4, 6, 11, 1, 4, 6, 7, 17, 1, 14, 6, 11, 1, 3, 9, 5, 8, 1, 3, 9, 5, 8, 1, 3, 9, 5, 8, 1, 3]\n",
      "The Time Traveller have existencent an and anow und this this this t\n",
      "epoch: 2149, ppl: 3.9626918763956613\n",
      "[1, 7, 16, 1, 3, 9, 2, 1, 10, 2, 1, 20, 10, 7, 20, 10, 7, 20, 10, 7, 20, 2, 6, 11, 1, 4, 8, 2, 15, 5, 22, 2, 1, 5, 6, 15, 7, 6, 2, 1, 15, 2, 1, 15, 7, 1, 3, 1, 3, 9]\n",
      "The Time Traveller of the re propropropend asecive incone ce co t th\n",
      "epoch: 2199, ppl: 3.545857327519196\n",
      "[1, 9, 4, 6, 11, 1, 8, 7, 3, 3, 3, 9, 2, 1, 5, 13, 2, 1, 3, 10, 4, 22, 2, 12, 19, 1, 15, 23, 6, 11, 5, 8, 1, 5, 3, 2, 1, 3, 5, 3, 5, 3, 5, 8, 1, 3, 5, 3, 5, 8]\n",
      "The Time Traveller hand sottthe ime travely ckndis ite tititis titis\n",
      "epoch: 2249, ppl: 3.9938982971746095\n",
      "[1, 9, 4, 11, 2, 1, 8, 4, 6, 11, 1, 11, 1, 4, 6, 11, 1, 11, 5, 13, 5, 8, 1, 9, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 3, 1, 17, 4, 1, 17]\n",
      "The Time Traveller hade sand d and dimis hfffffffffffffffffffft wa w\n",
      "epoch: 2299, ppl: 3.781584921940776\n",
      "[2, 1, 18, 10, 2, 2, 2, 4, 21, 14, 10, 2, 2, 2, 2, 2, 6, 11, 1, 15, 7, 6, 11, 1, 15, 4, 13, 4, 3, 1, 3, 1, 3, 1, 3, 9, 2, 1, 3, 7, 1, 3, 10, 4, 8, 1, 18, 1, 12, 12]\n",
      "The Time Travellere greeeabureeeeend cond camat t t the to tras g ll\n",
      "epoch: 2349, ppl: 3.7603781157212506\n",
      "[1, 4, 12, 1, 16, 7, 10, 1, 3, 9, 2, 18, 10, 2, 1, 3, 9, 2, 1, 3, 5, 13, 2, 1, 2, 1, 11, 5, 13, 2, 1, 18, 2, 1, 18, 9, 3, 10, 4, 6, 11, 5, 15, 4, 22, 2, 4, 12, 1, 3]\n",
      "The Time Traveller al for thegre the time e dime ge ghtrandicaveal t\n",
      "epoch: 2399, ppl: 4.0472983844843355\n",
      "[1, 4, 15, 7, 14, 13, 7, 13, 14, 6, 5, 3, 2, 12, 1, 4, 3, 2, 1, 4, 21, 2, 1, 20, 12, 2, 1, 3, 9, 2, 1, 4, 6, 2, 1, 4, 15, 4, 6, 2, 1, 4, 6, 2, 1, 4, 6, 11, 7, 11]\n",
      "The Time Traveller acoumomunitel ate abe ple the ane acane ane andod\n",
      "epoch: 2449, ppl: 4.171244990060059\n",
      "[1, 9, 4, 11, 1, 8, 4, 12, 2, 11, 1, 4, 6, 11, 1, 20, 4, 8, 2, 10, 2, 15, 5, 6, 11, 1, 9, 2, 2, 8, 1, 4, 6, 11, 1, 4, 6, 11, 1, 4, 6, 11, 1, 4, 6, 11, 1, 4, 6, 11]\n",
      "The Time Traveller had saled and paserecind hees and and and and and\n",
      "epoch: 2499, ppl: 4.12311984911506\n",
      "[1, 3, 9, 2, 1, 20, 10, 2, 2, 2, 1, 4, 11, 7, 22, 2, 6, 15, 5, 6, 15, 4, 3, 1, 13, 4, 12, 1, 22, 2, 1, 3, 9, 2, 12, 2, 1, 22, 2, 10, 5, 2, 10, 5, 2, 10, 5, 4, 12, 1]\n",
      "The Time Traveller the preee adovencincat mal ve thele verierierial \n",
      "epoch: 2549, ppl: 3.8386300395507758\n",
      "[1, 4, 12, 2, 24, 5, 8, 5, 8, 4, 5, 7, 6, 1, 5, 8, 1, 13, 1, 4, 18, 1, 4, 6, 18, 1, 5, 8, 3, 1, 5, 8, 1, 5, 11, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 19, 7, 6, 18, 9]\n",
      "The Time Traveller alexisisaion is m ag ang ist is id t t t t ayongh\n",
      "epoch: 2599, ppl: 3.44762449590462\n",
      "[1, 7, 6, 12, 19, 1, 4, 1, 3, 9, 2, 10, 18, 1, 7, 6, 2, 4, 6, 18, 1, 7, 16, 1, 4, 6, 18, 1, 21, 4, 11, 19, 1, 8, 5, 12, 2, 6, 5, 6, 5, 6, 18, 1, 8, 1, 8, 7, 10, 2]\n",
      "The Time Traveller only a therg oneang of ang bady silenining s sore\n",
      "epoch: 2649, ppl: 3.7978807415025573\n",
      "[1, 4, 12, 2, 24, 5, 8, 8, 5, 8, 7, 6, 1, 3, 1, 16, 5, 12, 14, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16]\n",
      "The Time Traveller alexissison t filufffffffffffffffffffffffffffffff\n",
      "epoch: 2699, ppl: 3.774640870170432\n",
      "[1, 17, 5, 12, 1, 4, 19, 2, 6, 7, 11, 1, 10, 2, 10, 2, 13, 5, 3, 1, 4, 8, 1, 4, 12, 19, 1, 4, 11, 5, 3, 1, 4, 6, 11, 5, 8, 1, 17, 9, 4, 6, 11, 2, 1, 14, 6, 1, 9, 4]\n",
      "The Time Traveller wil ayenod reremit as aly adit andis whande un ha\n",
      "epoch: 2749, ppl: 4.259221122535352\n",
      "[1, 4, 15, 2, 1, 13, 7, 22, 2, 6, 3, 2, 12, 1, 4, 21, 14, 3, 1, 3, 9, 2, 1, 7, 1, 21, 14, 18, 9, 4, 10, 1, 23, 1, 5, 13, 2, 1, 5, 8, 1, 9, 5, 8, 1, 9, 5, 8, 1, 8]\n",
      "The Time Traveller ace moventel abut the o bughar k ime is his his s\n",
      "epoch: 2799, ppl: 3.947572202216482\n",
      "[1, 4, 12, 17, 4, 19, 8, 1, 18, 2, 3, 3, 3, 5, 8, 1, 10, 2, 11, 4, 19, 1, 10, 2, 10, 2, 10, 2, 10, 2, 11, 1, 16, 5, 6, 1, 4, 19, 1, 16, 5, 6, 1, 3, 9, 2, 6, 11, 5, 13]\n",
      "The Time Traveller always getttis reday rererered fin ay fin thendim\n",
      "epoch: 2849, ppl: 3.814128110735106\n",
      "[1, 7, 6, 12, 19, 1, 4, 1, 12, 17, 4, 11, 13, 7, 6, 7, 6, 7, 6, 1, 23, 6, 7, 6, 7, 6, 7, 17, 6, 1, 3, 9, 2, 1, 7, 16, 1, 7, 16, 1, 3, 1, 8, 1, 13, 2, 1, 4, 11, 1]\n",
      "The Time Traveller only a lwadmononon knononown the of of t s me ad \n",
      "epoch: 2899, ppl: 3.89368096164336\n",
      "[1, 4, 15, 9, 4, 22, 2, 1, 6, 2, 1, 6, 2, 20, 7, 1, 13, 2, 1, 4, 22, 2, 1, 4, 20, 12, 23, 1, 9, 4, 22, 2, 1, 9, 4, 22, 2, 1, 9, 2, 1, 9, 4, 22, 2, 1, 9, 4, 22, 2]\n",
      "The Time Traveller achave ne nepo me ave aplk have have he have have\n",
      "epoch: 2949, ppl: 3.554548934670991\n",
      "[1, 7, 6, 18, 1, 8, 20, 4, 8, 4, 15, 2, 1, 4, 8, 8, 8, 3, 2, 1, 4, 5, 7, 7, 14, 12, 4, 10, 1, 9, 4, 1, 9, 4, 1, 8, 1, 8, 1, 8, 1, 8, 19, 15, 9, 7, 18, 7, 12, 7]\n",
      "The Time Traveller ong spasace assste aiooular ha ha s s s sychogolo\n",
      "epoch: 2999, ppl: 3.6092399655393277\n",
      "[1, 4, 6, 11, 1, 8, 7, 14, 8, 15, 9, 1, 15, 9, 1, 15, 9, 4, 8, 2, 2, 2, 1, 4, 6, 2, 1, 10, 2, 2, 1, 15, 4, 6, 2, 1, 4, 6, 2, 1, 4, 6, 2, 1, 4, 6, 2, 1, 4, 6]\n",
      "The Time Traveller and sousch ch chaseee ane ree cane ane ane ane an\n",
      "epoch: 3049, ppl: 3.7463121618666215\n",
      "[1, 4, 15, 2, 1, 18, 7, 6, 11, 1, 3, 9, 2, 1, 3, 1, 3, 5, 13, 2, 27, 10, 19, 7, 14, 18, 9, 3, 1, 8, 4, 22, 2, 1, 4, 21, 14, 18, 9, 7, 14, 18, 9, 3, 1, 19, 7, 14, 18, 9]\n",
      "The Time Traveller ace gond the t timeqryought save abughought yough\n",
      "epoch: 3099, ppl: 4.093547310333424\n",
      "[1, 4, 6, 11, 1, 11, 7, 3, 9, 2, 1, 7, 16, 1, 3, 3, 9, 2, 1, 16, 5, 13, 2, 10, 2, 10, 2, 10, 2, 10, 2, 6, 8, 5, 7, 13, 2, 1, 8, 5, 7, 13, 2, 1, 13, 2, 1, 13, 2, 1]\n",
      "The Time Traveller and dothe of tthe fimererererensiome siome me me \n",
      "epoch: 3149, ppl: 3.6685512145066856\n",
      "[1, 4, 6, 11, 1, 20, 4, 6, 11, 7, 17, 4, 7, 17, 6, 1, 18, 3, 9, 2, 1, 8, 1, 18, 7, 1, 18, 2, 1, 18, 2, 1, 18, 2, 5, 6, 18, 3, 2, 10, 7, 1, 4, 3, 12, 2, 10, 23, 12, 2]\n",
      "The Time Traveller and pandowaown gthe s go ge ge geingtero atlerkle\n",
      "epoch: 3199, ppl: 3.6638852984425307\n",
      "[1, 3, 10, 4, 15, 2, 11, 1, 15, 7, 14, 6, 1, 15, 7, 16, 10, 2, 1, 3, 9, 2, 1, 11, 5, 13, 2, 1, 7, 1, 7, 1, 3, 10, 2, 1, 3, 10, 2, 1, 3, 10, 4, 22, 2, 1, 3, 10, 2, 1]\n",
      "The Time Traveller traced coun cofre the dime o o tre tre trave tre \n",
      "epoch: 3249, ppl: 3.5334399308506774\n",
      "[1, 4, 12, 17, 4, 19, 8, 1, 18, 2, 10, 2, 1, 18, 2, 11, 1, 18, 2, 19, 2, 12, 2, 16, 7, 14, 6, 18, 1, 10, 2, 6, 18, 1, 8, 5, 6, 18, 1, 8, 7, 6, 18, 1, 4, 11, 5, 14, 6, 18]\n",
      "The Time Traveller always gere ged geyelefoung reng sing song adiung\n",
      "epoch: 3299, ppl: 3.4633836050833566\n",
      "[1, 4, 6, 7, 16, 1, 3, 9, 2, 1, 21, 4, 3, 10, 7, 14, 10, 6, 11, 1, 3, 9, 5, 15, 23, 6, 18, 9, 2, 1, 5, 8, 1, 4, 22, 2, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4]\n",
      "The Time Traveller anof the batrournd thicknghe is ave a a a a a a a\n",
      "epoch: 3349, ppl: 3.61885086613491\n",
      "[1, 4, 1, 15, 9, 4, 1, 6, 7, 6, 2, 20, 7, 3, 1, 14, 8, 1, 18, 2, 4, 1, 18, 10, 4, 5, 11, 1, 12, 4, 1, 12, 4, 1, 15, 23, 8, 4, 6, 2, 1, 15, 23, 8, 2, 1, 15, 23, 8, 2]\n",
      "The Time Traveller a cha nonepot us gea graid la la cksane ckse ckse\n",
      "epoch: 3399, ppl: 3.6802060426472285\n",
      "[23, 6, 11, 1, 7, 17, 6, 1, 7, 6, 1, 7, 6, 2, 10, 17, 6, 18, 1, 3, 9, 2, 1, 8, 4, 19, 1, 4, 5, 11, 1, 16, 10, 2, 2, 2, 2, 22, 2, 6, 1, 4, 10, 7, 5, 13, 4, 10, 19, 1]\n",
      "The Time Travellerknd own on onerwng the say aid freeeeven aroimary \n",
      "epoch: 3449, ppl: 4.207495070608731\n",
      "[1, 3, 9, 2, 10, 2, 1, 15, 14, 12, 19, 1, 5, 8, 1, 5, 8, 1, 5, 8, 1, 5, 8, 1, 5, 8, 1, 5, 8, 7, 1, 5, 8, 4, 10, 10, 10, 10, 10, 10, 10, 10, 10, 2, 1, 11, 1, 12, 5, 16]\n",
      "The Time Traveller there culy is is is is is iso isarrrrrrrrre d lif\n",
      "epoch: 3499, ppl: 3.6699872347272824\n",
      "[12, 19, 7, 14, 6, 18, 1, 4, 6, 11, 1, 18, 1, 18, 7, 16, 1, 8, 5, 24, 1, 16, 2, 15, 7, 10, 2, 12, 5, 6, 15, 2, 12, 19, 7, 1, 4, 21, 14, 10, 2, 6, 15, 4, 21, 14, 10, 2, 6, 15]\n",
      "The Time Travellerlyoung and g gof six fecorelincelyo aburencaburenc\n",
      "epoch: 3549, ppl: 3.5415019764748954\n",
      "[7, 6, 1, 4, 6, 7, 16, 1, 3, 9, 2, 1, 19, 5, 3, 1, 15, 7, 14, 1, 18, 10, 2, 1, 11, 5, 13, 2, 6, 8, 5, 7, 16, 7, 16, 7, 16, 7, 16, 7, 16, 7, 16, 7, 16, 1, 3, 1, 3, 1]\n",
      "The Time Travelleron anof the yit cou gre dimensiofofofofofofof t t \n",
      "epoch: 3599, ppl: 3.694564597640614\n",
      "[1, 4, 12, 2, 24, 5, 8, 3, 1, 4, 18, 4, 21, 7, 6, 11, 1, 3, 9, 5, 8, 1, 16, 2, 15, 7, 14, 3, 1, 21, 14, 3, 1, 19, 7, 14, 3, 1, 19, 7, 14, 1, 19, 7, 14, 1, 19, 7, 14, 1]\n",
      "The Time Traveller alexist agabond this fecout but yout you you you \n",
      "epoch: 3649, ppl: 3.4445758961980824\n",
      "[7, 22, 2, 6, 1, 3, 7, 1, 8, 4, 6, 3, 9, 2, 10, 7, 3, 9, 2, 1, 21, 2, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 16, 1, 4, 15, 5, 8, 1, 4, 15, 5, 11, 2, 1, 3]\n",
      "The Time Travelleroven to santherothe be ooooooooooooof acis acide t\n",
      "epoch: 3699, ppl: 3.6452061610239412\n",
      "[23, 1, 4, 16, 7, 10, 1, 15, 7, 1, 15, 7, 12, 7, 22, 2, 4, 15, 3, 10, 3, 10, 3, 9, 2, 12, 1, 11, 1, 3, 9, 2, 1, 18, 4, 15, 5, 25, 2, 10, 4, 6, 11, 1, 18, 9, 2, 1, 18, 7]\n",
      "The Time Travellerk afor co coloveactrtrthel d the gacizerand ghe go\n",
      "epoch: 3749, ppl: 3.6297597100255543\n",
      "[2, 1, 20, 10, 9, 4, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 13, 4, 6, 19, 1, 4, 5, 8, 1, 2, 4, 12, 1, 7, 10, 18, 14, 13, 4, 3, 1, 21, 12, 7, 6, 1, 3, 9, 4, 15, 12]\n",
      "The Time Travellere prhat a a a a a imany ais eal orgumat blon thacl\n",
      "epoch: 3799, ppl: 3.565488357329615\n",
      "[1, 4, 6, 19, 7, 3, 9, 2, 4, 3, 1, 8, 4, 12, 12, 1, 16, 7, 16, 1, 17, 5, 11, 1, 8, 19, 1, 21, 2, 16, 1, 10, 2, 16, 5, 13, 2, 6, 7, 14, 3, 1, 10, 4, 6, 7, 1, 3, 9, 2]\n",
      "The Time Traveller anyotheat sall fof wid sy bef refimenout rano the\n",
      "epoch: 3849, ppl: 4.144019723733258\n",
      "[2, 1, 7, 16, 1, 3, 9, 2, 1, 20, 10, 7, 23, 2, 15, 2, 15, 14, 6, 2, 8, 8, 8, 1, 17, 9, 7, 6, 2, 10, 8, 1, 17, 9, 4, 6, 1, 4, 6, 1, 4, 6, 7, 1, 4, 1, 4, 1, 4, 1]\n",
      "The Time Travellere of the prokececunesss whoners whan an ano a a a \n",
      "epoch: 3899, ppl: 3.666469699097201\n",
      "[1, 4, 6, 7, 16, 1, 3, 9, 2, 1, 21, 4, 10, 7, 14, 3, 9, 2, 1, 2, 1, 13, 4, 3, 5, 6, 11, 1, 3, 9, 2, 13, 2, 10, 4, 22, 2, 12, 12, 12, 12, 12, 2, 24, 20, 2, 10, 5, 6, 8]\n",
      "The Time Traveller anof the barouthe e matind themeravelllllexperins\n",
      "epoch: 3949, ppl: 3.8358401492841296\n",
      "[1, 4, 12, 17, 4, 1, 15, 2, 1, 15, 2, 20, 7, 6, 1, 18, 2, 4, 10, 11, 4, 6, 1, 15, 7, 6, 5, 15, 23, 8, 1, 4, 6, 11, 1, 4, 6, 11, 1, 8, 1, 4, 6, 11, 1, 18, 7, 16, 7, 6]\n",
      "The Time Traveller alwa ce cepon geardan conicks and and s and gofon\n",
      "epoch: 3999, ppl: 3.5739098155925655\n",
      "[1, 4, 1, 18, 7, 6, 8, 8, 1, 4, 5, 1, 4, 1, 4, 6, 11, 1, 17, 3, 1, 5, 11, 1, 3, 9, 2, 1, 12, 4, 11, 1, 4, 11, 1, 4, 17, 4, 17, 4, 17, 4, 17, 4, 14, 20, 1, 4, 1, 4]\n",
      "The Time Traveller a gonss ai a and wt id the lad ad awawawawaup a a\n",
      "epoch: 4049, ppl: 3.9371985833079086\n",
      "[1, 3, 9, 2, 10, 2, 1, 16, 14, 3, 9, 2, 1, 11, 5, 13, 2, 16, 16, 16, 16, 16, 16, 16, 5, 12, 21, 19, 1, 10, 2, 1, 17, 2, 1, 14, 1, 19, 7, 10, 2, 2, 1, 10, 2, 2, 2, 2, 2, 2]\n",
      "The Time Traveller there futhe dimefffffffilby re we u yoree reeeeee\n",
      "epoch: 4099, ppl: 3.3828176686759717\n",
      "[12, 2, 1, 5, 6, 1, 4, 6, 11, 1, 17, 1, 3, 9, 2, 1, 8, 4, 6, 11, 1, 4, 6, 11, 1, 4, 16, 5, 12, 17, 4, 6, 11, 1, 14, 6, 1, 14, 6, 1, 9, 4, 6, 11, 1, 14, 6, 11, 1, 14]\n",
      "The Time Travellerle in and w the sand and afilwand un un hand und u\n",
      "epoch: 4149, ppl: 3.7740824959037944\n",
      "[2, 10, 1, 4, 12, 1, 2, 7, 6, 11, 3, 9, 2, 1, 17, 4, 12, 2, 10, 2, 10, 2, 10, 2, 10, 2, 1, 3, 9, 4, 12, 19, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 3, 9, 2, 1, 20]\n",
      "The Time Travellerer al eondthe walerererere thaly the the the the p\n",
      "epoch: 4199, ppl: 3.5290792176928707\n",
      "[1, 4, 1, 15, 9, 5, 10, 2, 1, 6, 11, 7, 3, 1, 11, 10, 5, 16, 10, 4, 1, 21, 10, 5, 15, 23, 6, 8, 8, 1, 4, 6, 11, 1, 4, 6, 11, 1, 4, 6, 11, 1, 4, 6, 11, 1, 4, 6, 11, 1]\n",
      "The Time Traveller a chire ndot drifra bricknss and and and and and \n",
      "epoch: 4249, ppl: 3.3471532661552215\n",
      "[1, 4, 12, 2, 24, 5, 8, 3, 2, 6, 15, 2, 3, 1, 7, 16, 1, 3, 9, 2, 10, 2, 10, 2, 2, 11, 1, 7, 1, 3, 10, 5, 13, 2, 1, 3, 9, 2, 10, 2, 1, 3, 10, 2, 1, 3, 10, 4, 22, 2]\n",
      "The Time Traveller alexistencet of therereed o trime there tre trave\n",
      "epoch: 4299, ppl: 3.569538730805638\n",
      "[1, 4, 12, 7, 11, 1, 5, 3, 1, 17, 6, 1, 3, 9, 2, 1, 8, 4, 1, 13, 4, 12, 12, 1, 11, 2, 24, 20, 2, 11, 1, 22, 4, 8, 8, 3, 9, 7, 14, 3, 9, 2, 11, 1, 22, 2, 10, 19, 1, 4]\n",
      "The Time Traveller alod it wn the sa mall dexped vassthouthed very a\n",
      "epoch: 4349, ppl: 3.6903490406476114\n",
      "[1, 2, 24, 20, 12, 4, 5, 8, 1, 3, 1, 3, 7, 1, 9, 2, 10, 5, 13, 2, 10, 5, 13, 2, 6, 8, 1, 7, 14, 10, 4, 6, 11, 1, 4, 6, 11, 1, 17, 5, 6, 11, 1, 17, 5, 6, 1, 17, 9, 5]\n",
      "The Time Traveller explais t to herimerimens ourand and wind win whi\n",
      "epoch: 4399, ppl: 3.4565108222016194\n",
      "[1, 4, 1, 15, 9, 5, 4, 1, 4, 1, 4, 1, 12, 13, 4, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 4, 6, 2, 7, 16, 10, 18, 14, 13, 4, 6, 5, 6, 3, 9, 1, 7, 3, 10, 4, 3, 2, 1, 7, 3]\n",
      "The Time Traveller a chia a a lma l l l l laneofrgumaninth otrate ot\n",
      "epoch: 4449, ppl: 3.318267679484603\n",
      "[1, 4, 12, 2, 24, 5, 8, 3, 2, 10, 5, 7, 1, 14, 15, 2, 10, 5, 2, 10, 5, 2, 11, 1, 4, 22, 2, 10, 1, 4, 12, 19, 1, 13, 4, 12, 19, 1, 3, 2, 10, 5, 11, 2, 10, 5, 11, 1, 3, 2]\n",
      "The Time Traveller alexisterio ucerieried aver aly maly teriderid te\n",
      "epoch: 4499, ppl: 3.702607598459373\n",
      "[1, 4, 12, 1, 15, 14, 13, 7, 22, 2, 10, 3, 2, 10, 1, 4, 3, 2, 10, 1, 4, 12, 1, 4, 1, 4, 1, 4, 12, 1, 4, 1, 4, 1, 4, 1, 4, 12, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4]\n",
      "The Time Traveller al cumoverter ater al a a al a a a al a a a a a a\n",
      "epoch: 4549, ppl: 3.619610210918269\n",
      "[1, 4, 12, 1, 15, 2, 1, 13, 7, 16, 14, 12, 12, 19, 7, 14, 12, 19, 7, 14, 12, 19, 1, 4, 6, 11, 1, 8, 2, 11, 1, 8, 7, 17, 7, 16, 7, 14, 12, 19, 1, 8, 2, 1, 8, 7, 14, 12, 19, 1]\n",
      "The Time Traveller al ce mofullyoulyouly and sed sowofouly se souly \n",
      "epoch: 4599, ppl: 3.655162934666845\n",
      "[1, 7, 6, 2, 1, 11, 1, 8, 7, 16, 1, 3, 9, 2, 1, 3, 9, 2, 1, 9, 2, 10, 2, 1, 20, 10, 2, 1, 11, 5, 13, 2, 1, 3, 10, 2, 1, 3, 10, 2, 1, 3, 5, 13, 2, 1, 3, 10, 2, 1]\n",
      "The Time Traveller one d sof the the here pre dime tre tre time tre \n",
      "epoch: 4649, ppl: 3.645721564787498\n",
      "[1, 4, 12, 17, 4, 19, 1, 1, 6, 7, 16, 10, 2, 12, 19, 1, 3, 9, 4, 13, 7, 9, 2, 1, 21, 2, 1, 21, 2, 1, 21, 2, 1, 21, 4, 10, 2, 18, 4, 20, 10, 2, 4, 20, 10, 2, 4, 6, 1, 3]\n",
      "The Time Traveller alway  nofrely thamohe be be be baregapreaprean t\n",
      "epoch: 4699, ppl: 4.244568672919888\n",
      "[1, 4, 1, 11, 7, 16, 1, 19, 7, 14, 1, 15, 7, 14, 10, 1, 13, 2, 1, 4, 6, 6, 3, 9, 2, 1, 3, 10, 2, 1, 11, 5, 8, 3, 9, 2, 1, 13, 4, 22, 2, 10, 2, 10, 2, 1, 20, 10, 2, 6]\n",
      "The Time Traveller a dof you cour me annthe tre disthe maverere pren\n",
      "epoch: 4749, ppl: 3.392610951997176\n",
      "[1, 4, 12, 2, 24, 5, 8, 3, 2, 10, 5, 8, 3, 2, 11, 1, 4, 15, 3, 2, 10, 5, 7, 6, 1, 4, 12, 19, 1, 4, 8, 1, 4, 8, 1, 3, 1, 3, 1, 3, 9, 2, 10, 5, 6, 18, 1, 3, 1, 3]\n",
      "The Time Traveller alexisteristed acterion aly as as t t thering t t\n",
      "epoch: 4799, ppl: 4.000347167753215\n",
      "[1, 4, 6, 7, 3, 12, 19, 1, 7, 14, 1, 4, 5, 6, 18, 1, 4, 1, 9, 4, 6, 18, 1, 13, 7, 10, 11, 1, 8, 5, 22, 2, 6, 1, 8, 4, 12, 12, 12, 19, 1, 18, 10, 2, 7, 6, 7, 6, 1, 20]\n",
      "The Time Traveller anotly ou aing a hang mord siven sallly greonon p\n",
      "epoch: 4849, ppl: 3.8830679771551275\n",
      "[2, 1, 4, 6, 7, 16, 1, 3, 9, 2, 10, 2, 1, 11, 5, 13, 2, 6, 8, 5, 7, 6, 8, 5, 7, 6, 8, 1, 14, 10, 2, 11, 1, 8, 5, 7, 6, 2, 11, 7, 6, 2, 11, 7, 6, 2, 11, 7, 6, 2]\n",
      "The Time Travellere anof there dimensionsions ured sionedonedonedone\n",
      "epoch: 4899, ppl: 3.5238806861619625\n",
      "[1, 13, 4, 6, 1, 9, 4, 8, 21, 8, 1, 12, 2, 4, 8, 1, 2, 24, 5, 8, 3, 1, 4, 12, 12, 12, 12, 19, 1, 6, 11, 1, 3, 9, 5, 8, 1, 10, 4, 6, 11, 1, 2, 15, 5, 8, 1, 11, 1, 11]\n",
      "The Time Traveller man hasbs leas exist alllly nd this rand ecis d d\n",
      "epoch: 4949, ppl: 3.5704622333704177\n",
      "[1, 11, 5, 13, 4, 6, 1, 9, 2, 11, 1, 9, 2, 6, 2, 22, 2, 6, 1, 9, 2, 22, 2, 6, 1, 9, 2, 22, 2, 6, 1, 9, 2, 22, 2, 22, 2, 6, 1, 9, 2, 6, 1, 9, 2, 6, 1, 9, 2, 6]\n",
      "The Time Traveller diman hed heneven heven heven heveven hen hen hen\n",
      "epoch: 4999, ppl: 4.172145111750433\n",
      "[18, 9, 1, 4, 11, 2, 1, 12, 2, 16, 3, 9, 2, 1, 3, 5, 13, 2, 10, 2, 1, 12, 11, 5, 15, 4, 12, 19, 1, 5, 8, 15, 4, 12, 19, 1, 3, 9, 5, 10, 2, 1, 5, 22, 2, 10, 2, 10, 2, 10]\n",
      "The Time Travellergh ade lefthe timere ldicaly iscaly thire ivererer\n",
      "[1, 4, 24, 4, 10, 2, 7, 6, 4, 12, 4, 16, 4, 10, 4, 10, 2, 4, 10, 2, 4, 20, 4, 10, 2, 4, 10, 4, 16, 4, 20, 14, 4, 10, 4, 20, 14, 4, 10, 7, 11, 7, 11, 7, 20, 14, 4, 10, 4, 10]\n",
      "The Time Traveller axareonalafarareareaparearafapuarapuarododopuarar\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x2dc10290f50>]"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiwAAAGdCAYAAAAxCSikAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjYsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvq6yFwwAAAAlwSFlzAAAPYQAAD2EBqD+naQAASahJREFUeJzt3Qd4VFX6x/E3hSS0hA6JdJFeRaqAIgiiomJn2QUV8a+ru7pYcVfFiivqrgrirg1dC4IKWBClF2nSO9JJ6C0JoSQhmf/znjCTmWRmMgOT3JnM9/M812l3JjeXOPObc95zToTNZrMJAABAEIu0+gAAAACKQmABAABBj8ACAACCHoEFAAAEPQILAAAIegQWAAAQ9AgsAAAg6BFYAABA0IuWUiA3N1f27dsnFStWlIiICKsPBwAA+EDnrj1x4oQkJSVJZGRk6Q8sGlbq1Klj9WEAAIDzkJycLLVr1y79gUVbVuy/cHx8vNWHAwAAfJCenm4aHOyf46U+sNi7gTSsEFgAAAgtvpRzUHQLAACCHoEFAAAEPQILAAAIegQWAAAQ9AgsAAAg6BFYAABA0COwAACAoEdgAQAAQY/AAgAAgh6BBQAABD0CCwAACHoEFgAAEPRKxeKHxeVsTq68PG2T2GwiT/VrKnFloqw+JAAAwhItLF7k2Gzy8a+7ZPyiXZKVk2v14QAAELYILF5ESP5y19rKAgAArEFg8SIiP6+IEFgAAAiNwDJq1Cjp0KGDVKxYUWrUqCE33XSTbNmyxWWfM2fOyIMPPihVq1aVChUqyC233CIHDx70+ro2m02effZZSUxMlLJly0rv3r1l69atYjXXvEJiAQAgJALLvHnzTBhZsmSJzJgxQ7Kzs6VPnz5y8uRJxz5/+9vf5Pvvv5dJkyaZ/fft2yc333yz19d97bXX5O2335b33ntPli5dKuXLl5e+ffua8GOlCKcmFrqEAACwToRNmzfO0+HDh01LiwaTHj16SFpamlSvXl2++OILufXWW80+mzdvlmbNmsnixYulc+fOhV5Df3xSUpI8+uij8thjj5n79HVq1qwp48ePlzvvvLPI40hPT5eEhATzvPj4eAmU3FybNHx6mrm+6pmrpXL5mIC9NgAA4S7dj8/vC6ph0R+gqlSpYi5XrFhhWl20S8euadOmUrduXRNY3Nm5c6ccOHDA5Tl68J06dfL4HCvQwAIAQAjOw5KbmyuPPPKIXH755dKyZUtznwaPmJgYqVSpksu+2lqij7ljv1/38fU5mZmZZnNOaMVddHsBDVEAAOACnXcLi9ayrF+/XiZMmCAlTYt/tRXGvtWpU6f4a1iK5ScAAIBiCywPPfSQ/PDDDzJnzhypXbu24/5atWpJVlaWpKamuuyvo4T0MXfs9xccSeTtOSNGjDDdUfYtOTlZihsNLAAAhEhg0W4RDSuTJ0+W2bNnS4MGDVweb9++vZQpU0ZmzZrluE+HPe/Zs0e6dOni9jX1NTSYOD9Hu3h0tJCn58TGxpriHOetuNgbWRjWDABAiAQW7Qb67LPPzCggnYtFa0x0O336tHlcu2eGDh0qw4cPN60vWoR79913m+DhPEJIC3E19Ni7XbQW5qWXXpLvvvtO1q1bJ4MHDzYjh3SeF6s5OoXIKwAAhEbR7bhx48zllVde6XL/xx9/LHfddZe5/q9//UsiIyPNhHFaGKvzqbz77rsu+2uri32EkXriiSfMXC733Xef6U7q1q2bTJ8+XeLi4sRqpo7FRvsKAAAhOw9LsCiueVhUo6enydlcmywZ0UtqJVgfoAAAKC1KbB6WcEANCwAA1iOw+Lhic+i3QwEAELoILEVxtLAAAACrEFgAAEDQI7D4OKy5FNQmAwAQsggsvhbdklcAALAMgcXHolsAAGAdAksRaGEBAMB6BBZfa1gYJwQAgGUILL5MzU8LCwAAliKw+NzCAgAArEJgKYqjhoXIAgCAVQgsRaCFBQAA6xFY
ikANCwAA1iOw+DisGQAAWIfA4jOaWAAAsAqBxee1hCw+EAAAwhiBxdcaFqsPBACAMEZgKQItLAAAWI/A4utaQrSxAABgGQJLkRjWDACA1QgsRWC1ZgAArEdgKQKrNQMAYD0CSxFoYQEAwHoEliJEONpYAACAVQgsRWBqfgAArEdg8RFdQgAAWIfAUgSKbgEAsB6Bxdep+ckrAABYhsDiI/IKAADWIbD4PKyZyAIAgFUILD6vJQQAAKxCYPFxHhYaWAAAsA6Bxed5WEgsAABYhcDi67Bm8goAAJYhsPg6rNnqAwEAIIwRWIpACwsAANYjsBQhrkyUuTyZedbqQwEAIGz5HVjmz58v/fv3l6SkJNNdMmXKFJfH9T532+jRoz2+5siRIwvt37RpUwkG8WWjzeUJAgsAAKETWE6ePClt2rSRsWPHun18//79LttHH31kAsgtt9zi9XVbtGjh8ryFCxdKMA1rPpOdY/WhAAAQtvKaD/zQr18/s3lSq1Ytl9tTp06Vnj17SsOGDb0fSHR0oecGg8U7jprL13/eIrdfVsfqwwEAICwVaw3LwYMH5ccff5ShQ4cWue/WrVtNN5MGm0GDBsmePXs87puZmSnp6ekuW3GJic47RYdOZBbbzwAAABYGlk8++UQqVqwoN998s9f9OnXqJOPHj5fp06fLuHHjZOfOndK9e3c5ceKE2/1HjRolCQkJjq1OneJr+fj4rg7mMjrSMYMcAAAoTYFF61e0tSQuLs7rftrFdNttt0nr1q2lb9++Mm3aNElNTZWJEye63X/EiBGSlpbm2JKTk4vpNxBpWL28uTyby7hmAABCpobFVwsWLJAtW7bIV1995fdzK1WqJI0bN5Zt27a5fTw2NtZsJSHqXMsKDSwAAJTCFpYPP/xQ2rdvb0YU+SsjI0O2b98uiYmJYrXoyLxTpA0subSyAAAQGoFFw8Tq1avNprTeRK87F8lqEeykSZPk3nvvdfsavXr1kjFjxjhuP/bYYzJv3jzZtWuXLFq0SAYMGCBRUVEycOBAsVpU/uqHksN0twAAhEaX0PLly80wZbvhw4ebyyFDhpjCWTVhwgSx2WweA4e2nhw5csRxOyUlxex79OhRqV69unTr1k2WLFlirlstKsopsOTa5NzEtwAAoARF2DRZhDht0dHRQlqAGx8fH9DXPp2VI82enW6ub3i+r5SPLbayHwAAwkq6H5/frCVUBKceIckN/WwHAEBIIrD4EViIKwAAWIPA4uNaQooGFgAArEFg8aOFhSYWAACsQWApQqRTYqGGBQAAaxBYikADCwAA1iOw+FN0SwsLAACWILAUIcIpsRBXAACwBoHFDzSwAABgDQKLD+yNLDbaWAAAsASBxY+RQrSwAABgDQKLD+xVLAQWAACsQWDxAV1CAABYi8Dix/T8tLAAAGANAosvHC0sAADACgQWv2pYiCwAAFiBwOIDRgkBAGAtAos/RbcEFgAALEFg8adLiCoWAAAsQWDxYz0hWlgAALAGgcWvFhYAAGAFAosvHDUsRBYAAKxAYPGjhSWXvAIAgCUILD6IjKRTCAAAKxFYfMDihwAAWIvA4s8oIasPBACAMEVg8QEtLAAAWIvA4s9Mt7SxAABgCQKLT/ISS26u1ccBAEB4IrD4wD5IiBYWAACsQWDxAYsfAgBgLQKLDyIcZbcAAMAKBBYf0MICAIC1CCw+yJ/nlsQCAIAVCCx+TBzHWkIAAFiDwOJXlxCJBQAAKxBY/Jo4DgAAWIHA4scoIRpYAAAIkcAyf/586d+/vyQlJZnajilTprg8ftddd5n7nbdrrrmmyNcdO3as1K9fX+Li4qRTp06ybNkyCbYWFtpYAAAIkcBy8uRJadOmjQkYnmhA2b9/v2P78ssvvb7mV199JcOHD5fnnntOVq5caV6/b9++cujQIQkGLH4IAIC1ov19Qr9+/czmTWxsrNSqVcvn13zzzTdl2LBhcvfdd5vb7733nvz444/y0UcfyVNP
PSVWY5QQAAClsIZl7ty5UqNGDWnSpIk88MADcvToUY/7ZmVlyYoVK6R37975BxUZaW4vXrzY7XMyMzMlPT3dZSuZFhYSCwAApSKwaHfQp59+KrNmzZJ//vOfMm/ePNMik5OT43b/I0eOmMdq1qzpcr/ePnDggNvnjBo1ShISEhxbnTp1pDgxSggAgBDrEirKnXfe6bjeqlUrad26tVx88cWm1aVXr14B+RkjRowwNS922sJSnKHF3iVEAwsAAKV0WHPDhg2lWrVqsm3bNreP62NRUVFy8OBBl/v1tqc6GK2RiY+Pd9mKE1PzAwBQygNLSkqKqWFJTEx0+3hMTIy0b9/edCHZ5ebmmttdunSRoBrWTF4BACA0AktGRoasXr3abGrnzp3m+p49e8xjjz/+uCxZskR27dplQseNN94ojRo1MsOU7bRraMyYMY7b2r3z/vvvyyeffCKbNm0yhbo6fNo+aihYJo5jlBAAACFSw7J8+XLp2bOn47a9lmTIkCEybtw4Wbt2rQkeqampZnK5Pn36yIsvvmi6cey2b99uim3t7rjjDjl8+LA8++yzptC2bdu2Mn369EKFuFbJL7olsQAAYIUIWykYq6tFtzpaKC0trVjqWfq9tUA27U+XT+/pKD0aVw/46wMAEI7S/fj8Zi0hH1DCAgCAtQgs/nQJhX5jFAAAIYnA4gMmjgMAwFoEFj9GCdHCAgCANQgsfnUJWX0kAACEJwKLD5iaHwAAaxFYfMAoIQAArEVg8QGjhAAAsBaBxQe0sAAAYC0Ci181LEQWAACsQGDxp4WFvAIAgCUILD6ItLewWH0gAACEKQKLL5iHBQAASxFY/Cq6JbEAAGAFAosfw5pzySsAAFiCwOID1hICAMBaBBY/WlgAAIA1CCw+YPFDAACsRWDxa1gziQUAACsQWPxACwsAANYgsPgxNT+jhAAAsAaBxa+p+UksAABYgcDiT9Gt1QcCAECYIrD4wDGqmcQCAIAlCCw+YJQQAADWIrD4gHlYAACwFoHFJ4wSAgDASgQWv4puSSwAAFiBwOLXsGaLDwQAgDBFYPEBw5oBALAWgcWPUUI0sQAAYA0Ciw9oYQEAwFoEFh9E2EcJMUwIAABLEFh8QQsLAACWIrD4gFFCAABYi8DigwjH1PwAAMAKBBYfRDoGCRFZAAAIicAyf/586d+/vyQlJZmWhylTpjgey87OlieffFJatWol5cuXN/sMHjxY9u3b5/U1R44caV7LeWvatKkE3WrNAAAgNALLyZMnpU2bNjJ27NhCj506dUpWrlwpzzzzjLn89ttvZcuWLXLDDTcU+botWrSQ/fv3O7aFCxdKsHUJ5dLCAgCAJaL9fUK/fv3M5k5CQoLMmDHD5b4xY8ZIx44dZc+ePVK3bl3PBxIdLbVq1ZJgRNEtAAClvIYlLS3NtFBUqlTJ635bt241XUgNGzaUQYMGmYATNBjWDABAaLWw+OPMmTOmpmXgwIESHx/vcb9OnTrJ+PHjpUmTJqY76Pnnn5fu3bvL+vXrpWLFioX2z8zMNJtdenq6lMTEcbSwAABQygKLFuDefvvtZmTNuHHjvO7r3MXUunVrE2Dq1asnEydOlKFDhxbaf9SoUSbUlPzU/CQWAABKTZeQPazs3r3b1LR4a11xR7uPGjduLNu2bXP7+IgRI0xXk31LTk6WkhnWXKw/BgAAlFRgsYcVrUmZOXOmVK1a1e/XyMjIkO3bt0tiYqLbx2NjY00Ict5KpkuIxAIAQEgEFg0Tq1evNpvauXOnua5FshpWbr31Vlm+fLl8/vnnkpOTIwcOHDBbVlaW4zV69eplRg/ZPfbYYzJv3jzZtWuXLFq0SAYMGCBRUVGm9iUYOLqEyCsAAIRGDYuGkZ49ezpuDx8+3FwOGTLETAD33Xffmdtt27Z1ed6cOXPkyiuvNNe19eTIkSOOx1JSUkw4OXr0qFSvXl26desmS5YsMdeDQX4NCwAACInAoqHDW9eIL90m2pLi
bMKECRLcGCUEAICVWEvIB4wSAgDAWgQWP0YJ5ZJXAACwBIHFB1H2tYRILAAAWILA4oMyUXmnKTs31+pDAQAgLBFYfBBtDyxnaWEBAMAKBBYfxETldQmdpYUFAABLEFj8aWHJoYUFAAArEFh8EH2uhSU7hxYWAACsQGDxQcy5FpazBBYAACxBYPHB2XPDmRduO2r1oQAAEJYILD7Yc+yUuTySkWn1oQAAEJYILD64snHeIoxxZThdAABYgU9gH1QpH2Mua8XHWX0oAACEJQKLD+LKRJnLM9kU3QIAYAUCix+B5XR2jtWHAgBAWCKw+MBeu0JgAQDAGgQWH5Q918KSdTaXFZsBALAAgcUHZWPyAos6c5ZWFgAAShqBxQdx0fmB5XQWgQUAgJJGYPFBZGSExETnnaozZxkpBABASSOw+CjuXGChhQUAgJJHYPGzjuUMI4UAAChxBBYfVYwrYy6Pn8qy+lAAAAg7BBYfNahW3lxuP5Rh9aEAABB2CCx+Bpbk46etPhQAAMIOgcVHiQl5Cx/uTyOwAABQ0ggsPkqqVNZc7k09Y/WhAAAQdggsPkpKyAssa5JTxWZjen4AAEoSgcVHNeJjHddnbDxo6bEAABBuCCw+qlQub1izWpOSaumxAAAQbggsPop1Wk8o/tycLAAAoGQQWPzQPDHeXB49yeRxAACUJAKLHxpWz5uL5ciJTKsPBQCAsEJg8UP9qnmB5dtVe60+FAAAwgqBxQ+sIwQAgDUILH4Y1r2h4/rpLFZtBgCgpBBY/FCnSjnH9XfnbrP0WAAACCcEFj9ERUY4rr8zm8ACAEDQBpb58+dL//79JSkpSSIiImTKlCkuj+u09c8++6wkJiZK2bJlpXfv3rJ169YiX3fs2LFSv359iYuLk06dOsmyZcv8PTQAAFBK+R1YTp48KW3atDEBw53XXntN3n77bXnvvfdk6dKlUr58eenbt6+cOeN50cCvvvpKhg8fLs8995ysXLnSvL4+59ChQxLMth/OsPoQAAAICxG2C1jJT1tYJk+eLDfddJO5rS+lLS+PPvqoPPbYY+a+tLQ0qVmzpowfP17uvPNOt6+jLSodOnSQMWPGmNu5ublSp04d+ctf/iJPPfVUkceRnp4uCQkJ5mfFx+dN7lZcXv95i4yZk9cd1LtZTflgyGXF+vMAACit/Pn8DmgNy86dO+XAgQOmG8hOD0QDyeLFi90+JysrS1asWOHynMjISHPb03MyMzPNL+m8lZTH+jZxXJ+9mUUQAQAoCQENLBpWlLaoONPb9scKOnLkiOTk5Pj1nFGjRpkgZN+0NcYKd3Soa8nPBQAg3ITkKKERI0aY5iP7lpycXKI//4ErLzaXcWVC8vQBABByAvqJW6tWLXN58KBrV4netj9WULVq1SQqKsqv58TGxpq+LuetJMVG5522/y3eXaI/FwCAcBXQwNKgQQMTMmbNmuW4T+tLdLRQly5d3D4nJiZG2rdv7/IcLbrV256eY7VF246ay7O5NlNoDAAAgiywZGRkyOrVq81mL7TV63v27DGjhh555BF56aWX5LvvvpN169bJ4MGDzcgh+0gi1atXL8eIIKVDmt9//3355JNPZNOmTfLAAw+Y4dN33323BKPODas4rmfl5Fp6LAAAhINof5+wfPly6dmzp0vYUEOGDDFDl5944gkTNu677z5JTU2Vbt26yfTp082EcHbbt283xbZ2d9xxhxw+fNhMOKeFtm3btjXPKViIGyzuv/JiefvcTLdnsnMlNjrK6kMCAKBUu6B5WIJFSc7DovSUNRgxzVxf+GRPqV05f40hAAAQ5POwhAvt+rK7Ycyvlh4LAADhgMBygY6dzLL6EAAAKPUILAFwKuus1YcAAECpRmAJgGnr3M/ICwAAAoPAcp7qVskvtK0QyyghAACKE4HlPP3ytx6O6/d/ttLSYwEAoLQjsJynuDK0qgAAUFIILAAAIOgRWAJkx+EMqw8BAIBSi8ASID9vcF1tGgAA
BA6B5QI8f0MLx/V/Tt9s6bEAAFCaEVguQP82SS63mfUWAIDiQWC5AFXKx7jcXrQ9fwVqAAAQOASWAMrJDfmFrwEACEoElgA6k51j9SEAAFAqEVguUPPEeMf1WgllLT0WAABKKwLLBfp0aEfH9Q8W7LD0WAAAKK0ILBeoWoVYx/UFWym6BQCgOBBYAozCWwAAAo/AEmDfrdlr9SEAAFDqEFgCLPnYaasPAQCAUofAEmCzNx+y+hAAACh1CCwB8MqAVo7rq5NTLT0WAABKIwJLADSqUcHqQwAAoFQjsARAdFSEy+2Xftho2bEAAFAaEVgCoEyk62n8YOFOy44FAIDSiMASABGuDSwAACDACCwAACDoEVgCINfG7LYAABQnAksARAh9QgAAFCcCSwA0T4q3+hAAACjVCCwBEBUZIT2bVHe57/CJTMuOBwCA0obAEiAv3NjS5XaHl2dadiwAAJQ2BJYAqVOlXKH7jmTQygIAQCAQWIpRTi6jhwAACAQCSzH6ekWKHEo/Y/VhAAAQ8ggsxWj0z1uk4yuzJDsn1+pDAQAgpBFYAqj7JdXc3n86O6fEjwUAgNIk4IGlfv36EhERUWh78MEH3e4/fvz4QvvGxcVJKHplQCu39zMRLgAAFyZaAuy3336TnJz8FoX169fL1VdfLbfddpvH58THx8uWLVsctzW0lJaRQiqX4lsAAIIrsFSv7jqB2quvvioXX3yxXHHFFR6fowGlVq1aUlqdJbAAABC8NSxZWVny2WefyT333OO11SQjI0Pq1asnderUkRtvvFE2bNjg9XUzMzMlPT3dZQtmLI4IAEAQB5YpU6ZIamqq3HXXXR73adKkiXz00UcydepUE25yc3Ola9eukpKS4vE5o0aNkoSEBMemQSdY/PCXboXuo4UFAIALE2GzFd/X/759+0pMTIx8//33Pj8nOztbmjVrJgMHDpQXX3zRYwuLbnbawqKhJS0tzdTDWK3+Uz+63L6vR0N5+tpmlh0PAADBSD+/teHBl8/vgNew2O3evVtmzpwp3377rV/PK1OmjLRr1062bdvmcZ/Y2FizBavoyAiXVpX/zt8hf+hYV+pXK2/pcQEAEKqKrUvo448/lho1ash1113n1/N0hNG6deskMTFRQtWsRwsXGPf993xLjgUAgNKgWAKL1qFoYBkyZIhER7s24gwePFhGjBjhuP3CCy/IL7/8Ijt27JCVK1fKH//4R9M6c++990qoqutmeHPm2VwZ9dMmGTvHc8sRAAAowS4h7Qras2ePGR1UkN4fGZmfk44fPy7Dhg2TAwcOSOXKlaV9+/ayaNEiad68uYQqTyOi/jNvh7n885UXh+xcMwAAlLqi22As2rGq8NbZtpf7SXQUqyIAAMJbuh+f33xqWiAn9DMiAAAlisBigVwWbwYAwC8EFgvM3XJIth06YfVhAAAQMoptHhZ49sDnK83lrlf9G/INAEC4ooWlmDAICACAwCGwFJPlf+9d5D6Lth0pkWMBACDUEViKSdUKsTJzeA+v+3y4cGeJHQ8AAKGMwFKMGtWo6PVxVnEGAMA3BJZi1uqiBI+Pzfv9sExfv1/2HD1VoscEAECoIbAUs17Nanh9/P7PVkqP0XPkTHZOiR1TaXIq66wM/2q1zNh40OpDAQAUIwJLkEg/nV3kPpv2p8vkVSlSClZTCBhdn+nbVXtl2KfLrT4UAEAxYh6WYOHDMOh+by0wl5XKxkjPpt5bbsLFoRNnrD4EAEAJoIUlSPy67YgkH/OtlmXj/vRiPx4AAIIJLSzFrGJcGZ/2+9tXa3ye/ZYuIWfM0AcA4YAWlmI2qFNdqVe1nNWHUWoxozAAhAcCSzGLKxMlcx69Ui6tW8mn/TPP5o0WOpKRKTM3HpQcN3O10MACAAg3BJYSEBkZId880NWnfZv8Y7qM/G6DKbC999Pl8vnS3YX2Ia/ko4EFAMIDgaWERPjRdzF+0S45fCLTXLfPL+K87pBzC0turk0enbhGPliw
I5CHCwBAUKHoNgSCzvq9afKHD5Y67vt08S7T8vLkNU1ldXKqfLMyxdx/b/eGEm6oYQGA8EBgKUF3dqgjE35L9us5kREi6/amudx39GSWuXx0Ut7IonAWQacQAIQFuoRK0FP9mkp8nH8ZMeJcaAEAIJwRWEpQpXIxsnZkX7+eExkRYTZ/HUo/IxOXJ8ve1NOyLsW1haY0oUsIAMIDXUIWaJEULxv2+TZb7azNh+TaVol+/4wbxvwqB9Lzp63//qFu0qq255WjAQAIZrSwWKB8rH85MdKPf6X35m2Xr37b4xJW1JIdR11u6zwvL/2wUbYePOHXsQAAYAUCiwVevqmlX3UpvnYJ9Xx9rrz602Z58pt1hR47k50j09btl4zMs+b2E1+vlQ8W7nQsqBiq6BECgPBAYLHAJTUryvZXrpXRt7b2af9j50YFFWXnkZMeH3tjxu/y589XykNfrDS316akmsuzbmbSBQAg2BBYLJxf5db2tX3a9/nvNwbs587dcljCdUI+AEDoIrBYKFg+bLXuZdLyZNNC83//W+5ofQEAIFgwSshiUx+8XG4c+2uJ/swf1+6XIxn53Uxa96Ia1agg2w5lyM8bDsquV69z+1ybzRY0QQsAED5oYbFYmzq+reIcSA+eq2MpaPdR9zUwB9LOmFWjdV6Xy16aKWuSU01wCQZkJwAID7SwBIFnrm8uL/4QuDqV85WdUziELNh6WP704TK5skl1R/2LtgjFREfKda0S5dnrm0vl8jFiFabmB4DwQAtLEBjarYEEq48W7nRbrJt1Nlcmr9ord4//zaIjAwCEEwJLkBj7h0slFOlq0VaiSwgAwgOBJUhc1zrRY6FrqNC6lu/W7JO5Ww7JxN+SJTsn1+pDAgCUEtSwwK2TmWfNLLh7jp3yaf/jJ7Ok3YszXO47dipL7r/iYtm0P90U7ba8KPBrGdHAAgDhgRYWuDXw/SU+hxU1ftGuQvf9uu2IaWXR4HP9OwtNCPK1pebhCavkzV+2FFqBuuCsv3QJAUB4ILAEmVcGtJJgsDYlza/9PQ1y1jWM7NLPZJvL01k58vnS3bI/7bTHupipq/fJ27O3Oe47lXVWOr4ySy59cYZjSLXOJ/P+gryi4AuVeTaHLiwACGJ0CQWZP3SqK8dOZsrrv/wuoWT7oYxC9+XabC5BZvP+E5J6Klu++i3ZtMhUqxArj/dtLB8t3CUfDLlM6lQpZ/ZbuvNYodfan5a/+rR2L0VHRXicT8ZfOuLp0hdmSMW4MrJ4xFUBmRhP57SplRAnsdFRATlGAAh3AW9hGTlypHnDd96aNm3q9TmTJk0y+8TFxUmrVq1k2rRpEs4e7NlIFjzRU27zca0hqx0+kSk/rttf6H4NFs7zy+kQaO0e+nnDAXP7SEamWVl6y8ETLvPQ2GfeVe4mqMvxMGmdtricD+36OpmVIwfSz8iFrAU5ds42s8SBdoVdMXqu3Dpu8fm/GACg+LuEWrRoIfv373dsCxcu9LjvokWLZODAgTJ06FBZtWqV3HTTTWZbv369hCsNedraoK0IoaDDyzPd3p+bK7Jqz3GvrSV2v2w8KJ8u3uXTZHaeJtnVFhf7vDHipYtKu6NSjp9yaQnKf233L67ha9uhEx4f33LghIz+eYs8/vVaMyOwWrfXt241fU1t5QEAlHBgiY6Ollq1ajm2atWqedz3rbfekmuuuUYef/xxadasmbz44oty6aWXypgxYyTcRUWGRmDxZMeRk7Lexw9t9ezUDYUCwdncXEk+dsolpLwybZPH13ihwIzBmw+ky3NT15tWIPXopDXy98nrpe+/5ruEEbtlbrqjcnNt8o8p66X3m/Nl3Lztbn9u6inXYmA7DWzOdTzu/PnzldJq5M9yNCPvGAEAJRRYtm7dKklJSdKwYUMZNGiQ7Nmzx+O+ixcvlt69e7vc17dvX3O/J5mZmZKenu6ylUZRRdRSXFSprAQz7fLxtxZn2KfLXW4//e066f7aHOn95jzHfZ8u
3i0b93n+N9eAoyOK1DX/XiCfLN4tT3y9xhTV2ruNtAvIvo9zC8sfPljq8lo6r0zLkT/Ll8vy/ob/PWOr25/pHLOc/9UGvLuo0O9U0E/rD0jm2VyZsnqf1/0AwF/6Zc2fL45hFVg6deok48ePl+nTp8u4ceNk586d0r17dzlx4oTb/Q8cOCA1a9Z0uU9v6/2ejBo1ShISEhxbnTp1pDSqWiG20H2P9WnsuK7r+9jpSsulwcxNh1xue/oQf2e2++CgNODoiKIJ50KGmrPlcKFh1W/OyAtT3tZx/OuXq+RUllMLiYcM6e01Fmw9Yt40PLXCAEBxdtlf/85C+f2g+8/gsB4l1K9fP8f11q1bmwBTr149mThxoqlTCYQRI0bI8OHDHbe1haU0hhZdY2jlnuNybctE6dG4uhl50qF+Ffn9YIY0qFZe7uvRUBpWryB9W9SU2pXLSZvnf5G003lDh0s7bZUoylPfrnO5/ZcvV7ncPp2dI9+uTJHhE9dc8PG4jofyXOezc9S1pkZpx+EMU2j80FWX5L9GkKyADaD0WbbzmDSuWVFCWbEPa65UqZI0btxYtm3Ln1PDmda4HDx40OU+va33exIbG2u20q58bLSMv7uj47YOk1VvD2znduHE+lXLyRo/508JJ9rS4UxbTtyFlRW7j5tAWOU8V6H2Fjs0k2hPn3YTbT980rT8FGX6+gMyZs5WeevOdnJx9dLRkgagZOWWgi9ExT5xXEZGhmzfvl0SExPdPt6lSxeZNWuWy30zZsww98M/Y0J0AUWrzNjoGpTtbhm3SK54bY5fr6WT4fnC/pahYaUgrWOxFwc7u/+zFbJ+b7oM/2q1ua2jqe7/3wpGFgHwmfPgglAV8MDy2GOPybx582TXrl1myPKAAQMkKirKDF1WgwcPNl06dg8//LCpd3njjTdk8+bNZh6X5cuXy0MPPRToQyv1dCj0HzvXddye89iVMuvRKyw9plB1wsMyAvaQoPUxQz5aZupiftt1TIZ+kl9Y6+2LjLduHx0WrV1HzkOundm7+3Q01fQNB2Tq6r0SLm+wOtOxjg7TLtLiRLccSgubzSavTc+f04rA4kZKSooJJ02aNJHbb79dqlatKkuWLJHq1fMKRHXEkM7NYte1a1f54osv5L///a+0adNGvv76a5kyZYq0bNky0IcWFjo2qOq4rt0adCGcP/2Q9FYfM+/3w/Lhwp0ubwr2kUWeuJtgr6C5Ww6bEVYFh0MXfL/JKBCqXv5xo9z+3uISX2Jg26EMszq3Dv8OFK3Xavv8LzL657xz+9bMrfLf+Tvk5ncXSXEZNW2TdB41y5x7INSt3HNc3p2bPw3DSz9ukmemhPb8ZgGvYZkwYYLXx+fOnVvovttuu81suHD9W+d1vbV2Whn5nssbyEe/7pQb2yaZNXrgmxEFinbdfQvXWXs3eBliXdDDE1a77Q5ydjD9jFz20kypXK6MrHq2j8990PZ1lWZvPiQtkuJl+FdrpHaVsvLD2v3Ss0l1GTeovUT6OLfP3tTTUrNirJw4c1YGvPur3ND2Ihl+df4INWeOIecRIt0vqSaJCWXdtkz9tH6/dLm4qtSomFeL5Y2O4NJWrrFztsvjfZuWyAiH/8zfYS7fX7BDnrqmaUCWaACskn668Beu/y3ZLc/f0MLn94Fgw+KHpYy+yd7QJknqVyvvuG/EtU1lwn2d5Z+3tPbpNXo1rVGMRxg6PIW7BiPyl47wJ6zYvT3L85Bs9c65RR+Pn8p2abUomFc+WLDTbR3Lc1M3mGGMy3Ydk29X7jX7/LzhoKxJSXXso7UyMzcedNtMvGjbEbn81dnyxw+XygcLd8iuo6ccx/zZkt3yk4dWoie+XitdRs2WO/6z2LT2OAe7MXO2mbB205hfxRfe3k51+QNfadDZc9T3VcfVf+btkG7/nMMw9FJGW0S1G1fDeDiI8DQFg4QuAksYKBMVKZ0bVpW4MkUvxNe7WQ15aUBLmfznriVybPCuyTM/eayv
0Dfexv/If9xO10TSRSYL0knstPZGQ1Cff82Tez9dLhN+Kzyp42dLd5vLJTuOyVmnQLPzyEkz4+8Dn3tfdFIXr9TWnl+3HS1U4LzPzbIMGgz6v7NQPliQ18KhvLVu6PIHB869zq4jJ81SCO6Cl75un3/Nlx6j/Sugtp9bDWBFzVKsNuxLM11i9n8fLcA+fjJL5mw+JE9PXuexJilQNJD+4f0ljnmF/C0E94XWav358xVeuzv13+BsEK94rmFFQ8tT36yVcJ4pPfs8/o10NftBHywx66VZidWaw8z0R7rL7qOn5JIaFeSqN/Jnj1VDutST52/Mqx1y16yPkue8llIgSkS09mbSihTTeqN+WLNfBnao69JEHOHcvuH0M3UVcX+kns7yuStG113S7d7uDc8dg3fpZ7LNMP8rX8/rYs7MzpE/danvsk/Kcd+/SS/ZkR+unNe3GvzhMpl4v/cRi9e9nbdWWqVyZaRPi1rS7sVf5Ex2/oeCtkg5d+0FmnZLLtp+1Gz2bjut/dHutC/u7SRdG3leGqUoOl+QTgeg3ZTT1h0wW5VyMXJ5o6qFQqW26mn9z69PXiUx0cH7XVh/l2CnIXR/2mmpVzW/pTxQM6XrrN13X54/HYYvvl+z33wB0U0X57VK8P5VoVg0rRUvfVvUMgW5NSq6zmVT8PNw2l+7yws3tpAf/tLNLANw+2WhsXp0aaU1LO6as8fN3W66cHyl88zYLd5xVG59r0Ahq4+z+eqHU8HZg5099MUq2bQ/vdBLakvErE0HZej430zXlLuwUJSCBcfPf++6hpS/7vzvErf3a7eathroCC3tKuv8yiyZvfmgxwUwlXNYUfZwqNJOZctt7y0ytQTe6LfgUT9tMit/F8Vdt6CGFee1tbS1xdMIqP/O3y49XptjWooKFk7rl5rnvtvgUrypXYXuWlr031r/Pb3VGy3dcVQenbjGtEAFwvmMfPFUCqbnJ1jCjJ5jXfHd09+aLzy1UuoX1vNpYQkGBJYwpX/Mvz51lcz4Ww/Hfdrq4qx5UrwM7lJfWl6UYPZ97dY2FhwpnOdpcRdM/jl98wX1y6/ck1/b4itdqFELg3WdJW/6vbVA7vp4mWw8F1xUuxdnmGHgszYfMh+oq9z9/CKaWHS0kPOHnnZdefugtH+w6TpTC89NIKgfzjr78btzvTdz3/reYjOLtK4zpd1t94zPG8L+/vwdjpW5lS+FjO/N3y6/7Tpe5GgN/RastTSDPlhqQo59NJbWEjmHNQ1TU5yGt+scQodOnHH5cNZ1ZJo9O910T7nzyrTNsufYKdNS1PDp/Posf+Ywcg5D3orD7/jvEvlmZYr8bWLenEK6Jthjk9a4HZmlLUdXjJ4jq5Pd/32O+HatXPbSDDnmZ/gpeHQanjUYau1Yp1dmyXseFjgtSfZFWL9Y6nkdvvPtEjqfWvLIIClAJ7CEeW3LJTUrymdDO8mT1zSVOzrkz+GC4FPcyy7oUOK1KakuWcF5PhrnN3p7Aa8v05boMG1nzvU1WoPiTJdK0N/TpVvKTauF0i4Qdy0c7lz89DQzbFnXmdJvrzp3jrbsfL9mn7w2fYvX43f3gakB8eVpm0ydi53Oo7PPS3Bcl5JmQocvnL8Fj1+0yzEaS+tU9Pdwfsx5BmdtPfvnT1tclozQrhr15bJkR7h445ctZoXwB78oXI/kS7FxwX92578DDYcakuxBS4ue9T7nUKN/EyfOZMu1by+Qr1ekmBqcgv7vfyvMedCWOA02Bc+d/j7aeqV1WNqCtP2wb+dWX8f+/5IGJQ3PGgztNUCv/uQ6TUFB+lxfJ2383+Jd8s2KFJe/Gw3NvtKArK1W936y3Gv9UEFaN/V3DwHV/u+i/7/cOm6RLNpedCtecMQValggIt0uqWY2f1SIjTZDZ7XA0m5gxzqON0WE3pwN7uY4cf6G5/yh5FxbcyG0bsWZLpWg9Q860s2Zdl0VRbtRtMtB15n6c8+LPQ5bto806uQ0Z5G/PHXBdfVw
v37gzNxUuHlfP8jdfRN2visn1/XD8fOle0y3rq4vVnC5CaVBwE7XHSto7u+HHSPR7KuXO5vwW7Lcf0Xh8+eiwD+/c6uKBkkNb9UqxMiz/VuYBUT7NK8pN1/q2qXsPLzf3XHaHT2ZZYKN81pcLodiE7nunQWy4/BJ+XJYZ0k+fsp8KA/rkVcP5Y62mGm9z7WtPC8B4/ozbCYwVK8YK394f6nUqVJWFjxxldfnaPfSM1M3mOsD2l1kTpn972bjC32lXEzRH786w7V9HbKZmw66/H+hx6R/042qV5DezV0XENYV7bd6CMffrtorr97S2hQhayDS32fxiKtkwe9H5MZ2SRIbXXhwBi0sCGla8PjV/3Ux/+ON+UM7mTm8h4y6OX/Y9HWtEgvVyBSnKxrnr1wN//kyIduynfmh4ZNz3/qLg3571W/ddp768f92bqkC524h7XLQVoeOL8/yWl8zcXmKPDrpwhe99JW7sLImOVVaPDfdMTpKi3P1d9LWAucPZdu5Ljhngz9aJqv2HHfbvH/IzfIOdlsPnpC7P/7N67HqGlva+qRdNd4mQNQPcA0G+oHa6O/5o9V+2ZC3MOmRjCxTH2Pu23jQLDHhzes/e2/pUu5KVvSDW8OK+mHtPtPqpa1fBVvvCtIWFV8nNp6z5ZAZlq8f7ir52GmXVsHur82WzQfyuz21hcnesqVybDaX0Tn2aQV0CgB/RpHVf+pHR5emhnhtDdLRfuKmm8ubrq/OMmHFuev2iW/WyphzQbagIMkrBBb4J6FsGXPZpWHet1P9lnB96yRpVMN1FdCzublmaQAt7nVeLqA4rPhHb9O9ZXdp3UrF+vPC1eu//O6xO6Y4veH0c51lFRieqd/qnWnXTzB78hsdNp1rZiBVOlx88qq9ZpJH52JSbYVq/1Let+yCw9Q1XBTkqebD/s27SDabKUJ2Do3uaMvJGzO2FFr/ynmEUMGuPdcf45oWdK4eLTTW312H0Lsz9JPfvIYYbX2ys3f76ND0j3/Nm1SxIOfaqoK0tkmXg9D6lg17Pe+nrYIaYHSiRrVi9zETVpzPi/5O9q49+zFr0NAW6oITVHqqNXL+u9YuvT99uEzc0YBf1OSUGibdddPqsG93gqWFhS4h+OXHv3aTXzYclDs61CnyW7KuNj370SvMt8V9qWfMDKzePHN9c0k7lSVt6lSSv09eLz2bVi/UxaRBqWD3QNUKsRITnf8/1DcPdHWZ3A2hzdfJ+Qp+cAaq26q4bHaquXFuXSgYvArWALkrzvRVUaOT1NsevmW7cyi9cGuOcw7xNrpEJz4sSAuNmyfGm9YMd/RcaN2Fc4uqpyJf/dKk/j1zq8dCWk8/R0c82WubdEkIT3Reo4K/6y3jFrsN1861MfO2HHL7d6uF4L4U2r5T4N9Ia5GualJDbmlf21EUfj50fiMNjX/sVM+sTWcXHUVgQQiqXbmc3NOt6DH8rc4tDWBv2v7Pn9qbZuZJy1MKFY+9eFNLWb0nVQZ1quuY3K5nkxpm1MULN7aU46eyTBO/eu9P7eXVnzYVCjLRkU7f6iIiJCkhzjFJ2cpnrjavoc3Gw841n2pf90A3hX6AFbR1IRRt2O9ag6ROO0225602xdO6Wtp95M3y3cfNZqeBxJ2z5wLr+Yz68VT/UbCFyN46prRVw9PEavZjsRvpNAxfg6sWFTdLrFio1dBXP67dbzant8Hzot2JGhp1+2uvS2Tx9iPmPfn2y/K+oDap6dqSXtIILAgoHSY9c9Mhufty10m8tMum+yXVzda+XmUzr0v9auWkYbUKJpj8qXM9l/3tQ0T1ebr2zNaX+5n/6cvGRJlaGZ3wzHkEy+N9m8iCrYfNMGz141+7mxlRtSWoSvkYsznPsaBr2gC4MM61HBeyXIUzd4XA5+PTJbul07mua39pd1dR9ItXwZ6Sgq1jds7LYrhjLyrW9cMuxN/OdUsFgvMSIvbi7i0lsKaXNwQWBJQOk9bNmyFdXcOMLzS4OK8soC02
2gc8sn8Lc1ubL1f842pH0KlcPkY+GHKZy2tULhfj8fU1RMVERfo0GgVA8NPgM/YPxff62p0UV8a3Jo2iCp3dTTKIwggsCEk63fi6kX1dhoQWNXFXs8R4eaT3JVIrPm+14CkPXi6PTFglr9zcSrpeXM2MaCCwAKWH1rsUJ3fzA6H4RNg8zdccQtLT0yUhIUHS0tIkPj7e6sNBiNKiuX9MXi+9mtWQ+z/zvsAfAISjXa9eZ9nnN8OagXN0wqTRt7WRa1omOu4b2NH3IdnzHr/SzD8DAAg8AgvgRWJCXveRc62LvTiub4v82SV1xkxdWXXsoEsLvYbOS9GxfhVzfXAX1+Jib4p7/hoACCXUsABetCswCd2k/+simw6km1WvdZG5g+mZclfX+nJTu4sc+7z3x/Yus3pueL6vKRrWuWk0vPg0eZeI/OO65vLZkvNf/AwAShMCC+DG/Md7ytZDJ8ww7IbVysuOczNvamFvi6S8OWYSE8qawt2CrmlZS966s61jUir7LLzOM4Da559xXrW3d7MaclfXBmY+hkrlYkxBsc5No6vzTl2dN3dNzybVZY6XicQAoLQisABu1K1azmzqH9c3M7NHdmvk+wKRukiZTpF+cfUKhR577ZbWZm2S29rXdgksHwzpUGjflwe0Mpcj+jUzK8vqZg8s2g2VlFDWDOl+7ru8RdYCRYd4+zuJlbZGrdrjfb6JYHdd68SAzQMCBIvul1Rzu1BmqGGUEFAE/V9kTUqaaflwt5LphdiwL00mr9wrf7nqEknwYdKo7YczpNcb8wpV61/1xlzHAnAXSif9e6RXY7OGjfN6PB8MvsysCqsLsLmr9Vk8opdZpE+DTu835slJN+vc6EKZe4+fllFO05Tb/fWqRi7Twr90U0v5h1Ogu7h6+SLXSLkQemy6LpZy9zsCoWrJiF5mev2nvlnndiHOUBklRAsLUASd6r9tneJZUFG7l+xdTL7QFpv3/nipVK/oWgz81X1d5N8zf3dZ/M2bCrHRpqvJ2a9PXSVVy8c4lkfodolri1LBJezVk9c0laHdGkiZc2uN6LpOatrD3eWK0XML7d+0VkW5skmNQoFF58bRCQW/Wp5s6oLUHzvXk+jICHP74d6XmPvO5uS6rAzsSXxctKSf8bxas69vwhVjo81U87oSdEEtL4qXYd0bmunZC65jFAr6t0mSOzvUkUEfBPcikbhwtc4NHihiqqqgxyghIMTosGv7aCW76hVjTffRw70ukbpVyskPf+nmeOz61q5Dra9uXlPWP9+30Ovqcgn2sFLQwI75i13OfexKx3Wds0Zrc+xrRtnpiKnPhnaScjGur6eresc51fLoz9R6ocUjrjJh58MhHcw6VPpcdWfHuo6woqKjIuW+Hg0dtz++q4NMur9LoeNd81wfl0kFnT1/QwvTZeeLJ/s1lRZJhb/1aXfc9w91kxvbXiTLnu4la57t49Prabi7ULdcWvuCnv/VfZ1l0wvXyNt3tpXLG1UzK6qXhOFXNy6RnwPPRlzbTC7E5Y2sXdKEFhagFPnb1Y3NZh8WvfVghvz7jrbyg1Ndhi51X3AByI/vLlw/Y9/n25Up8lS//De6+tXKy6f3dDQruzb2sgyDttDobMTakqOr2tpHUmnosJv8565S49zMw6rlRQnyvVPYcufe7g0cK+j2aFzdBBPnImdtDdMA9fptrR1rq/zrjjaSULaMJJSNMWFPh5drC8OY2VvlaTdv4gue6CnLdx+TG9pcJF0vrip/nbBK1u/NWyOnU4Mq8u6g9o599Wd56s57+tqmklSprDz//Ub58S/dzH6dG1aVOlXKypLtR10WwXNHf6/thzIcXWWtayfIG7e3kW9Wpnh9njlP3RqY302zpB77iTPZZm2dgkHO+bYO13eeHl5/3hN9mzq6BhvXrOB1QUM7PWeLth91BMR+rWqZNcF+P3jC/C3OHH6FPDt1vdlH/8bemvW73HxpbSkfE21WHka+G9smOYruL5SGU3tr
4vl0e75xW1uxEjUsQBhwfnPSD3kNHM5L2he1rEGgrdxzXE6cOStXNK5+Xs/Xt62CrTpf/bZHpqzaZ4aVazDQYPboxNUmwNx1+YW3bPz58xUybd0B+XDIZdKrWeHusdv/s1iW7TzmV3//nM2H5O7x7teZWfTUVSbs6HD4qav3mqCjLVL6b5Vy/JScysqRPv+aX+h5utBnzfg4mfrg5YVGprlzzb/nmxWDlY56u2nsr+b6ywNayvWtksy5PH4yS/amnjatTQ1GTPP4WsO6N5D7r7hYdh45Kbe+t7jQOdB/N+1iKxfj+bsy9UP5dMVkbZnS8/bjuv3y0Bd5izIOaHeR/P26ZvLmjN/li6V7TH2Z0vXVCnr2+uZyj5uWvfM5z4GuX1HMdAvAxcT/y+82ycl1Hf1T0mFFXVq38nmHFVUwrKg7OtSVL+/r7Gjt0JaDf9/ZLiBhRY0ZeKnpunIXVvIeb+fSXeWLnk1ryGUFuvfsNKwoDR23XVbHjAaz/1vVrlzOY+vWqJtbyU8Pd/cprNhbtew03O145VrzwTSoUz3HudTFRHU/Pe83X5o/51DB2aD/fl1z07XXrm5lM1nizU7zEyl9vrewovRnV/KhAF1/T3fqVy0nVzWtYa7r5dqRRXfXDelST8qWiTIta8VJu2I/v7eTKYJ1Z+UzV7vtRtPzdm3LRBMGX7+tjfzrjrZSrUKsvDKglewcda2pL3PXnds8Md5tWLGHIa0p09o1Z/qFRgNRUkKcaZ0t6nyXJFpYgDBx5eg5suvoKfPNWT+MUDzG/7rTdPW8eXsb081RlLTT2dLm+V/Mda2F+dfM303d0WXnZkf2Ztzc7fLP6ZvlsT6N5fVffjf3af2Scwjx5ee/M2ur6bLz9Xn6rf7pyetMLY/W5dwybrH58Jv+SA8JBPvvZf99rn9nYaF99INaFyv9w/tLCy1yOmFYZ5m+Yb9c0yLRhC6dDkA/0L/6LVle/GGj15aD/WmnJfVUtvR7a4G5reFJu610FJ52J77282YZ0qW+Wf29x+g5Pv9OGt7edAoAOuJv8/4TZr6nJTuOyvi7O5pjtJ/b0be2NkHVV6uTUx2tYyP7N89rfRnW2ad/U/2du4yanXdcr1xrwr62vOr3grm/H5YmNSs6ArSVn98EFiBMpJ7KMkOVL7+4miWtKuEk7VS2T8PU7RZtO2I+JLTGxB/69n0g/YwZZbV05zFJPnbKrw+586U/d+P+dGlUo4IZ6r82JdVcL6r1xFf6e3R/bY6pofnuoW6y5+gpqRgXLe1enFEoZMzefNDMk2SnH9beWtXsXSFaoP7WrK2On+FpPx1xtnZk4SJ1NXlViglXWvC+cV+6+Td0Hopvpy0VWovirmXQnVNZZ/0+lzabTR7/eq3pNtQ6Nn+7evX9QVvlAvVv6CsCCwAgpGndjIYUe5G21iRd/PS0QoFF62V6vp43hF675Eb0a+o1GGTn5MrJzLOmCFtDV8NqFaRsgdFszi2S17SoJe/9Kb/Iuig6xL3DyzMdt3U0Wavavrd4hZt05mEBAIQyrZtxpq0XDauXN10zzg0HOvLlk3s6SrUKMT7NaaRLZejSF8rb/hPu6yKTV+01c9X4Q6cY0NFh36zYK18M6+SYmwgXjhYWAEBIyDybI5OWp0iPS6o7ls5AaKOFBQBQ6mi9jM6AjPDEsGYAABD0CCwAACDoEVgAAEDQI7AAAICgR2ABAADhF1hGjRolHTp0kIoVK0qNGjXkpptuki1btnh9zvjx481EP85bXFz+Cq4AACC8BTywzJs3Tx588EFZsmSJzJgxQ7Kzs6VPnz5y8uRJr8/T8df79+93bLt37w70oQEAgBAV8HlYpk+fXqj1RFtaVqxYIT16eF4YS1tVatWqFejDAQAApUCx17Do7HWqShXvK49mZGRIvXr1pE6dOnLjjTfKhg0bPO6bmZlpZsdz3gAAQOlVrIElNzdXHnnkEbn88sulZcuWHvdr0qSJfPTRRzJ16lT57LPP
zPO6du0qKSkpHutkdCpf+6YhBwAAlF7FupbQAw88ID/99JMsXLhQateu7fPztO6lWbNmMnDgQHnxxRfdtrDoZqctLBpaWEsIAIDQERRrCT300EPyww8/yPz58/0KK6pMmTLSrl072bZtm9vHY2NjzQYAAMJDwLuEtMFGw8rkyZNl9uzZ0qBBA79fIycnR9atWyeJiYmBPjwAABCCAt7CokOav/jiC1OPonOxHDhwwNyvTT5ly5Y11wcPHiwXXXSRqUVRL7zwgnTu3FkaNWokqampMnr0aDOs+d577/XpZ9p7tSi+BQAgdNg/t32pTgl4YBk3bpy5vPLKK13u//jjj+Wuu+4y1/fs2SORkfmNO8ePH5dhw4aZcFO5cmVp3769LFq0SJo3b+7Tzzxx4oS5pPgWAIDQo5/j2rBhWdFtSdFRRfv27TMtOjqfSyDZC3qTk5Mp6C1GnOeSwXkuOZzrksF5Du3zrBFEw0pSUpJLQ0aJFt2WJP0l/S3s9Zf+A/E/Q/HjPJcMznPJ4VyXDM5z6J7nolpW7Fj8EAAABD0CCwAACHoEliLofC/PPfcc874UM85zyeA8lxzOdcngPIfPeS4VRbcAAKB0o4UFAAAEPQILAAAIegQWAAAQ9AgsAAAg6BFYijB27FipX7++xMXFSadOnWTZsmVWH1LQ0pW5+/fvb2Ys1BmHp0yZ4vK41nc/++yzZlFLXVeqd+/esnXrVpd9jh07JoMGDTITE1WqVEmGDh0qGRkZLvusXbtWunfvbv5NdObF1157TcKJrsHVoUMHM7NzjRo15KabbpItW7a47HPmzBmzrlfVqlWlQoUKcsstt8jBgwdd9tElMq677jopV66ceZ3HH39czp4967LP3Llz5dJLLzUjA3Str/Hjx0u40GVGWrdu7Zgoq0uXLvLTTz85HuccF49XX33VvH888sgjjvs414ExcuRIc26dt6ZNm4bOedZRQnBvwoQJtpiYGNtHH31k27Bhg23YsGG2SpUq2Q4ePGj1oQWladOm2f7+97/bvv32Wx15Zps8ebLL46+++qotISHBNmXKFNuaNWtsN9xwg61Bgwa206dPO/a55pprbG3atLEtWbLEtmDBAlujRo1sAwcOdDyelpZmq1mzpm3QoEG29evX27788ktb2bJlbf/5z39s4aJv3762jz/+2Pz+q1evtl177bW2unXr2jIyMhz73H///bY6derYZs2aZVu+fLmtc+fOtq5duzoeP3v2rK1ly5a23r1721atWmX+7apVq2YbMWKEY58dO3bYypUrZxs+fLht48aNtnfeeccWFRVlmz59ui0cfPfdd7Yff/zR9vvvv9u2bNlie/rpp21lypQx511xjgNv2bJltvr169tat25te/jhhx33c64D47nnnrO1aNHCtn//fsd2+PDhkDnPBBYvOnbsaHvwwQcdt3NycmxJSUm2UaNGWXpcoaBgYMnNzbXVqlXLNnr0aMd9qampttjYWBM6lP5x6/N+++03xz4//fSTLSIiwrZ3715z+91337VVrlzZlpmZ6djnySeftDVp0sQWrg4dOmTO27x58xznVT9YJ02a5Nhn06ZNZp/Fixeb2/pGExkZaTtw4IBjn3Hjxtni4+Md5/aJJ54wb27O7rjjDhOYwpX+7X3wwQec42Jw4sQJ2yWXXGKbMWOG7YorrnAEFs51YANLmzZt3D4WCueZLiEPsrKyZMWKFabbwnnNIr29ePFiS48tFO3cudOsxu18PnX9CO1ms59PvdRuoMsuu8yxj+6v533p0qWOfXr06CExMTGOffr27Wu6RHTV73CUlpZmLqtUqWIu9e82Ozvb5Vxrs2/dunVdznWrVq2kZs2aLudRFzjbsGGDYx/n17DvE45//zk5OTJhwgQ5efKk6RriHAeedkVoV0PB88G5DqytW7eabvuGDRua7nft4gmV80xg8eDIkSPmTcr5H0bpbf3ghX/s58zb+dRL7RN1Fh0dbT6Infdx9xrO
PyOc6Erl2td/+eWXS8uWLR3nQQOdhj9v57qo8+hpH31zOn36tISDdevWmb587Yu///77ZfLkydK8eXPOcYBpGFy5cqWpzyqIcx04nTp1MvUk06dPNzVa+kVS6wF1teRQOM+lYrVmIFzpt9L169fLwoULrT6UUqlJkyayevVq04r19ddfy5AhQ2TevHlWH1apkpycLA8//LDMmDHDFNKj+PTr189xXQvKNcDUq1dPJk6caAZCBDtaWDyoVq2aREVFFaqQ1tu1atWy7LhClf2ceTufenno0CGXx7X6XEcOOe/j7jWcf0a4eOihh+SHH36QOXPmSO3atR3363nQLs3U1FSv57qo8+hpHx0xEwpvboGg3zh1lEP79u3Nt/82bdrIW2+9xTkOIO2K0P/vdVSJtqjqpqHw7bffNtf12znnunhoa0rjxo1l27ZtIfE3TWDx8kalb1KzZs1yaX7X29qHDf80aNDA/CE7n09tItTaFPv51Ev9n0XfwOxmz55tzrt+E7Dvo8Onta/VTr+Z6TfhypUrSzjQmmYNK9o9oedHz60z/bstU6aMy7nWGh/tq3Y+19rd4RwQ9Tzqm4p2edj3cX4N+z7h/Pevf4uZmZmc4wDq1auXOU/akmXftI5N6yvs1znXxUOnjNi+fbuZaiIk/qYvuGy3lA9r1lEs48ePNyNY7rvvPjOs2blCGq5V/jrUTTf903rzzTfN9d27dzuGNev5mzp1qm3t2rW2G2+80e2w5nbt2tmWLl1qW7hwoRk14DysWSvZdVjzn/70JzO8VP+NdAhdOA1rfuCBB8zw8Llz57oMTzx16pTL8EQd6jx79mwzPLFLly5mKzg8sU+fPmZotA45rF69utvhiY8//rgZLTB27NiwGgb61FNPmZFXO3fuNH+veltHrP3yyy/mcc5x8XEeJaQ414Hx6KOPmvcN/Zv+9ddfzfBkHZasIw1D4TwTWIqgY8j1H1DnY9Fhzjo/CNybM2eOCSoFtyFDhjiGNj/zzDMmcGgQ7NWrl5nfwtnRo0dNQKlQoYIZKnf33XebIORM53Dp1q2beY2LLrrIBKFw4u4c66Zzs9hpCPzzn/9shuHqm8eAAQNMqHG2a9cuW79+/cw8NvqmpW9m2dnZhf5N27Zta/7+GzZs6PIzSrt77rnHVq9ePfO765uy/r3aw4riHJdcYOFcB4YOL05MTDS/v7536u1t27aFzHmO0P9ceDsNAABA8aGGBQAABD0CCwAACHoEFgAAEPQILAAAIOgRWAAAQNAjsAAAgKBHYAEAAEGPwAIAAIIegQUAAAQ9AgsAAAh6BBYAABD0CCwAAECC3f8DMbSr43xCWz8AAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "def train_epoch(net, train_iter, loss, optimizer, device):\n",
    "    \"\"\"Run one training epoch and return the perplexity over all tokens.\n",
    "\n",
    "    net must expose init_state() and be callable as net(X, state) ->\n",
    "    (logits, new_state); loss scores logits against integer targets.\n",
    "    Returns exp(mean per-token loss), i.e. the epoch perplexity.\n",
    "    \"\"\"\n",
    "    # metric[0]: summed loss over tokens, metric[1]: token count\n",
    "    metric = [0.0, 0]\n",
    "    for X, Y in train_iter:\n",
    "        # fresh state per batch: batches are treated independently here\n",
    "        state = net.init_state()\n",
    "        y = Y.reshape(-1)\n",
    "        X, y = X.to(device), y.to(device)\n",
    "        y_hat, state = net(X, state)\n",
    "        y_hat = y_hat.view(-1, y_hat.shape[-1])\n",
    "        l = loss(y_hat, y.long()).mean()\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        l.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        # float(l) detaches the scalar; accumulating the tensor itself\n",
    "        # would keep every batch's autograd graph alive (memory leak)\n",
    "        metric[0] += float(l) * y.numel()\n",
    "        metric[1] += y.numel()\n",
    "\n",
    "    return math.exp(metric[0] / metric[1])\n",
    "\n",
    "def train(net, train_iter, vocab, lr, num_epochs, device):\n",
    "    \"\"\"Train net with AdamW for num_epochs epochs.\n",
    "\n",
    "    Every 50 epochs, prints the current perplexity and a generated\n",
    "    sample (via the module-level predict helper). Returns the list of\n",
    "    per-epoch perplexities for plotting.\n",
    "    \"\"\"\n",
    "    loss = nn.CrossEntropyLoss()\n",
    "    optimizer = torch.optim.AdamW(net.parameters(), lr)\n",
    "    # text-generation preview from a fixed prefix (predict defined elsewhere)\n",
    "    cur_predict = lambda prefix: predict(prefix, 50, net, vocab, device)\n",
    "    ppl_list = []\n",
    "    for epoch in range(num_epochs):\n",
    "        ppl = train_epoch(\n",
    "            net, train_iter, loss, optimizer, device)\n",
    "        ppl_list.append(ppl)\n",
    "        if (epoch + 1) % 50 == 0:\n",
    "            # report the 1-based epoch number to match the every-50 schedule\n",
    "            print(f'epoch: {epoch + 1}, ppl: {ppl}')\n",
    "            net.eval()\n",
    "            with torch.no_grad():\n",
    "                print(cur_predict('The Time Traveller'))\n",
    "            net.train()\n",
    "\n",
    "    # final sample: also generate in eval mode without gradient tracking,\n",
    "    # consistent with the in-loop previews\n",
    "    net.eval()\n",
    "    with torch.no_grad():\n",
    "        print(cur_predict('The Time Traveller'))\n",
    "    net.train()\n",
    "    return ppl_list\n",
    "# Hyperparameters: learning rate and number of training epochs.\n",
    "lr, num_epochs = 0.01, 5000\n",
    "# model, train_iter, vocab, device are assumed defined in earlier cells.\n",
    "ppl_list = train(model, train_iter, vocab, lr, num_epochs, device)\n",
    "from matplotlib import pyplot as plt\n",
    "# Plot the per-epoch perplexity curve (last expression renders inline).\n",
    "plt.plot(ppl_list)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "base",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.13.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
