{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "fa816164",
   "metadata": {},
   "source": [
    "## 数据准备"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "48a4d118",
   "metadata": {},
   "source": [
    "### 代码包引入"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "783839ba",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.utils.data as Data\n",
    "import numpy as np\n",
    "from torch import optim\n",
    "import random\n",
    "from tqdm import *\n",
    "import matplotlib.pyplot as plt\n",
    "import sys"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ddfa59fa",
   "metadata": {},
   "source": [
    "### 数据集生成"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "5a0ee89f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "([['kju:', 'kei', 'em', 'i:', 'vi:', 'pi:'],\n",
       "  ['bi:', 'kju:', 'eit∫', 'eks', 'ef', 'di:']],\n",
       " [['q', 'k', 'm', 'e', 'v', 'p'], ['b', 'q', 'h', 'x', 'f', 'd']])"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Dataset generation: map phonetic soundmarks to letters, with label noise.\n",
     "# NOTE(review): no random seed is set, so the dataset differs across runs.\n",
     "soundmark = ['ei',  'bi:',  'si:',  'di:',  'i:',  'ef',  'dʒi:',  'eit∫',  'ai', 'dʒei', 'kei', 'el', 'em', 'en', 'əu', 'pi:', 'kju:',\n",
     "        'ɑ:', 'es', 'ti:', 'ju:', 'vi:', 'd∧blju:', 'eks', 'wai', 'zi:']\n",
     "\n",
     "alphabet = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q',\n",
     "         'r','s','t','u','v','w','x','y','z']\n",
     "\n",
     "t = 1000 # total number of sequence pairs\n",
     "r = 0.9   # probability the true letter is kept (1-r chance of a random letter)\n",
     "seq_len = 6  # tokens per sequence\n",
     "src_tokens, tgt_tokens = [],[] # source sequences / target sequences\n",
     "\n",
     "for i in range(t):\n",
     "    src, tgt = [],[]\n",
     "    for j in range(seq_len):\n",
     "        ind = random.randint(0,25)  # pick a random letter index\n",
     "        src.append(soundmark[ind])\n",
     "        if random.random() < r:\n",
     "            tgt.append(alphabet[ind])  # correct mapping\n",
     "        else:\n",
     "            tgt.append(alphabet[random.randint(0,25)])  # noisy label\n",
     "    src_tokens.append(src)\n",
     "    tgt_tokens.append(tgt)\n",
     "src_tokens[:2], tgt_tokens[:2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "f99844b6",
   "metadata": {},
   "outputs": [],
   "source": [
     "from collections import Counter  # frequency counting for vocabulary building\n",
     "\n",
     "flatten = lambda l: [item for sublist in l for item in sublist]  # flatten a 2-D list\n",
     "# Vocabulary: bidirectional token <-> index mapping\n",
     "class Vocab:\n",
     "    def __init__(self, tokens):\n",
     "        '''tokens: 2-D list of token sequences used to build the vocabulary.'''\n",
     "        self.tokens = tokens  # the raw 2-D token list\n",
     "        self.token2index = {'<pad>': 0, '<bos>': 1, '<eos>': 2, '<unk>': 3}  # special tokens get fixed ids first\n",
     "        # Remaining tokens get ids starting at 4, in descending frequency order\n",
     "        self.token2index.update({\n",
     "            token: index + 4\n",
     "            for index, (token, freq) in enumerate(\n",
     "                sorted(Counter(flatten(self.tokens)).items(), key=lambda x: x[1], reverse=True))\n",
     "        })\n",
     "        # Inverse mapping: id -> token\n",
     "        self.index2token = {index: token for token, index in self.token2index.items()}\n",
     "\n",
     "    def __getitem__(self, query):\n",
     "        '''Look up str -> id (unknown -> 3 / <unk>), int -> token, or a list/tuple of either.'''\n",
     "        # single lookup\n",
     "        if isinstance(query, (str, int)):\n",
     "            if isinstance(query, str):\n",
     "                return self.token2index.get(query, 3)\n",
     "            elif isinstance(query, (int)):\n",
     "                return self.index2token.get(query, '<unk>')\n",
     "        # sequence lookup: recurse element-wise\n",
     "        elif isinstance(query, (list, tuple)):\n",
     "            return [self.__getitem__(item) for item in query]\n",
     "\n",
     "    def __len__(self):\n",
     "        return len(self.index2token)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "01ad7798",
   "metadata": {},
   "source": [
    "### 数据集构造"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "246e630e",
   "metadata": {},
   "outputs": [],
   "source": [
     "from torch.utils.data import DataLoader, TensorDataset\n",
     "\n",
     "# Build the source and target vocabularies\n",
     "src_vocab, tgt_vocab = Vocab(src_tokens), Vocab(tgt_tokens)\n",
     "src_vocab_size = len(src_vocab)  # source vocabulary size\n",
     "tgt_vocab_size = len(tgt_vocab)  # target vocabulary size\n",
     "\n",
     "# Teacher-forcing layout: encoder input is padded with one <pad>,\n",
     "# decoder input is prefixed with <bos>, decoder target is suffixed with <eos>.\n",
     "encoder_input = torch.tensor([src_vocab[line + ['<pad>']] for line in src_tokens])\n",
     "decoder_input = torch.tensor([tgt_vocab[['<bos>'] + line] for line in tgt_tokens])\n",
     "decoder_output = torch.tensor([tgt_vocab[line + ['<eos>']] for line in tgt_tokens])\n",
     "max_len = seq_len + 1  # sequence length after adding the one extra token\n",
     "\n",
     "# 80/20 train/test split, batch_size = 16\n",
     "train_size = int(len(encoder_input) * 0.8)\n",
     "test_size = len(encoder_input) - train_size\n",
     "batch_size = 16\n",
     "\n",
     "# Dataset yielding (encoder input, decoder input, decoder target) triples\n",
     "class MyDataSet(Data.Dataset):\n",
     "    def __init__(self, enc_inputs, dec_inputs, dec_outputs):\n",
     "        super(MyDataSet, self).__init__()\n",
     "        self.enc_inputs = enc_inputs\n",
     "        self.dec_inputs = dec_inputs\n",
     "        self.dec_outputs = dec_outputs\n",
     "\n",
     "    def __len__(self):\n",
     "        return self.enc_inputs.shape[0]\n",
     "\n",
     "    def __getitem__(self, idx):\n",
     "        return self.enc_inputs[idx], self.dec_inputs[idx], self.dec_outputs[idx]\n",
     "\n",
     "train_loader = DataLoader(MyDataSet(encoder_input[:train_size], decoder_input[:train_size], decoder_output[:train_size]), batch_size=batch_size)\n",
     "test_loader = DataLoader(MyDataSet(encoder_input[-test_size:], decoder_input[-test_size:], decoder_output[-test_size:]), batch_size=1)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "040214ea",
   "metadata": {},
   "source": [
    "## 模型构建"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "54dddf8b",
   "metadata": {},
   "source": [
    "### 位置编码"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "8ad8a03f",
   "metadata": {},
   "outputs": [],
   "source": [
     "def get_sinusoid_encoding_table(n_position, d_model):\n",
     "    '''Return an [n_position, d_model] FloatTensor of fixed sinusoidal position encodings.'''\n",
     "    def cal_angle(position, hid_idx):\n",
     "        # pos / 10000^(2*(i//2)/d_model), as in the original Transformer paper\n",
     "        return position / np.power(10000, 2 * (hid_idx // 2) / d_model)\n",
     "    def get_posi_angle_vec(position):\n",
     "        return [cal_angle(position, hid_j) for hid_j in range(d_model)]\n",
     "    sinusoid_table = np.array([get_posi_angle_vec(pos_i) for pos_i in range(n_position)])\n",
     "    sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2]) # even dimensions: sine\n",
     "    sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2]) # odd dimensions: cosine\n",
     "    return torch.FloatTensor(sinusoid_table)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "73ed603c",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[ 0.0000e+00,  1.0000e+00,  0.0000e+00,  ...,  1.0000e+00,\n",
      "          0.0000e+00,  1.0000e+00],\n",
      "        [ 8.4147e-01,  5.4030e-01,  8.2186e-01,  ...,  1.0000e+00,\n",
      "          1.0366e-04,  1.0000e+00],\n",
      "        [ 9.0930e-01, -4.1615e-01,  9.3641e-01,  ...,  1.0000e+00,\n",
      "          2.0733e-04,  1.0000e+00],\n",
      "        ...,\n",
      "        [ 9.5638e-01, -2.9214e-01,  7.9142e-01,  ...,  1.0000e+00,\n",
      "          2.7989e-03,  1.0000e+00],\n",
      "        [ 2.7091e-01, -9.6261e-01,  9.5325e-01,  ...,  1.0000e+00,\n",
      "          2.9026e-03,  1.0000e+00],\n",
      "        [-6.6363e-01, -7.4806e-01,  2.9471e-01,  ...,  1.0000e+00,\n",
      "          3.0062e-03,  1.0000e+00]])\n"
     ]
    }
   ],
   "source": [
     "# Sanity check: inspect a 30 x 512 positional-encoding table\n",
     "print(get_sinusoid_encoding_table(30, 512))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "77f721b9",
   "metadata": {},
   "source": [
    "### 掩码操作"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "16f45380",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Padding mask: hide meaningless <pad> placeholders in the key sequence\n",
     "def get_attn_pad_mask(seq_q, seq_k):                       # seq_q: [batch_size, seq_len] ,seq_k: [batch_size, seq_len]\n",
     "    batch_size, len_q = seq_q.size()\n",
     "    batch_size, len_k = seq_k.size()\n",
     "    pad_attn_mask = seq_k.data.eq(0).unsqueeze(1)          # True where key token is <pad> (id 0), [batch_size, 1, len_k]\n",
     "    return pad_attn_mask.expand(batch_size, len_q, len_k)\n",
     "\n",
     "# Subsequence mask: hide future positions (causal mask for the decoder)\n",
     "def get_attn_subsequence_mask(seq):                               # seq: [batch_size, tgt_len]\n",
     "    attn_shape = [seq.size(0), seq.size(1), seq.size(1)]\n",
     "    subsequence_mask = np.triu(np.ones(attn_shape), k=1)          # strictly-upper-triangular ones, [batch_size, tgt_len, tgt_len]\n",
     "    subsequence_mask = torch.from_numpy(subsequence_mask).byte()  # [batch_size, tgt_len, tgt_len]\n",
     "    return subsequence_mask "
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1da18369",
   "metadata": {},
   "source": [
    "### 注意力计算函数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0e7fb2ee",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 缩放点积注意力计算\n",
    "class ScaledDotProductAttention(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(ScaledDotProductAttention, self).__init__()\n",
    "    def forward(self, Q, K, V, attn_mask):\n",
    "        '''\n",
    "        Q: [batch_size, n_heads, len_q, d_k]\n",
    "        K: [batch_size, n_heads, len_k, d_k]\n",
    "        V: [batch_size, n_heads, len_v(=len_k), d_v]\n",
    "        attn_mask: [batch_size, n_heads, seq_len, seq_len]\n",
    "        '''\n",
    "        scores = torch.matmul(Q, K.transpose(-1, -2)) / np.sqrt(d_k) # scores : [batch_size, n_heads, len_q, len_k]\n",
    "        scores.masked_fill_(attn_mask, -1e9) # Fills elements of self tensor with value where mask is True.\n",
    "        attn = nn.Softmax(dim=-1)(scores)\n",
    "        context = torch.matmul(attn, V) # [batch_size, n_heads, len_q, d_v]\n",
    "        return context, attn\n",
    "\n",
    "#多头注意力计算\n",
    "class MultiHeadAttention(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(MultiHeadAttention, self).__init__()\n",
    "        self.W_Q = nn.Linear(d_model, d_k * n_heads, bias=False)\n",
    "        self.W_K = nn.Linear(d_model, d_k * n_heads, bias=False)\n",
    "        self.W_V = nn.Linear(d_model, d_v * n_heads, bias=False)\n",
    "        self.fc = nn.Linear(n_heads * d_v, d_model, bias=False)\n",
    "    def forward(self, input_Q, input_K, input_V, attn_mask):\n",
    "        '''\n",
    "        input_Q: [batch_size, len_q, d_model]\n",
    "        input_K: [batch_size, len_k, d_model]\n",
    "        input_V: [batch_size, len_v(=len_k), d_model]\n",
    "        attn_mask: [batch_size, seq_len, seq_len]\n",
    "        '''\n",
    "        residual, batch_size = input_Q, input_Q.size(0)\n",
    "        # (B, S, D) -proj-> (B, S, D_new) -split-> (B, S, H, W) -trans-> (B, H, S, W)\n",
    "        Q = self.W_Q(input_Q).view(batch_size, -1, n_heads, d_k).transpose(1,2) # Q: [batch_size, n_heads, len_q, d_k]\n",
    "        K = self.W_K(input_K).view(batch_size, -1, n_heads, d_k).transpose(1,2) # K: [batch_size, n_heads, len_k, d_k]\n",
    "        V = self.W_V(input_V).view(batch_size, -1, n_heads, d_v).transpose(1,2) # V: [batch_size, n_heads, len_v(=len_k), d_v]\n",
    "        attn_mask = attn_mask.unsqueeze(1).repeat(1, n_heads, 1, 1) # attn_mask : [batch_size, n_heads, seq_len, seq_len]\n",
    "        # context: [batch_size, n_heads, len_q, d_v], attn: [batch_size, n_heads, len_q, len_k]\n",
    "        context, attn = ScaledDotProductAttention()(Q, K, V, attn_mask)\n",
    "        context = context.transpose(1, 2).reshape(batch_size, -1, n_heads * d_v) # context: [batch_size, len_q, n_heads * d_v]\n",
    "        output = self.fc(context) # [batch_size, len_q, d_model]\n",
    "        return nn.LayerNorm(d_model)(output + residual), attn"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "852dc93f",
   "metadata": {},
   "source": [
    "### 构建前馈网络"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "b68eb3bc",
   "metadata": {},
   "outputs": [],
   "source": [
    "class PoswiseFeedForwardNet(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(PoswiseFeedForwardNet, self).__init__()\n",
    "        self.fc = nn.Sequential(\n",
    "            nn.Linear(d_model, d_ff, bias=False),\n",
    "            nn.ReLU(),\n",
    "            nn.Linear(d_ff, d_model, bias=False))\n",
    "        \n",
    "    def forward(self, inputs):                             # inputs: [batch_size, seq_len, d_model]\n",
    "        residual = inputs\n",
    "        output = self.fc(inputs)\n",
    "        return nn.LayerNorm(d_model)(output + residual)   # 残差 + LayerNorm"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "44b3ff96",
   "metadata": {},
   "source": [
    "### 编码器模块"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "eae1e01c",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Encoder layer: self-attention followed by a position-wise feed-forward network\n",
     "class EncoderLayer(nn.Module):\n",
     "    def __init__(self):\n",
     "        super(EncoderLayer, self).__init__()\n",
     "        self.enc_self_attn = MultiHeadAttention()  # multi-head self-attention\n",
     "        self.pos_ffn = PoswiseFeedForwardNet()  # position-wise feed-forward network\n",
     "    def forward(self, enc_inputs, enc_self_attn_mask):\n",
     "        '''\n",
     "        enc_inputs: [batch_size, src_len, d_model]\n",
     "        enc_self_attn_mask: [batch_size, src_len, src_len]\n",
     "        '''\n",
     "        # enc_outputs: [batch_size, src_len, d_model], attn: [batch_size, n_heads, src_len, src_len]\n",
     "        enc_outputs, attn = self.enc_self_attn(enc_inputs, enc_inputs, enc_inputs, enc_self_attn_mask) # enc_inputs to same Q,K,V\n",
     "        enc_outputs = self.pos_ffn(enc_outputs) # enc_outputs: [batch_size, src_len, d_model]\n",
     "        return enc_outputs, attn\n",
     "\n",
     "# Encoder: token embedding + frozen sinusoidal position embedding + n_layers encoder layers\n",
     "class Encoder(nn.Module):\n",
     "    def __init__(self):\n",
     "        super(Encoder, self).__init__()\n",
     "        self.src_emb = nn.Embedding(src_vocab_size, d_model)\n",
     "        self.pos_emb = nn.Embedding.from_pretrained(get_sinusoid_encoding_table(max_len, d_model), freeze=True)\n",
     "        self.layers = nn.ModuleList([EncoderLayer() for _ in range(n_layers)])\n",
     "    def forward(self, enc_inputs):\n",
     "        '''\n",
     "        enc_inputs: [batch_size, src_len]\n",
     "        '''\n",
     "        batch_size, src_len = enc_inputs.size()\n",
     "        pos_idx = torch.arange(src_len).unsqueeze(0).expand(batch_size, src_len)  # position ids 0..src_len-1 for every batch row\n",
     "        \n",
     "        word_emb = self.src_emb(enc_inputs) # [batch_size, src_len, d_model]\n",
     "        pos_emb = self.pos_emb(pos_idx) # [batch_size, src_len, d_model]\n",
     "        enc_outputs = word_emb + pos_emb\n",
     "        enc_self_attn_mask = get_attn_pad_mask(enc_inputs, enc_inputs) # [batch_size, src_len, src_len]\n",
     "        enc_self_attns = []  # per-layer attention maps, kept for visualization\n",
     "        for layer in self.layers:\n",
     "            # enc_outputs: [batch_size, src_len, d_model], enc_self_attn: [batch_size, n_heads, src_len, src_len]\n",
     "            enc_outputs, enc_self_attn = layer(enc_outputs, enc_self_attn_mask)\n",
     "            enc_self_attns.append(enc_self_attn)\n",
     "        return enc_outputs, enc_self_attns"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a62a13bc",
   "metadata": {},
   "source": [
    "### 解码器模块"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "416d9b38",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Decoder layer: masked self-attention, encoder-decoder attention, then FFN\n",
     "class DecoderLayer(nn.Module):\n",
     "    def __init__(self):\n",
     "        super(DecoderLayer, self).__init__()\n",
     "        self.dec_self_attn = MultiHeadAttention()\n",
     "        self.dec_enc_attn = MultiHeadAttention()\n",
     "        self.pos_ffn = PoswiseFeedForwardNet()\n",
     "    def forward(self, dec_inputs, enc_outputs, dec_self_attn_mask, dec_enc_attn_mask):\n",
     "        '''\n",
     "        dec_inputs: [batch_size, tgt_len, d_model]\n",
     "        enc_outputs: [batch_size, src_len, d_model]\n",
     "        dec_self_attn_mask: [batch_size, tgt_len, tgt_len]\n",
     "        dec_enc_attn_mask: [batch_size, tgt_len, src_len]\n",
     "        '''\n",
     "        # dec_outputs: [batch_size, tgt_len, d_model], dec_self_attn: [batch_size, n_heads, tgt_len, tgt_len]\n",
     "        dec_outputs, dec_self_attn = self.dec_self_attn(dec_inputs, dec_inputs, dec_inputs, dec_self_attn_mask)\n",
     "        # dec_outputs: [batch_size, tgt_len, d_model], dec_enc_attn: [batch_size, n_heads, tgt_len, src_len]\n",
     "        dec_outputs, dec_enc_attn = self.dec_enc_attn(dec_outputs, enc_outputs, enc_outputs, dec_enc_attn_mask)\n",
     "        dec_outputs = self.pos_ffn(dec_outputs) # [batch_size, tgt_len, d_model]\n",
     "        return dec_outputs, dec_self_attn, dec_enc_attn\n",
     "\n",
     "# Decoder: token embedding + frozen sinusoidal position embedding + n_layers decoder layers\n",
     "class Decoder(nn.Module):\n",
     "    def __init__(self):\n",
     "        super(Decoder, self).__init__()\n",
     "        self.tgt_emb = nn.Embedding(tgt_vocab_size, d_model)\n",
     "        self.pos_emb = nn.Embedding.from_pretrained(get_sinusoid_encoding_table(max_len, d_model),freeze=True)\n",
     "        self.layers = nn.ModuleList([DecoderLayer() for _ in range(n_layers)])\n",
     "    def forward(self, dec_inputs, enc_inputs, enc_outputs):\n",
     "        '''\n",
     "        dec_inputs: [batch_size, tgt_len]\n",
     "        enc_inputs: [batch_size, src_len]\n",
     "        enc_outputs: [batch_size, src_len, d_model]\n",
     "        '''\n",
     "        batch_size, tgt_len = dec_inputs.size()\n",
     "        pos_idx = torch.arange(tgt_len).unsqueeze(0).expand(batch_size, tgt_len)  # position ids per batch row\n",
     "        \n",
     "        word_emb = self.tgt_emb(dec_inputs) # [batch_size, tgt_len, d_model]\n",
     "        pos_emb = self.pos_emb(pos_idx) # [batch_size, tgt_len, d_model]\n",
     "        dec_outputs = word_emb + pos_emb\n",
     "        dec_self_attn_pad_mask = get_attn_pad_mask(dec_inputs, dec_inputs) # [batch_size, tgt_len, tgt_len]\n",
     "        dec_self_attn_subsequent_mask = get_attn_subsequence_mask(dec_inputs) # [batch_size, tgt_len, tgt_len]\n",
     "        # combine padding mask and causal mask: a position is masked if either masks it\n",
     "        dec_self_attn_mask = torch.gt((dec_self_attn_pad_mask + dec_self_attn_subsequent_mask), 0) # [batch_size, tgt_len, tgt_len]\n",
     "        dec_enc_attn_mask = get_attn_pad_mask(dec_inputs, enc_inputs) # [batch_size, tgt_len, src_len]\n",
     "        dec_self_attns, dec_enc_attns = [], []\n",
     "        for layer in self.layers:\n",
     "            # dec_outputs: [batch_size, tgt_len, d_model], dec_self_attn: [batch_size, n_heads, tgt_len, tgt_len], dec_enc_attn: [batch_size, n_heads, tgt_len, src_len]\n",
     "            dec_outputs, dec_self_attn, dec_enc_attn = layer(dec_outputs, enc_outputs, dec_self_attn_mask, dec_enc_attn_mask)\n",
     "            dec_self_attns.append(dec_self_attn)\n",
     "            dec_enc_attns.append(dec_enc_attn)\n",
     "        return dec_outputs, dec_self_attns, dec_enc_attns"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "fe516394",
   "metadata": {},
   "source": [
    "### Transformer模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "5461ca2c",
   "metadata": {},
   "outputs": [],
   "source": [
     "class Transformer(nn.Module):\n",
     "    '''Full encoder-decoder Transformer for the soundmark-to-letter task.'''\n",
     "    def __init__(self):\n",
     "        super(Transformer, self).__init__()\n",
     "        self.encoder = Encoder()\n",
     "        self.decoder = Decoder()\n",
     "        self.projection = nn.Linear(d_model, tgt_vocab_size, bias=False)  # d_model -> target-vocabulary logits\n",
     "    def forward(self, enc_inputs, dec_inputs):\n",
     "        '''\n",
     "        enc_inputs: [batch_size, src_len]\n",
     "        dec_inputs: [batch_size, tgt_len]\n",
     "        Returns flattened logits [batch_size * tgt_len, tgt_vocab_size] plus attention maps.\n",
     "        '''\n",
     "        # tensor to store decoder outputs\n",
     "        # outputs = torch.zeros(batch_size, tgt_len, tgt_vocab_size).to(self.device)\n",
     "        # enc_outputs: [batch_size, src_len, d_model], enc_self_attns: [n_layers, batch_size, n_heads, src_len, src_len]\n",
     "        enc_outputs, enc_self_attns = self.encoder(enc_inputs)\n",
     "        # dec_outputs: [batch_size, tgt_len, d_model], dec_self_attns: [n_layers, batch_size, n_heads, tgt_len, tgt_len], dec_enc_attns: [n_layers, batch_size, n_heads, tgt_len, src_len]\n",
     "        dec_outputs, dec_self_attns, dec_enc_attns = self.decoder(dec_inputs, enc_inputs, enc_outputs)\n",
     "        dec_logits = self.projection(dec_outputs) # dec_logits: [batch_size, tgt_len, tgt_vocab_size]\n",
     "        # flatten to [batch_size * tgt_len, tgt_vocab_size] for CrossEntropyLoss\n",
     "        return dec_logits.view(-1, dec_logits.size(-1)), enc_self_attns, dec_self_attns, dec_enc_attns"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3c3328f5",
   "metadata": {},
   "source": [
    "## 模型训练"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "6f2cf469",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 1 loss = 3.068131                                                                                                                  \n",
      "Epoch: 2 loss = 2.098386                                                                                                                  \n",
      "Epoch: 3 loss = 1.766903                                                                                                                  \n",
      "Epoch: 4 loss = 1.558566                                                                                                                  \n",
      "Epoch: 5 loss = 1.244992                                                                                                                  \n",
      "Epoch: 6 loss = 0.976748                                                                                                                  \n",
      "Epoch: 7 loss = 0.780826                                                                                                                  \n",
      "Epoch: 8 loss = 0.706284                                                                                                                  \n",
      "Epoch: 9 loss = 0.610922                                                                                                                  \n",
      "Epoch: 10 loss = 0.537917                                                                                                                 \n",
      "Epoch: 11 loss = 0.473819                                                                                                                 \n",
      "Epoch: 12 loss = 0.385198                                                                                                                 \n",
      "Epoch: 13 loss = 0.374470                                                                                                                 \n",
      "Epoch: 14 loss = 0.361486                                                                                                                 \n",
      "Epoch: 15 loss = 0.360906                                                                                                                 \n",
      "Epoch: 16 loss = 0.294931                                                                                                                 \n",
      "Epoch: 17 loss = 0.243941                                                                                                                 \n",
      "Epoch: 18 loss = 0.183459                                                                                                                 \n",
      "Epoch: 19 loss = 0.168781                                                                                                                 \n",
      "Epoch: 20 loss = 0.131821                                                                                                                 \n",
      "100%|█████████████████████████████████████████████████████████████████████████████████████████████████████| 20/20 [05:18<00:00, 15.95s/it]\n"
     ]
    }
   ],
   "source": [
     "d_model = 512   # embedding dimension\n",
     "d_ff = 2048     # feed-forward hidden dimension\n",
     "d_k = d_v = 64  # per-head dimension of K(=Q) and V\n",
     "n_layers = 6    # number of encoder and decoder layers\n",
     "n_heads = 8     # number of attention heads\n",
     "num_epochs = 20 # training epochs\n",
     "# Record loss history for plotting\n",
     "loss_history = []\n",
     "\n",
     "# NOTE(review): the model classes above read these hyperparameters as module\n",
     "# globals, so this cell must define them before Transformer() is instantiated.\n",
     "model = Transformer()\n",
     "criterion = nn.CrossEntropyLoss(ignore_index=0)  # ignore <pad> (id 0) targets\n",
     "optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.99)\n",
     "\n",
     "for epoch in tqdm(range(num_epochs), file=sys.stdout):\n",
     "    total_loss = 0\n",
     "    for enc_inputs, dec_inputs, dec_outputs in train_loader:\n",
     "        '''\n",
     "        enc_inputs: [batch_size, src_len]\n",
     "        dec_inputs: [batch_size, tgt_len]\n",
     "        dec_outputs: [batch_size, tgt_len]\n",
     "        '''\n",
     "        # enc_inputs, dec_inputs, dec_outputs = enc_inputs.to(device), dec_inputs.to(device), dec_outputs.to(device)\n",
     "        # outputs: [batch_size * tgt_len, tgt_vocab_size]\n",
     "        outputs, enc_self_attns, dec_self_attns, dec_enc_attns = model(enc_inputs, dec_inputs)\n",
     "        loss = criterion(outputs, dec_outputs.view(-1))  # flatten targets to match flattened logits\n",
     "        optimizer.zero_grad()\n",
     "        loss.backward()\n",
     "        optimizer.step()\n",
     "        total_loss += loss.item()\n",
     "    avg_loss = total_loss/len(train_loader)\n",
     "    loss_history.append(avg_loss)\n",
     "    tqdm.write(\"{0} {1} {2} {3}\".format('Epoch:', '%d' % (epoch + 1), 'loss =', '{:.6f}'.format(avg_loss)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "6c7f102c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAGdCAYAAADuR1K7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABE8ElEQVR4nO3de1xUZf4H8M8Bhhmug9yvAl4SERXEErxmrZiWl3LLLj+tbX+VpblFdlF3t9q2tbbftq1rabZ2tbQttGg1E1fBTLygQKiAJiiIjFyEGS4ywHB+fwCjKI6MzsyZy+f9es3rxZzznOF79sTOx/M853kEURRFEBEREdkJJ6kLICIiIjIlhhsiIiKyKww3REREZFcYboiIiMiuMNwQERGRXWG4ISIiIrvCcENERER2heGGiIiI7IqL1AVYWkdHB86ePQsvLy8IgiB1OURERNQHoiiioaEBoaGhcHIyfG/G4cLN2bNnERERIXUZREREdB3Ky8sRHh5usI3DhRsvLy8Anf/jeHt7S1wNERER9YVGo0FERIT+e9wQhws33V1R3t7eDDdEREQ2pi9DSjigmIiIiOwKww0RERHZFYYbIiIisisMN0RERGRXGG6IiIjIrjDcEBERkV1huCEiIiK7wnBDREREdoXhhoiIiOwKww0RERHZFYYbIiIisisMN0RERGRXHG7hTHOpamjBF/vLUNfUildnxUldDhERkcPinRsTadOJeGfHCazfX4YLrTqpyyEiInJYDDcmEqpUIMBLDl2HiKNn1VKXQ0RE5LAYbkxEEATER/gAAPLK6yWthYiIyJEx3JhQd7jJZbghIiKSjKThZvXq1RgxYgS8vb3h7e2N5ORkfP/99waPycrKQmJiIhQKBQYMGIA1a9ZYqNpr09+5KauXtA4iIiJHJmm4CQ8PxxtvvIGcnBzk5OTgtttuw6xZs3D06NFe25eWlmL69OmYMGECcnNzsWzZMixevBhpaWkWrrx3I8KVEASgov4Cqhu0UpdDRETkkARRFEWpi7iUr68v3nrrLfz2t7+9Yt+LL76I9PR0FBYW6rctWLAA+fn5yM7O7tPnazQaKJVKqNVqeHt7m6zublPezsKJqkb8a/5o/Co2yOSfT0RE5IiM+f62mjE3Op0OGzduRFNTE5KTk3ttk52djZSUlB7bpk6dipycHLS1tfV6jFarhUaj6fEyp5EcVExERCQpycNNQUEBPD09IZfLsWDBAmzevBmxsbG9tlWpVAgK6nk3JCgoCO3t7aipqen1mBUrVkCpVOpfERERJj+HS/GJKSIiImlJHm6GDBmCvLw87Nu3D08++SQefvhhHDt27KrtBUHo8b67V+3y7d2WLl0KtVqtf5WXl5uu+F50h5v8M/Xo6LCqHj8iIiKHIPnyC66urhg0aBAAYPTo0Th48CD+8Y9/4P3337+ibXBwMFQqVY9tVVVVcHFxgZ+fX6+fL5fLIZfLTV/4VQwJ9oJC5oSGlnaU1DRhUKCnxX43ERERWcGdm8uJogittvcnjZKTk5GRkdFj2/bt2zF69GjIZDJLlHdNMmcnxIUqAbBrioiISAqShptly5bhxx9/xKlTp1BQUIDly5cjMzMTDz30EIDOLqX58+fr2y9YsACnT59GamoqCgsL8eGHH2LdunVYsmSJVKfQq4vjbuqkLYSIiMgBSdotde7cOcybNw+VlZVQKpUYMWIEtm3bhilTpgAAKisrUVZWpm8fHR2NrVu34tlnn8W7776L0NBQrFy5EnPmzJHqFHoV398HAJBfzjWmiIiILM3q5rkxN3PPcwMAZ+qaMf7NXXBxEnDk1alQyJzN8nuIiIgchU3Oc2NPwnzc4O/pinauEE5ERGRxDDdmcOkK4blcZ4qIiMiiGG7M5OJ8N7xzQ0REZEkMN2YSH9EPAJ+YIiIisjSGGzMZHt451035+QuobeQK4URERJbCcGMmSjcZBgZ4AOBkfkRERJbEcGNG
3V1T+Qw3REREFsNwY0bdk/nlMtwQERFZDMONGcWH+wDovHPDFcKJiIgsg+HGjGJCvCB3cYKmpR2ltU1Sl0NEROQQGG7MSObshLiwzqemOO6GiIjIMhhuzOziCuH1ktZBRETkKBhuzGwkww0REZFFMdyYWUJXuCms1KClTSdtMURERA6A4cbMwvu5wc/DFW06EccqNVKXQ0REZPcYbszs0hXC87hCOBERkdkx3FgAx90QERFZDsONBfCJKSIiIsthuLGA7js3Zeebcb6pVdpiiIiI7BzDjQUo3WQY0LVCOCfzIyIiMi+GGwvpXmeKi2gSERGZF8ONhXSvEM5xN0RERObFcGMh3YOK88vrIYpcIZyIiMhcGG4sJCbYG64uTlBfaMOp2mapyyEiIrJbDDcW4urihGGh3gCAvPI6iashIiKyXww3FsSZiomIiMyP4caC9OHmjFraQoiIiOwYw40FJUT0AwAUntVA284VwomIiMyB4caCInzd4OvhilZdB46d5QrhRERE5sBwY0GCIGBkuBIA57shIiIyF4YbC4vv6priMgxERETmwXBjYZypmIiIyLwYbiysu1vqVG0z6rhCOBERkckx3FiYj7srov07VwjPO1MvbTFERER2iOFGApeuM0VERESmxXAjAf1kfgw3REREJsdwI4GRXCGciIjIbBhuJDA0xAuuzk6oa27Daa4QTkREZFIMNxKQuzgjtmuF8HwOKiYiIjIphhuJdI+7yeUK4URERCbFcCMRDiomIiIyD4YbiXSHm2NcIZyIiMikGG4kEunnjn7uMrTqOlBU2SB1OURERHaD4UYigiDoHwln1xQREZHpMNxIaGS4DwCGGyIiIlNiuJEQVwgnIiIyPUnDzYoVK3DzzTfDy8sLgYGBmD17NoqLiw0ek5mZCUEQrngVFRVZqGrTie+6c1Na04T6Zq4QTkREZAqShpusrCwsXLgQ+/btQ0ZGBtrb25GSkoKmpqZrHltcXIzKykr9a/DgwRao2LT6ebgiys8dAJB/Ri1xNURERPbBRcpfvm3bth7vP/roIwQGBuLQoUOYOHGiwWMDAwPh4+NjxuosY2SED07VNiOvrB6TbgqQuhwiIiKbZ1VjbtTqzrsXvr6+12ybkJCAkJAQ3H777di1a9dV22m1Wmg0mh4va3JxMr86aQshIiKyE1YTbkRRRGpqKsaPH4+4uLirtgsJCcHatWuRlpaGTZs2YciQIbj99tuxe/fuXtuvWLECSqVS/4qIiDDXKVyX7nCTf0bNFcKJiIhMQBCt5Bt14cKF2LJlC/bs2YPw8HCjjp0xYwYEQUB6evoV+7RaLbRarf69RqNBREQE1Go1vL29b7juG6Vt1yHu5R/QphOx+/nJ6N81BoeIiIgu0mg0UCqVffr+too7N08//TTS09Oxa9cuo4MNACQlJeHEiRO97pPL5fD29u7xsiZyF2fEhnTWlMuuKSIiohsmabgRRRGLFi3Cpk2bsHPnTkRHR1/X5+Tm5iIkJMTE1VkOF9EkIiIyHUmfllq4cCG++OILfPvtt/Dy8oJKpQIAKJVKuLm5AQCWLl2KiooKfPrppwCAd955B1FRURg2bBhaW1uxfv16pKWlIS0tTbLzuFHx/X3wSfZp5DPcEBER3TBJw83q1asBALfeemuP7R999BEeeeQRAEBlZSXKysr0+1pbW7FkyRJUVFTAzc0Nw4YNw5YtWzB9+nRLlW1y8RH9AABHzmrQ2t4BVxer6C0kIiKySVYzoNhSjBmQZCmiKCL+TxlQX2hD+qJxGNE1czERERF1srkBxY6OK4QTERGZDsONleCgYiIiItNguLESCQw3REREJsFwYyVGhCsBACXVTVA3t0lcDRERke1iuLESfp5y9PftXiG8XtpiiIiIbBjDjRXRrzPFrikiIqLrxnBjRTiomIiI6MYx3FiRSx8Hd7Dph4iIiEyG4caKDAv1hsxZQG1TK87UXZC6HCIiIpvEcGNFFDJnDO1aIZxd
U0RERNeH4cbKjOxaeoHhhoiI6Pow3FgZDiomIiK6MQw3Via+vw8A4EiFGm26DmmLISIiskEMN1Ym2s8D3goXaNs7UKxqkLocIiIim8NwY2WcnC6uEJ7LrikiIiKjMdxYIf24m7J6SesgIiKyRQw3Vki/DAPXmCIiIjIaw40V6u6WOlndCE0LVwgnIiIyBsONFfL3lCO8nxtEEfi5XC11OURERDaF4cZKXZzvpk7aQoiIiGwMw42VuhhueOeGiIjIGAw3ViqhazI/rhBORERkHIYbKzUsVAkXJwE1jVpU1HOFcCIior5iuLFSCpkzYkK8AHCdKSIiImMw3Fgx/Xw3DDdERER9xnBjxeIj+gHgnRsiIiJjMNxYsfgIJQCggCuEExER9RnDjRUb4O8JL4ULWtq4QjgREVFfMdxYMScnASPDfQBwnSkiIqK+YrixclwhnIiIyDgMN1ZupH6m4npJ6yAiIrIVDDdWrvvOzS/VjWjgCuFERETXxHBj5QK85Ajz6VwhvOAM15kiIiK6FoYbGxDftc5ULrumiIiIronhxgbEdz0xxXE3RERE18ZwYwPiuUI4ERFRnzHc2IC4UCWcnQRUN2hRqW6RuhwiIiKrxnBjA9xcnRETzBXCiYiI+oLhxkZwvhsiIqK+YbixEYn9O1cI/zavgvPdEBERGcBwYyPuHBGCSD93nNNo8bftx6Uuh4iIyGox3NgIhcwZf54dBwD4JPsU8tk9RURE1CuGGxsyYXAAZseHQhSBpZsK0K7rkLokIiIiq8NwY2N+f1cslG4yHKvU4OO9p6Quh4iIyOow3NgYf085lk2PAQD8bftxnKlrlrgiIiIi68JwY4PuTYzALVG+uNCmw8vfHuWsxURERJdguLFBTk4CXr87DjJnAf8tqsIPR1VSl0RERGQ1JA03K1aswM033wwvLy8EBgZi9uzZKC4uvuZxWVlZSExMhEKhwIABA7BmzRoLVGtdBgd5YcGkgQCAl9OPQsO5b4iIiABIHG6ysrKwcOFC7Nu3DxkZGWhvb0dKSgqampquekxpaSmmT5+OCRMmIDc3F8uWLcPixYuRlpZmwcqtw8LJgxDVPffND9cOhURERI5AEK1owEZ1dTUCAwORlZWFiRMn9trmxRdfRHp6OgoLC/XbFixYgPz8fGRnZ1/zd2g0GiiVSqjVanh7e5usdqn89EsNHvrXfggCsPmpcYjvWqaBiIjInhjz/W1VY27UajUAwNfX96ptsrOzkZKS0mPb1KlTkZOTg7a2K7tmtFotNBpNj5c9GTfIH/ckhHHuGyIioi5WE25EUURqairGjx+PuLi4q7ZTqVQICgrqsS0oKAjt7e2oqam5ov2KFSugVCr1r4iICJPXLrXldw6Fj7sMhZUafPTTKanLISIikpTVhJtFixbh559/xoYNG67ZVhCEHu+7e9Yu3w4AS5cuhVqt1r/Ky8tNU7AV8fOUY9m0oQCAtzOOo/w8574hIiLHZRXh5umnn0Z6ejp27dqF8PBwg22Dg4OhUvV89LmqqgouLi7w8/O7or1cLoe3t3ePlz26d3Q4bonunPvmj98e4dw3RETksCQNN6IoYtGiRdi0aRN27tyJ6Ojoax6TnJyMjIyMHtu2b9+O0aNHQyaTmatUqycIAv7SNffNruJqfH+Ec98QEZFjkjTcLFy4EOvXr8cXX3wBLy8vqFQqqFQqXLhwQd9m6dKlmD9/vv79ggULcPr0aaSmpqKwsBAffvgh1q1bhyVLlkhxClZlUKAXnuya++YVzn1DREQOStJws3r1aqjVatx6660ICQnRv7788kt9m8rKSpSVlenfR0dHY+vWrcjMzER8fDxee+01rFy5EnPmzJHiFKzOU5MHIdrfA1UNWvwf574hIiIHZFXz3FiCvc1z05u9v9Tgwa65bzY9ORYJ/ftJXRIREdENsdl5bsg0xg7yxz2jLs5908a5b4iIyIEw3Nip5dM7574pUjXgwz2lUpdDRERkMQw3dsrP
U45l0zvnvvn7Ds59Q0REjoPhxo7dm9g5901LWwfnviEiIofBcGPHOue+GQ5XZyfsKq7G1gLOfUNERPaP4cbODQr0xJO3ds19891RqC9w7hsiIrJvDDcO4MlbB2KAvweqG7R464ciqcshIiIyK4YbB6CQOePPd3eutP75/jIcOl0ncUVERETmw3DjIMYO9MecUeEQRWD5Zs59Q0RE9svocLNt2zbs2bNH//7dd99FfHw8HnzwQdTV8Y6ANVt+51D065r7Zh3nviEiIjtldLh5/vnnodFoAAAFBQV47rnnMH36dJSUlCA1NdXkBZLp+Hq46ue+eYdz3xARkZ0yOtyUlpYiNjYWAJCWloa77roLf/nLX/Dee+/h+++/N3mBZFq/TgxH0oDOuW9+/w3nviEiIvtjdLhxdXVFc3Pnv/h37NiBlJQUAICvr6/+jg5ZL0EQ8HrX3DdZx6vxn58rpS6JiIjIpIwON+PHj0dqaipee+01HDhwAHfeeScA4Pjx4wgPDzd5gWR6AwM88dTkzrlvXv3uGOe+ISIiu2J0uFm1ahVcXFzw9ddfY/Xq1QgLCwMAfP/997jjjjtMXiCZR/fcNzWNWvx1G+e+ISIi+yGIDjboQqPRQKlUQq1Ww9vbW+pyJJV9shYPfLAPAJD25FgkRvaTuCIiIqLeGfP9bfSdm8OHD6OgoED//ttvv8Xs2bOxbNkytLa2Gl8tSSZ5oB9+ndjZlbhsE+e+ISIi+2B0uHniiSdw/PhxAEBJSQnuv/9+uLu746uvvsILL7xg8gLJvJZN75z7pvhcAz74sUTqcoiIiG6Y0eHm+PHjiI+PBwB89dVXmDhxIr744gt8/PHHSEtLM3V9ZGa+Hq5Yfmfno/3/2HECJdWNEldERER0Y4wON6IooqOjs/tix44dmD59OgAgIiICNTU1pq2OLGLOqDAkD/CDtr0D89Yd4OR+RERk04wON6NHj8af//xnfPbZZ8jKytI/Cl5aWoqgoCCTF0jmJwgC/vFAPAYEeKCi/gLuX7uPAYeIiGyW0eHmnXfeweHDh7Fo0SIsX74cgwYNAgB8/fXXGDt2rMkLJMsI9FJg42NJGODPgENERLbNZI+Ct7S0wNnZGTKZzBQfZzZ8FNywc5oWPLB2H0pqmhDm44Yvn0hCeD93qcsiIiIHZ9ZHwbsdOnQI69evx+eff47Dhw9DoVBYfbChawvyVmDD40mIvuQOzpk63sEhIiLbYfSdm6qqKsydOxdZWVnw8fGBKIpQq9WYPHkyNm7ciICAAHPVahK8c9M3KnULHvhgH0prmhDh64aNjycjzMdN6rKIiMhBmfXOzdNPP42GhgYcPXoU58+fR11dHY4cOQKNRoPFixdfd9FkXYKVCmx4LAlRfu4oP38B96/NRkX9BanLIiIiuiaj79wolUrs2LEDN998c4/tBw4cQEpKCurr601Zn8nxzo1xKtWdXVOna5vR39cdGx9PQijv4BARkYWZ9c5NR0dHr2NrZDKZfv4bsh8hSjdseCwJ/X3dUXa+Gfev3YezvINDRERWzOhwc9ttt+F3v/sdzp49q99WUVGBZ599FrfffrtJiyPrEOrjho2PXww4D3ywD5VqBhwiIrJORoebVatWoaGhAVFRURg4cCAGDRqE6OhoNDQ04J///Kc5aiQrEOrjhg2PJyHC1w2na5vxwNp9UKlbpC6LiIjoCtc9z01GRgaKioogiiJiY2Pxq1/9ytS1mQXH3NyYM3XNXY+HX0CUnzs2Pp6MYKVC6rKIiMjOGfP9bbJJ/GwFw82NuzTgRPt7YMNjSQw4RERkViYPNytXruzzL7f2x8EZbkyjvGtwcUV9Z8DZ+HgSgrwZcIiIyDxMHm6io6P79IsFQUBJSUnfqpQIw43pXBpwBnQFnEAGHCIiMgN2SxnAcGNaPQJOgAc2PsaAQ0REpmeRtaWIACDC1x0bHktCqFKBkuomPPDBPlQ18CkqIiKSDsMN3bD+XU9NhSoVOFndhAfW
MuAQEZF0GG7IJPr7uWPD40kI6Qo4D36wH9UNWqnLIiIiB8RwQyYT6dc5qDjYW4FfqhrxwAf7GHCIiMjiGG7IpC4POA9+sA81jQw4RERkOdf1tFR9fT0OHDiAqqqqKxbLnD9/vsmKMwc+LWUZpTVNuH9tNs5ptLgpyBNfPJYEf0+51GUREZGNMuuj4N999x0eeughNDU1wcvLC4IgXPwwQcD58+evr2oLYbixnEsDzpAgL3zx2Bj4MeAQEdF1MOuj4M899xweffRRNDQ0oL6+HnV1dfqXtQcbsqzupRkCveQoPteAh/61H03adqnLIiIiO2d0uKmoqMDixYvh7u5ujnrIzgwI8MTGx5MQ4CVHkaoBb24rkrokIiKyc0aHm6lTpyInJ8cctZCdGhDgib/fFw8A+DT7NLJP1kpbEBER2TUXYw+488478fzzz+PYsWMYPnw4ZDJZj/0zZ840WXFkP8YP9scDt/THhgNleDHtZ2x7ZgLcXY3+z4+IiOiajB5Q7OR09Zs9giBAp9P1+bN2796Nt956C4cOHUJlZSU2b96M2bNnX7V9ZmYmJk+efMX2wsJCxMTE9Ol3ckCxdBpa2jD177txVt2C34yLwsszhkldEhER2QizDiju6Oi46suYYAMATU1NGDlyJFatWmXUccXFxaisrNS/Bg8ebNTxJA0vhQwr5owAAHy89xQOnuIAdCIiMj1J+wWmTZuGadOmGX1cYGAgfHx8TF8Qmd2kmwJw3+hw/DvnDF74+mdsXTwBbq7OUpdFRER2pE/hZuXKlXj88cehUCiwcuVKg20XL15sksIMSUhIQEtLC2JjY/H73/++166qblqtFlrtxRlyNRqN2esjw5bfGYus49UorWnC2xnFWH5nrNQlERGRHenTmJvo6Gjk5OTAz88P0dHRV/8wQUBJScn1FSII1xxzU1xcjN27dyMxMRFarRafffYZ1qxZg8zMTEycOLHXY1555RW8+uqrV2znmBtp7Sw6h0c/zoEgAF8vGIvEyH5Sl0RERFbMrDMUm0tfwk1vZsyYAUEQkJ6e3uv+3u7cREREMNxYgdQv87AptwIDAzywZfEEKGTsniIiot6ZdUCxtUlKSsKJEyeuul8ul8Pb27vHi6zDH2fEIsBLjpPVTXhnx9WvIRERkTGua0DxmTNnkJ6ejrKyMrS2tvbY9/bbb5uksL7Kzc1FSEiIRX8nmYaPuytenx2Hxz87hLW7T2JaXDBGRvhIXRYREdk4o8PNf//7X8ycORPR0dEoLi5GXFwcTp06BVEUMWrUKKM+q7GxEb/88ov+fWlpKfLy8uDr64v+/ftj6dKlqKiowKeffgoAeOeddxAVFYVhw4ahtbUV69evR1paGtLS0ow9DbISKcOCMXNkKNLzz2LJV/n4z+LxkLuwe4qIiK6f0d1SS5cuxXPPPYcjR45AoVAgLS0N5eXlmDRpEu69916jPisnJwcJCQlISEgAAKSmpiIhIQF//OMfAQCVlZUoKyvTt29tbcWSJUswYsQITJgwAXv27MGWLVtwzz33GHsaZEVemTkM/p6uOFHViH/+95drH0BERGSA0QOKvby8kJeXh4EDB6Jfv37Ys2cPhg0bhvz8fMyaNQunTp0yU6mmwRmKrdP3BZV48vPDcHYS8O3CcYgLU0pdEhERWRGzDij28PDQP30UGhqKkydP6vfV1NQY+3FEAIBpw0Nw5/AQ6DpELPkqH63tHVKXRERENsrocJOUlISffvoJQOcims899xxef/11PProo0hKSjJ5geQ4Xp01DL4erihSNeDdXeyeIiKi62N0uHn77bcxZswYAJ0T5E2ZMgVffvklIiMjsW7dOpMXSI7D31OOV2d2Lqb57q5fcOwsZ5MmIiLjGTXmRqfTYc+ePRgxYgT69bPNGWU55sa6iaKIBesP4Yej5xAb4o1vF42DzNnmp2MiIqIbZLYxN87Ozpg6dSrq6+tvpD6iqxIEAa/NjoOPuwzHKjVYk3ny2gcR
ERFdwuh/Eg8fPvy6148i6otALwVemdHZPbVy5wkUqxokroiIiGyJ0eHm9ddfx5IlS/Cf//wHlZWV0Gg0PV5EpjArPhS/GhqINl3n01PtOj49RUREfWP0PDdOThfzkCAI+p9FUYQgCNDpdKarzgw45sZ2nNO0YMrbWdC0tOOFO4bgqVsHSV0SERFJxJjvb6OXX9i1a9d1F0ZkjCBvBf44YxiWfJWPdzJOICU2CIMCvaQui4iIrJzR4SY6OhoRERE97toAnXduysvLTVYYEQDMGRWG//x8FpnF1Vjy1c9Ie3IsnJ2Eax9IREQOy+gxN9HR0aiurr5i+/nz5xEdHW2Sooi6CYKAFfcMh5fcBXnl9Vi3h4PZiYjIMKPDTffYmss1NjZCoVCYpCiiS4Uo3fD7u4YCAP62/ThKqhslroiIiKxZn7ulUlNTAXT+S/oPf/gD3N3d9ft0Oh3279+P+Ph4kxdIBAD3jY7Af36uxI8navDC1z/jyyeS2T1FRES96nO4yc3NBdB556agoACurq76fa6urhg5ciSWLFli+gqJ0Bmq35gzAilvZyHndB0+3nsKvx3PblAiIrqS0Y+C/+Y3v8E//vEPm32Mmo+C27bP95/G8s1HoJA5YdvvJiLK30PqkoiIyALMtvwCAHz00UcMBSSZB2/pj7ED/dDS1oEX0n5GR4dR2ZyIiBwAVyQkmyIIAt6cMwLurs44UHoen+07LXVJRERkZRhuyOZE+LrjpWkxAIA3txWh/HyzxBUREZE1Ybghm/Q/YyIxJtoXza06vPA1u6eIiOgihhuySU5Ond1TCpkTsktq8cWBMqlLIiIiK8FwQzYryt8Dz0/t7J5asbUQZ+rYPUVERAw3ZOMeGRuF0ZH90NSqw4tpP0PH7ikiIofHcEM2zdlJwF9/3dk99dMvtVj53xNSl0RERBJjuCGbNyDAE3+5ezgAYOXOE9hVXCVxRUREJCWGG7IL94wKx0Nj+kMUgWe/zOPj4UREDozhhuzGH2fEYkS4EvXNbXjq88NoadNJXRIREUmA4YbshtzFGe89NAo+7jIUVKjxp/8ck7okIiKSAMMN2ZXwfu54Z248BAH4Yn8Zvj50RuqSiIjIwhhuyO7cOiQQv7t9MABg+eYCHDurkbgiIiKyJIYbskuLbxuMSTcFQNvegac+PwRNS5vUJRERkYUw3JBdcnIS8M7ceIT5uOFUbTOW/DsfosgJ/oiIHAHDDdmtfh6ueO+hUXB1dsL2Y+ewdneJ1CUREZEFMNyQXRsZ4YOXZ8YCAN7cVoTsk7USV0RERObGcEN278Fb+uOeUWHoEIGnN+TinKZF6pKIiMiMGG7I7gmCgNdnD0dMsBdqGrVY9MVhtOk6pC6LiIjMhOGGHIKbqzNW/08ivOQuOHiqDm9+XyR1SUREZCYMN+Qwov098H/3jQQA/GtPKbYWVEpcERERmQPDDTmUqcOC8cSkAQCA57/Kx8nqRokrIiIiU2O4IYfzfMoQjIn2RVOrDgs+O4QmbbvUJRERkQkx3JDDcXF2wj8fTECglxwnqhqxdFMBJ/gjIrIjDDfkkAK9FHj3oVFwdhKQnn8Wn+07LXVJRERkIgw35LBujvLF0mkxAIDX/nMMh8vqJK6IiIhMgeGGHNpvx0dj+vBgtOlELPz8MGobtVKXREREN4jhhhyaIAh4c84IDAjwQKW6Bb/bmAddB8ffEBHZMoYbcnheChnW/E8i3GTO2PNLDf6x47jUJRER0Q1guCECcFOQF96YMxwAsHLnL9hZdE7iioiI6HpJGm52796NGTNmIDQ0FIIg4JtvvrnmMVlZWUhMTIRCocCAAQOwZs0a8xdKDmFWfBgeTo4EADz7ZT7KzzdLXBEREV0PScNNU1MTRo4ciVWrVvWpfWlpKaZPn44JEyYgNzcXy5Ytw+LFi5GWlmbmSslRLL8zFvERPlBfaMOTnx9CS5tO6pKIiMhIgmgls5cJgoDNmzdj9uzZV23z4osvIj09HYWF
hfptCxYsQH5+PrKzs/v0ezQaDZRKJdRqNby9vW+0bLJDZ+sv4K5/7sH5plY8cEsEVtwzQuqSiIgcnjHf3zY15iY7OxspKSk9tk2dOhU5OTloa2vr9RitVguNRtPjRWRIqI8bVt6fAEEANhwox79zyqUuiYiIjGBT4UalUiEoKKjHtqCgILS3t6OmpqbXY1asWAGlUql/RUREWKJUsnHjB/vjuSk3AQD+8M0RHD2rlrgiIiLqK5sKN0Bn99WlunvVLt/ebenSpVCr1fpXeTn/FU5989Stg3BbTCC07R14cv1hqC/0fneQiIisi02Fm+DgYKhUqh7bqqqq4OLiAj8/v16Pkcvl8Pb27vEi6gsnJwFv3zcS4f3cUHa+Gf/7yUFoWhhwiIisnU2Fm+TkZGRkZPTYtn37dowePRoymUyiqsie+bi7Ys3/JMJL4YKDp+rw0Af7cb6pVeqyiIjIAEnDTWNjI/Ly8pCXlweg81HvvLw8lJWVAejsUpo/f76+/YIFC3D69GmkpqaisLAQH374IdatW4clS5ZIUT45iLgwJTY8lgRfD1cUVKgx9/1snNO0SF0WERFdhaThJicnBwkJCUhISAAApKamIiEhAX/84x8BAJWVlfqgAwDR0dHYunUrMjMzER8fj9deew0rV67EnDlzJKmfHEdcmBL/fiIZwd4KnKhqxL1rsjnJHxGRlbKaeW4shfPc0I0oP9+Mh/61H2XnmxHsrcD6/x2DQYGeUpdFRGT37HaeGyKpRfi646sFyRgc6AmVpgX3vZ+NIxV8TJyIyJow3BAZKchbgS+fSMbwMGXnLMYf7EPOqfNSl0VERF0Yboiug6+HK754bAxuifJFQ0s75q07gB9PVEtdFhERgeGG6Lp5KWT45NFbMOmmAFxo0+G3H+fgh6Oqax9IRERmxXBDdAPcXJ3xwfzRmBYXjFZdB576/DA2556RuiwiIofGcEN0g1xdnPDPBxLw68Rw6DpEpP47H5/tOy11WUREDovhhsgEXJyd8Nc5I/DI2CiIYudim6szT0pdFhGRQ2K4ITIRJycBL8+IxaLJgwAAb24rwls/FMHBppIiIpIcww2RCQmCgCVTh+ClaTEAgHd3ncQr6UfR0cGAQ0RkKQw3RGawYNJAvDY7DoIAfJJ9Gs9//TPadR1Sl0VE5BAYbojMZF5SJN6+byScnQSkHT6DpzfkQtuuk7osIiK7x3BDZEZ3J4TjvYdGwdXZCd8fUeGxTw/hQisDDhGROTHcEJnZ1GHBWPfIaLjJnLH7eDUe/vAANC1tUpdFRGS3GG6ILGDC4AB89ttb4CV3wYFT5/HQB/txvqlV6rKIiOwSww2RhYyO8sWGx5Pg6+GKggo15r6fjXOaFqnLIiKyOww3RBYUF6bEv59IQrC3AieqGnHvmmyUn2+WuiwiIrvCcENkYYMCvfDVgmT093VH2flm3LsmG79UNUpdFhGR3WC4IZJAhK87vlqQjMGBnlBpWnDf+9lYtfME7+IQEZmAIDrY3PAajQZKpRJqtRre3t5Sl0MO7nxTKx7+8AAKKtT6bTdH9cPshDDcOTwEPu6uElZHRGQ9jPn+ZrghklhLmw7f5Z/FN3kV2HuyFt1/kTJnAZOHBOLuhDBMjgmEQuYsbaFERBJiuDGA4YasmUrdgvT8CmzOPYvCSo1+u5fCBXcOD8HshDDcEuULJydBwiqJiCyP4cYAhhuyFUUqDb7JPYtv8ypQqb74yHioUoFZCWG4OyEMNwV5SVghEZHlMNwYwHBDtqajQ8T+0vP4JrcCWwsq0aBt1++LDfHG3QlhmBkfiiBvhYRVEhGZF8ONAQw3ZMta2nTYWVSFzbkVyCyuQpuu889XEIBxA/0xOyEMd8QFw1PuInGlRESmxXBjAMMN2Yu6plZsKajEN7kVyDldp9+ukDlhSmww7k4IxYTBAZA5c8YHIrJ9DDcGMNyQPSqrbca3eRXYnFuBkpom/XZfD1fMGBGCXydGYHi4UsIKiYhu
DMONAQw3ZM9EUURBhRqbcyvwXf5Z1DReXJxzTLQvFkwaiFuHBEAQ+LQVEdkWhhsDGG7IUbTrOrDnlxpsOlyB749U6sfn3BTkiccnDsTMkaFwdWGXFRHZBoYbAxhuyBFVqi/go59O4Yv9ZWjsetoq2FuBR8dH4YFb+sNLIZO4QiIiwxhuDGC4IUemvtCGL/aX4aOfSlHVoAUAeMld8FBSJB4dF4VAPk5ORFaK4cYAhhsiQNuuw7e5Z/H+7pM4Wd05ANnV2Ql3J4ThsYkDMCjQU+IKiYh6YrgxgOGG6KKODhE7i6rw/u6TOHjq4uPkvxoahAWTBmB0lK+E1RERXcRwYwDDDVHvDp0+j/ezSpBReE6/eOeo/j54YtJATBkaxPWsiEhSDDcGMNwQGXayuhH/+rEEaYcq0KrrAAAM8PfA4xMHYHZCGFcnJyJJMNwYwHBD1DdVDS34ZO8pfJZ9GpqWzies/D3l+M24KPzPmEgo3fmEFRFZDsONAQw3RMZp1LZj44EyrNtTql+d3MPVGfff0h+Pjo9GmI+bxBUSkSNguDGA4Ybo+rTpOvBd/lms3V2CIlUDAMDFScDMkaGYPzYKI8OVnPmYiMyG4cYAhhuiGyOKIrKOV2Pt7hLsPVmr3x7l546Z8WGYFR+KgQF8lJyITIvhxgCGGyLT+flMPdbtKcUPR1VoaevQbx8epsSs+FDcNSIUwUpODEhEN47hxgCGGyLTa9K2I+PYOXybV4HdJ2qg6+j8vxVBAJKi/TArPhTT4kI4CJmIrhvDjQEMN0TmVduoxdYjKqTnVfSYGFDmLODWIYGYFR+K22OC4ObKR8qJqO8YbgxguCGynDN1zfguvxLf5lXoByEDnU9bTY0Lxqz4MIwb6AcXZ65OTkSGMdwYwHBDJI1iVQPS8yvwbd5ZnKm7oN/u5+GKu0aEYGZ8GEb19+ETV0TUK4YbAxhuiKQliiIOl9Xh27yz+M/PlTjf1KrfF+HrhpkjQzE7PgyDg7wkrJKIrA3DjQEMN0TWo03XgZ9+qUF63ln8cFSFpladft/QEG/Mig/FjJGhnCiQiBhuDGG4IbJOF1p1+G/ROXybdxaZxVVo0138v6aR4UqkDAvGlNggDA70ZNcVkQOyqXDz3nvv4a233kJlZSWGDRuGd955BxMmTOi1bWZmJiZPnnzF9sLCQsTExPTp9zHcEFm/+uZWbDuiwjd5Fdhfeh6X/r9UpJ87pgwNwpTYIIyO8oUzVysncgg2E26+/PJLzJs3D++99x7GjRuH999/H//6179w7Ngx9O/f/4r23eGmuLi4x4kFBATA2blvj5Uy3BDZlqqGFvy3sAoZx85hzy81aG2/OFlgP3cZbovpDDoTb/KHu6uLhJUSkTnZTLgZM2YMRo0ahdWrV+u3DR06FLNnz8aKFSuuaN8dburq6uDj43Ndv5Phhsh2NWnb8eOJamw/eg47i6tQ39ym3yd3ccKEwf6YEhuE24cGwd9TLmGlRGRqxnx/S/bPnNbWVhw6dAgvvfRSj+0pKSnYu3evwWMTEhLQ0tKC2NhY/P73v++1q6qbVquFVqvVv9doNDdWOBFJxkPugjviQnBHXAjadR04eKoOGcfOIaNQhfLzF7CjsAo7CqsgCAUY1b8fpsR23tXhWldEjkWycFNTUwOdToegoKAe24OCgqBSqXo9JiQkBGvXrkViYiK0Wi0+++wz3H777cjMzMTEiRN7PWbFihV49dVXTV4/EUnLxdkJyQP9kDzQD3+4ayiKVA2dQefYORRUqHHodB0Ona7DG98XYUCAB6bEBiElNggJEf3gxHE6RHZNsm6ps2fPIiwsDHv37kVycrJ+++uvv47PPvsMRUVFffqcGTNmQBAEpKen97q/tzs3ERER7JYismOV6gvYcewcth87h30ltT2evPL3lONXQwMxJTYI4wb5QyHjMhBEtsAmuqX8/f3h7Ox8xV2aqqqqK+7mGJKUlIT169dfdb9cLodczr53IkcSonTD
vOQozEuOgqalDZnF1cg4dg6ZRVWoadRi48FybDxYDjeZMybHBODBWyIxbpAfHzEnshOShRtXV1ckJiYiIyMDd999t357RkYGZs2a1efPyc3NRUhIiDlKJCI74K2QYebIUMwcGYrW9g7sL63Vd19VqluwtUCFrQUqDA70xPyxUbgnIQwecj51RWTLrOJR8DVr1iA5ORlr167FBx98gKNHjyIyMhJLly5FRUUFPv30UwDAO++8g6ioKAwbNgytra1Yv3493njjDaSlpeGee+7p0+/k01JEBHQuA3GkQoOvD5Xj60Nn9LMjeylccN/oCMxPjkSkn4fEVRJRN5volgKAuXPnora2Fn/6059QWVmJuLg4bN26FZGRkQCAyspKlJWV6du3trZiyZIlqKiogJubG4YNG4YtW7Zg+vTpUp0CEdkoQRAwPFyJ4eFKPDd1CNIOncEne0/hVG0z1u0pxYc/leK2IYF4eGwUJgz2Z5cVkQ2RfIZiS+OdGyK6mo4OEVknqvHJ3lPILK7Wbx8Q4IFHxkbhnlHh8GSXFZEkbGYSPykw3BBRX5RUN+LT7NP4+tAZNGrbAQBechfMSQzHw2OjEO3PLisiS2K4MYDhhoiM0aht7+yyyj6Fkuom/fZbhwTg4bFRmDQ4gPPmEFkAw40BDDdEdD06OkT8+EsNPtl7CruKq/SLeUb7e2B+ciR+nRgOL4VM2iKJ7BjDjQEMN0R0o07VNOHT7NP4KqccDV1dVh6uzvh1Yjjmj43icg9EZsBwYwDDDRGZSpO2HZsOn8En2afxS1WjfvvEmwLwyNhI3HpTILusiEyE4cYAhhsiMjVRFPHTL7X4eG8p/lt0scsq0s8dKbFB8FbI4CF3gafcBZ4Kl66fnS9uk3dukzk7SXsiRFaM4cYAhhsiMqey2mZ8tu8UNh4sR0NLu1HHuro4wasr6HQHIE/9zy5XhCEPuTO83WTwVsigdHOBt0IGbzcZ5C5OnJeH7A7DjQEMN0RkCc2t7fgu/yxOnGtEU2s7Glra0aRtR5NWh0ZtOxq1ne8bte3QtneY9He7OjvBuyvseLnJ4K1w0Ycg70tC0KXbGY7I2tnMDMVERPbK3dUFc2/u36e2bboOfdDpLfw0db0atJcFpJZ2NGjboLnQDk1LGzQX2tAhAq26DtQ0tqKmsfW6au8OR55yFzg5CeiOOYJw6c+AAAGXZqDuQCR07+/ad2k7oetgAYCPuwy3xQQiJTYYwUrFddVK1BveuSEishOiKKKpVQfNhbausNN+yc9t0LS099zXctnPXeFICvERPpg6LBhThwVhAJ82o16wW8oAhhsiot51dIhoam3Xh6BGbTs6utKOCEAUARFdXxkiun/Sbxcv7kL3V4uobytebNvVrqSmET8cPYdDp+t61HFTkGdX0AnGsFBvdpERAIYbgxhuiIisS5WmBduPncMPR1XIPlmL9ktuH4X3c9MHncTIfnDmo/UOi+HGAIYbIiLrpW5uw87ic9h2RIWs49Voabs42Nrf0xVTYoMwdVgwxg70h6sLH513JAw3BjDcEBHZhgutOmQdr8b2oyrsKDwHzSWP1nvJXTA5JhB3xAVj0k0B8OBq7XaP4cYAhhsiItvTpuvAvpJabDuiwvZj51DdoNXvk7s4YcLgAEwdFoRfDQ1CPw9XCSslc2G4MYDhhojItnV0iMgtr8cPR1X44agKp2ub9fucnQSMifbF1GHBuC0mEBG+7hJWSqbEcGMAww0Rkf0QRRFFqoauoHMOhZWaHvvDfNwwJtoXYwb4Yky0HyL93Pn0lY1iuDGA4YaIyH6V1Tbr7+jkltdDd9nEPUHecoyJ9tOHnYEBHgw7NoLhxgCGGyIix9Ckbceh03XYX1qLA6XnkVdejzZdz688f095jzs7gwM9uZK7lWK4MYDhhojIMbW06XC4rA77S85jf2ktcsvqr1jXq5+7DLdE++rv7sQEe3NuHSvBcGMAww0REQGAtl2H/HI19pfUYn/peRw6
XYcLbboebbwVLrg56uKdnWGh3nBx5vw6UmC4MYDhhoiIetPa3oGCCrW+GyvnVB0ate092njKXZAY2Q9jBvhi4uAALg9hQQw3BjDcEBFRX7TrOnCsUqPvxjpQer7HRIIAEOglx61DAnBbTCDGDw6AJycTNBuGGwMYboiI6HroOkQUqTrDTnZJLX76pQbNrRe7sWTOAm6O8sVtMYGYHBOIAf58EsuUGG4MYLghIiJT0LbrcLC0DjuLqrCruAqlNU099vf3ddcHnTHRvlDInCWq1D4w3BjAcENEROZQWtOEXV1BZ3/JebTqLj6J5SZzxrhBfpgcE4jJQwIR6uMmYaW2ieHGAIYbIiIytyZtO376pQa7iquwq6gaKk1Lj/0xwV76oDOqvw+fwOoDhhsDGG6IiMiSRFFEYWUDdhVXYWdRFXLL6nDpxMlKNxkm3hSAyUMCcOuQQPhy4c9eMdwYwHBDRERSqmtqxe4T1dhZVIWs49Wob27T7xMEID7CBxMG+SNY6QYfdxl83GTwcXft/NldBjeZs0MOVGa4MYDhhoiIrIWuQ0Reeeeg5J1F1Vcs/NkbV2cnKPWhRwalW2fw6efeGYKUXdt9urZ3v/eUu9h0KGK4MYDhhoiIrJVK3YJdxVU4fLoOdc1tUF9oRX1zG+ovtKG+ufWKtbGM4eIk6MNOoJcCQ4K9EBPshZgQb9wU5Al3V+ueo4fhxgCGGyIiskWiKKK5VacPOmp96GlD/YXO93XNF8OQumt7XXMbWi9bQ+tyggBE+rojJtgbMSFdoSfYG/193a1mIVGGGwMYboiIyNG0tOn0IaiuqQ1n6ppRrGpAkaoBRSoNahpbez3OTeaMm4K9MLTrLs+QYG/EBHuhnwSDno35/rbue1BERER0wxQyZwQrnRGsVHRt8euxv7pB2xV2NPrAc/xcIy606ZBfXo/88voe7YO85Vfc5RkY4AlXF+t4pJ13boiIiOgK7boOnKptRpFKg2JVAworO0PPmboLvbZ3cRIwMMATMSFeGBLshccnDDDp/D3sljKA4YaIiOj6NbS04fi5ri6tyot3exouWVTU39MVOb+fYtLfy24pIiIiMgsvhQyJkb5IjPTVbxNFEWfVLShWaVBY2QCp75sw3BAREdENEQQBYT5uCPNxw20xQVKXA+sY+UNERERkIgw3REREZFcYboiIiMiuMNwQERGRXWG4ISIiIrvCcENERER2heGGiIiI7ArDDREREdkVycPNe++9h+joaCgUCiQmJuLHH3802D4rKwuJiYlQKBQYMGAA1qxZY6FKiYiIyBZIGm6+/PJLPPPMM1i+fDlyc3MxYcIETJs2DWVlZb22Ly0txfTp0zFhwgTk5uZi2bJlWLx4MdLS0ixcOREREVkrSRfOHDNmDEaNGoXVq1frtw0dOhSzZ8/GihUrrmj/4osvIj09HYWFhfptCxYsQH5+PrKzs/v0O7lwJhERke0x5vtbsjs3ra2tOHToEFJSUnpsT0lJwd69e3s9Jjs7+4r2U6dORU5ODtra2sxWKxEREdkOyRbOrKmpgU6nQ1BQzwW2goKCoFKpej1GpVL12r69vR01NTUICQm54hitVgutVqt/r9FoTFA9ERERWSvJVwUXBKHHe1EUr9h2rfa9be+2YsUKvPrqq1dsZ8ghIiKyHd3f230ZTSNZuPH394ezs/MVd2mqqqquuDvTLTg4uNf2Li4u8PPz6/WYpUuXIjU1Vf++oqICsbGxiIiIuMEzICIiIktraGiAUqk02EaycOPq6orExERkZGTg7rvv1m/PyMjArFmzej0mOTkZ3333XY9t27dvx+jRoyGTyXo9Ri6XQy6X6997enqivLwcXl5eBu8QXQ+NRoOIiAiUl5fb/WBlRzpXwLHOl+dqvxzpfHmu9kcURTQ0NCA0NPSabSXtlkpNTcW8efMwevRoJCcnY+3atSgrK8OCBQsAdN51qaiowKeffgqg88moVatWITU1FY899hiys7Oxbt06
bNiwoc+/08nJCeHh4WY5n27e3t52/R/YpRzpXAHHOl+eq/1ypPPludqXa92x6SZpuJk7dy5qa2vxpz/9CZWVlYiLi8PWrVsRGRkJAKisrOwx5010dDS2bt2KZ599Fu+++y5CQ0OxcuVKzJkzR6pTICIiIisj+YDip556Ck899VSv+z7++OMrtk2aNAmHDx82c1VERERkqyRffsGeyOVyvPzyyz3G+NgrRzpXwLHOl+dqvxzpfHmujk3SGYqJiIiITI13boiIiMiuMNwQERGRXWG4ISIiIrvCcENERER2heHGSO+99x6io6OhUCiQmJiIH3/80WD7rKwsJCYmQqFQYMCAAVizZo2FKr1+K1aswM033wwvLy8EBgZi9uzZKC4uNnhMZmYmBEG44lVUVGShqq/fK6+8ckXdwcHBBo+xxesKAFFRUb1ep4ULF/ba3pau6+7duzFjxgyEhoZCEAR88803PfaLoohXXnkFoaGhcHNzw6233oqjR49e83PT0tIQGxsLuVyO2NhYbN682UxnYBxD59vW1oYXX3wRw4cPh4eHB0JDQzF//nycPXvW4Gd+/PHHvV7vlpYWM5+NYde6to888sgVNSclJV3zc63x2l7rXHu7PoIg4K233rrqZ1rrdTUnhhsjfPnll3jmmWewfPly5ObmYsKECZg2bVqPiQYvVVpaiunTp2PChAnIzc3FsmXLsHjxYqSlpVm4cuNkZWVh4cKF2LdvHzIyMtDe3o6UlBQ0NTVd89ji4mJUVlbqX4MHD7ZAxTdu2LBhPeouKCi4altbva4AcPDgwR7nmZGRAQC49957DR5nC9e1qakJI0eOxKpVq3rd/9e//hVvv/02Vq1ahYMHDyI4OBhTpkxBQ0PDVT8zOzsbc+fOxbx585Cfn4958+bhvvvuw/79+811Gn1m6Hybm5tx+PBh/OEPf8Dhw4exadMmHD9+HDNnzrzm53p7e/e41pWVlVAoFOY4hT671rUFgDvuuKNHzVu3bjX4mdZ6ba91rpdfmw8//BCCIFxzMltrvK5mJVKf3XLLLeKCBQt6bIuJiRFfeumlXtu/8MILYkxMTI9tTzzxhJiUlGS2Gs2hqqpKBCBmZWVdtc2uXbtEAGJdXZ3lCjORl19+WRw5cmSf29vLdRVFUfzd734nDhw4UOzo6Oh1v61eVwDi5s2b9e87OjrE4OBg8Y033tBva2lpEZVKpbhmzZqrfs59990n3nHHHT22TZ06Vbz//vtNXvONuPx8e3PgwAERgHj69Omrtvnoo49EpVJp2uJMrLdzffjhh8VZs2YZ9Tm2cG37cl1nzZol3nbbbQbb2MJ1NTXeuemj1tZWHDp0CCkpKT22p6SkYO/evb0ek52dfUX7qVOnIicnB21tbWar1dTUajUAwNfX95ptExISEBISgttvvx27du0yd2kmc+LECYSGhiI6Ohr3338/SkpKrtrWXq5ra2sr1q9fj0cfffSai8ja6nXtVlpaCpVK1eO6yeVyTJo06ap/v8DVr7WhY6yVWq2GIAjw8fEx2K6xsRGRkZEIDw/HXXfdhdzcXMsUeIMyMzMRGBiIm266CY899hiqqqoMtreHa3vu3Dls2bIFv/3tb6/Z1lav6/ViuOmjmpoa6HQ6BAUF9dgeFBQElUrV6zEqlarX9u3t7aipqTFbraYkiiJSU1Mxfvx4xMXFXbVdSEgI1q5di7S0NGzatAlDhgzB7bffjt27d1uw2uszZswYfPrpp/jhhx/wwQcfQKVSYezYsaitre21vT1cVwD45ptvUF9fj0ceeeSqbWz5ul6q+2/UmL/f7uOMPcYatbS04KWXXsKDDz5ocGHFmJgYfPzxx0hPT8eGDRugUCgwbtw4nDhxwoLVGm/atGn4/PPPsXPnTvztb3/DwYMHcdttt0Gr1V71GHu4tp988gm8vLxwzz33GGxnq9f1Rki+tpStufxfuKIoGvxXb2/te9turRYtWoSff/4Ze/bsMdhuyJAhGDJk
iP59cnIyysvL8X//93+YOHGiucu8IdOmTdP/PHz4cCQnJ2PgwIH45JNPkJqa2usxtn5dAWDdunWYNm0aQkNDr9rGlq9rb4z9+73eY6xJW1sb7r//fnR0dOC9994z2DYpKanHQNxx48Zh1KhR+Oc//4mVK1eau9TrNnfuXP3PcXFxGD16NCIjI7FlyxaDX/y2fm0//PBDPPTQQ9ccO2Or1/VG8M5NH/n7+8PZ2fmKVF9VVXVF+u8WHBzca3sXFxf4+fmZrVZTefrpp5Geno5du3YhPDzc6OOTkpJs8l8GHh4eGD58+FVrt/XrCgCnT5/Gjh078L//+79GH2uL17X76Tdj/n67jzP2GGvS1taG++67D6WlpcjIyDB416Y3Tk5OuPnmm23ueoeEhCAyMtJg3bZ+bX/88UcUFxdf19+wrV5XYzDc9JGrqysSExP1T5d0y8jIwNixY3s9Jjk5+Yr227dvx+jRoyGTycxW640SRRGLFi3Cpk2bsHPnTkRHR1/X5+Tm5iIkJMTE1ZmfVqtFYWHhVWu31et6qY8++giBgYG48847jT7WFq9rdHQ0goODe1y31tZWZGVlXfXvF7j6tTZ0jLXoDjYnTpzAjh07rit4i6KIvLw8m7vetbW1KC8vN1i3LV9boPPOa2JiIkaOHGn0sbZ6XY0i1UhmW7Rx40ZRJpOJ69atE48dOyY+88wzooeHh3jq1ClRFEXxpZdeEufNm6dvX1JSIrq7u4vPPvuseOzYMXHdunWiTCYTv/76a6lOoU+efPJJUalUipmZmWJlZaX+1dzcrG9z+bn+/e9/Fzdv3iweP35cPHLkiPjSSy+JAMS0tDQpTsEozz33nJiZmSmWlJSI+/btE++66y7Ry8vL7q5rN51OJ/bv31988cUXr9hny9e1oaFBzM3NFXNzc0UA4ttvvy3m5ubqnw564403RKVSKW7atEksKCgQH3jgATEkJETUaDT6z5g3b16Ppx9/+ukn0dnZWXzjjTfEwsJC8Y033hBdXFzEffv2Wfz8LmfofNva2sSZM2eK4eHhYl5eXo+/Y61Wq/+My8/3lVdeEbdt2yaePHlSzM3NFX/zm9+ILi4u4v79+6U4RT1D59rQ0CA+99xz4t69e8XS0lJx165dYnJyshgWFmaT1/Za/x2Loiiq1WrR3d1dXL16da+fYSvX1ZwYboz07rvvipGRkaKrq6s4atSoHo9HP/zww+KkSZN6tM/MzBQTEhJEV1dXMSoq6qr/MVoTAL2+PvroI32by8/1zTffFAcOHCgqFAqxX79+4vjx48UtW7ZYvvjrMHfuXDEkJESUyWRiaGioeM8994hHjx7V77eX69rthx9+EAGIxcXFV+yz5eva/dj65a+HH35YFMXOx8FffvllMTg4WJTL5eLEiRPFgoKCHp8xadIkfftuX331lThkyBBRJpOJMTExVhPsDJ1vaWnpVf+Od+3apf+My8/3mWeeEfv37y+6urqKAQEBYkpKirh3717Ln9xlDJ1rc3OzmJKSIgYEBIgymUzs37+/+PDDD4tlZWU9PsNWru21/jsWRVF8//33RTc3N7G+vr7Xz7CV62pOgih2jYQkIiIisgMcc0NERER2heGGiIiI7ArDDREREdkVhhsiIiKyKww3REREZFcYboiIiMiuMNwQERGRXWG4ISIiIrvCcENERER2heGGiIiI7ArDDREREdkVhhsiIiKyK/8P5MUHBP9MCLQAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plt.plot(loss_history)\n",
    "plt.ylabel('train loss')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "436f3944",
   "metadata": {},
   "source": [
    "## 模型预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "2c99ea7d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.8266666666666667\n"
     ]
    }
   ],
   "source": [
    "model.eval()\n",
    "translation_results = []\n",
    "\n",
    "correct = 0\n",
    "error = 0\n",
    "\n",
    "for enc_inputs, dec_inputs, dec_outputs in test_loader:\n",
    "    '''\n",
    "    enc_inputs: [batch_size, src_len]\n",
    "    dec_inputs: [batch_size, tgt_len]\n",
    "    dec_outputs: [batch_size, tgt_len]\n",
    "    '''\n",
    "    # enc_inputs, dec_inputs, dec_outputs = enc_inputs.to(device), dec_inputs.to(device), dec_outputs.to(device)\n",
    "    # outputs: [batch_size * tgt_len, tgt_vocab_size]\n",
    "    outputs, enc_self_attns, dec_self_attns, dec_enc_attns = model(enc_inputs, dec_inputs)\n",
    "    # pred形状为 (seq_len, batch_size, vocab_size) = (1, 1, vocab_size)\n",
    "    # dec_outputs, dec_self_attns, dec_enc_attns = model.decoder(dec_inputs, enc_inputs, enc_output)\n",
    "    \n",
    "    outputs = outputs.squeeze()\n",
    "    \n",
    "    pred_seq = []\n",
    "    for output in outputs:\n",
    "        next_token_index = output.argmax().item()\n",
    "        if next_token_index == tgt_vocab['<eos>']:\n",
    "            break\n",
    "        pred_seq.append(next_token_index)\n",
    "    \n",
    "    pred_seq = tgt_vocab[pred_seq]\n",
    "    tgt_seq = dec_outputs.squeeze().tolist()\n",
    "    \n",
    "    # 需要注意在<eos>之前截断\n",
    "    if tgt_vocab['<eos>'] in tgt_seq:\n",
    "        eos_idx = tgt_seq.index(tgt_vocab['<eos>'])\n",
    "        tgt_seq = tgt_vocab[tgt_seq[:eos_idx]]\n",
    "    else:\n",
    "        tgt_seq = tgt_vocab[tgt_seq]\n",
    "    translation_results.append((' '.join(tgt_seq), ' '.join(pred_seq)))\n",
    "    \n",
    "    for i in range(len(tgt_seq)):\n",
    "        if i >= len(pred_seq) or pred_seq[i] != tgt_seq[i]:\n",
    "            error += 1\n",
    "        else:\n",
    "            correct += 1\n",
    "    \n",
    "print(correct/(correct+error))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "87a19f24",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[('c s l s a u', 'c s l s a u'),\n",
       " ('p z z d q d', 'p u y d q d'),\n",
       " ('x b o h e f', 'x b o h e f'),\n",
       " ('u i n d c c', 'u n n d c c'),\n",
       " ('d y y j i t', 'd y y j i t'),\n",
       " ('b v v e r u', 'b y y e r u'),\n",
       " ('u i i p v y', 'u i i p p y'),\n",
       " ('h u b q e r', 'h u b u e r'),\n",
       " ('f d e t n r', 'y d t t m r'),\n",
       " ('e s b q s w', 'e s b q s w'),\n",
       " ('k p t v s e', 'k p t v x r'),\n",
       " ('o n t o f i', 'n n y o f i'),\n",
       " ('p a z g i l', 'p a z g i l'),\n",
       " ('h y y f o s', 'f y y j s s'),\n",
       " ('s p q d h e', 's p q d h e'),\n",
       " ('p c r d g r', 'p c r d g r'),\n",
       " ('l n i d w d', 'l n i a w d'),\n",
       " ('h w h b u i', 'h w h b u i'),\n",
       " ('q c a a u c', 'q c a a u c'),\n",
       " ('w g q j k c', 'w g u j k c'),\n",
       " ('f n c r b v', 'f n c r b d'),\n",
       " ('a j q p g v', 'a j q p g v'),\n",
       " ('o h i r z e', 'o h i r z e'),\n",
       " ('i y j t r j', 'i y j t r j'),\n",
       " ('c q o c o z', 'c r o c o z'),\n",
       " ('p x r p j g', 'p x r p g g'),\n",
       " ('m t l n d h', 'm t l n d h'),\n",
       " ('d s i k g k', 'd s y y g q'),\n",
       " ('c t c z f g', 'c t c z f g'),\n",
       " ('l c a s p b', 'l c a s p n'),\n",
       " ('q u p s l z', 'q u p d l z'),\n",
       " ('j w c p m s', 'j w c g m s'),\n",
       " ('z z f o a n', 'h t f o a n'),\n",
       " ('o t m v r y', 'o t m v r y'),\n",
       " ('u r b j x q', 'u r b j x q'),\n",
       " ('v q t x h u', 'c q t x h'),\n",
       " ('h r v q r g', 'h r y q r g'),\n",
       " ('w q s i a v', 'i q i i a v'),\n",
       " ('m d z l q z', 'm d c l q z'),\n",
       " ('m d y w z h', 'm i y w z h'),\n",
       " ('w p g n n j', 'w p s n n j'),\n",
       " ('q g r f f u', 'q g r f r u'),\n",
       " ('y i r r s u', 'y i x x s u'),\n",
       " ('c t e p o n', 'c k e p o n'),\n",
       " ('i n p w y e', 'i n p w y e'),\n",
       " ('r f g u f y', 'r f t t f y'),\n",
       " ('b f f o d v', 'b f f o d v'),\n",
       " ('b y f x p s', 'b y f x f s'),\n",
       " ('j s c z w e', 'j s c z w e'),\n",
       " ('g n h o k m', 'e n h o k m'),\n",
       " ('c p t s u d', 'c j j u u q'),\n",
       " ('h i x g y z', 'h i x g y e'),\n",
       " ('e p o s o m', 'e p o s o m'),\n",
       " ('h n f c y k', 'h n c c y k'),\n",
       " ('z g f j e a', 'w g f j e a'),\n",
       " ('p t i t i w', 'p t i y i w'),\n",
       " ('r m o q s j', 'r m o q s j'),\n",
       " ('t b l d h z', 'j b l l l z'),\n",
       " ('a i d x o b', 'a i d x o b'),\n",
       " ('o t e j r s', 'o t e j z z'),\n",
       " ('r t f q y v', 'r t q q y v'),\n",
       " ('w p h p t s', 'w p h e t s'),\n",
       " ('y c b e w d', 'y v b e w d'),\n",
       " ('x h v j n q', 'j h v j n q'),\n",
       " ('b w x d i r', 'b w x d i r'),\n",
       " ('a z n g m u', 'a z i g m u'),\n",
       " ('c d d i a c', 'c d f i a c'),\n",
       " ('w a g e p x', 'y a e e p x'),\n",
       " ('s h d p l o', 's h d p l o'),\n",
       " ('c g i d z e', 'c g i d e e'),\n",
       " ('f e v w z c', 'f e v w z b'),\n",
       " ('f v e q f x', 'f v e q b x'),\n",
       " ('u o j z e m', 'u j j z e m'),\n",
       " ('n t w m c b', 'n t w m c b'),\n",
       " ('j u z n s w', 'j u z n s w'),\n",
       " ('g n v n m o', 'g n v n m o'),\n",
       " ('h a t y s w', 'h a t y s w'),\n",
       " ('p y n q w g', 'z y n q w t'),\n",
       " ('f y w e w n', 'f y w e w n'),\n",
       " ('z y c z x e', 'z q q z x r'),\n",
       " ('w o n l u a', 'w o n l u a'),\n",
       " ('h x k c k w', 'h x k c k w'),\n",
       " ('k l d q u x', 'k l d q u x'),\n",
       " ('i t w u g r', 'i f a a g r'),\n",
       " ('x k y k b l', 'x k h k b l'),\n",
       " ('g r t r p m', 'g r t r p m'),\n",
       " ('t v k u z j', 'p v k v z e'),\n",
       " ('y f k j b h', 'y f m j b h'),\n",
       " ('c a l i n f', 'c a l i n f'),\n",
       " ('c q p z y h', 'c q p z y u'),\n",
       " ('g l o v b j', 'g l o v b j'),\n",
       " ('l i s p k d', 'l i s d k d'),\n",
       " ('s y q u s u', 's y t u s u'),\n",
       " ('q o w y j h', 'q w w y j h'),\n",
       " ('v t b b t c', 'v c d d t c'),\n",
       " ('v o y c y c', 'v o d d y c'),\n",
       " ('c x q s y l', 'c x q s y l'),\n",
       " ('h g x h p x', 'h g e h p'),\n",
       " ('e y q z m d', 'e z z z m d'),\n",
       " ('r n q o c g', 'r n q o c g'),\n",
       " ('l y u t z n', 'l y u z z n'),\n",
       " ('w n o f o l', 'w n o b o l'),\n",
       " ('e z v m p d', 'e z v m p d'),\n",
       " ('i l f o h c', 'i l f e h k'),\n",
       " ('d k d q t o', 'd k d q t o'),\n",
       " ('l x l f q o', 'l x l f q o'),\n",
       " ('f v k w d r', 'f v k w d r'),\n",
       " ('v p a g o k', 'v p a g o y'),\n",
       " ('f k i j q v', 'k k i j q v'),\n",
       " ('n m k c v t', 'n m k c v t'),\n",
       " ('a p c p q o', 'a p c p q o'),\n",
       " ('w o s u l q', 'w o x x l q'),\n",
       " ('z m z n j d', 'z m c n j n'),\n",
       " ('w n m v x n', 'w n f i x n'),\n",
       " ('i w s i t e', 'i w s i t e'),\n",
       " ('y m a d z z', 'y v a d z z'),\n",
       " ('m y k q o r', 'm y u o o r'),\n",
       " ('j d s g n c', 'j d e g n c'),\n",
       " ('q i o m r d', 'q i o m r d'),\n",
       " ('j c p b h k', 'j c p b h k'),\n",
       " ('f e k e k u', 'r b b e b h'),\n",
       " ('f t l s w u', 'f t x x w u'),\n",
       " ('k w k a y a', 'y w k a y a'),\n",
       " ('n q a a j i', 'n q a a j i'),\n",
       " ('s i d g f t', 's i d g c t'),\n",
       " ('h k s q y o', 'h k s q y o'),\n",
       " ('o p j k k k', 'e p j k k k'),\n",
       " ('t y i j n r', 't y i j n r'),\n",
       " ('j c h x t o', 'j c h x t o'),\n",
       " ('p k g c i t', 'p k g c i t'),\n",
       " ('i e c v k j', 'i z z v k q'),\n",
       " ('f u p r j u', 'y u c r j p'),\n",
       " ('e n y w o v', 'e n y w o q'),\n",
       " ('o q u q s a', 'o q u q s a'),\n",
       " ('w f u v h o', 'w f u v h w'),\n",
       " ('p n r x f x', 'p n u x f x'),\n",
       " ('u s q x e t', 'u e q x e t'),\n",
       " ('v b a r h t', 'v b a r h t'),\n",
       " ('p v v d f f', 'p e e d f f'),\n",
       " ('h q z f f t', 'f q y y t t'),\n",
       " ('u l b s o f', 'u l b d d f'),\n",
       " ('t q l h x o', 't q l j j'),\n",
       " ('v t a f d s', 'v t a f d s'),\n",
       " ('x f o p w x', 'x f t p w x'),\n",
       " ('v q o v u o', 'v k g g u o'),\n",
       " ('y r q r l f', 'y r q w l w'),\n",
       " ('r z l p u b', 'y z l p i b'),\n",
       " ('k x e e u w', 'k x w w u w'),\n",
       " ('w z e l w p', 'y e e l w p'),\n",
       " ('q h z j n y', 'q k z j n y'),\n",
       " ('c e o q j y', 'e e o q j y'),\n",
       " ('y c k z z u', 'y q k z z u'),\n",
       " ('y l i p y u', 'y l i p y u'),\n",
       " ('h c e a a y', 'h e e a a i'),\n",
       " ('b h m p j s', 'b h m p j s'),\n",
       " ('s g l n v s', 's g c n v s'),\n",
       " ('v e q u j y', 'e e q u j y'),\n",
       " ('g g w o x j', 'g w l s j j'),\n",
       " ('z b f o x i', 'z q q o x i'),\n",
       " ('m b z u e e', 'm b z u e e'),\n",
       " ('a b n u p l', 'a b n u p l'),\n",
       " ('o z b l q q', 'o z b q q q'),\n",
       " ('j y e r y x', 'j e e h h x'),\n",
       " ('e c p m h i', 'e c p m h i'),\n",
       " ('v r y s q e', 'v r y s l e'),\n",
       " ('j r x v a i', 'j m x v a i'),\n",
       " ('d z q u u q', 'd z q u u q'),\n",
       " ('i a x k v p', 'i s x k v p'),\n",
       " ('u p v t g l', 'b p v e g l'),\n",
       " ('i s u q k i', 'j e e e k i'),\n",
       " ('g a u b a m', 'g a u b a m'),\n",
       " ('h y a m m l', 'h y p m m l'),\n",
       " ('l q x a d j', 's r x a p j'),\n",
       " ('z d q q e r', 'z q q q e r'),\n",
       " ('g g o i p x', 'g g r i a x'),\n",
       " ('l y s l a j', 'z y s l a j'),\n",
       " ('t z t f d a', 't z t f d a'),\n",
       " ('i k g e n a', 'i k g h n a'),\n",
       " ('m n u z l z', 'm n u n l z'),\n",
       " ('k f o i y u', 'k f o i y u'),\n",
       " ('g s v t l e', 'g s v t l e'),\n",
       " ('o m q s o p', 'o r q s o p'),\n",
       " ('h j p t w d', 'h t p t w d'),\n",
       " ('j c z e j f', 'j c z e r r'),\n",
       " ('h g n z a q', 'b y n z a q'),\n",
       " ('u p p r l x', 'u p p r l x'),\n",
       " ('f l i r g b', 'f l i r g b'),\n",
       " ('l e a e l f', 'l e a e l f'),\n",
       " ('v i i f x b', 'v i i f x b'),\n",
       " ('e x l u q a', 'e x l u q a'),\n",
       " ('q n c o x i', 'q u c h x i'),\n",
       " ('f b l h m n', 'f b l h m n'),\n",
       " ('q t m h n n', 'q t m h n n'),\n",
       " ('m m j m t s', 'g g j m t s'),\n",
       " ('z g a l t t', 'z g a l t t'),\n",
       " ('s x n w l s', 'k f q w l s'),\n",
       " ('d e p o k b', 'd e p o k b'),\n",
       " ('j n i w p e', 'j t i w p e'),\n",
       " ('x o d w z t', 'x o d w z t'),\n",
       " ('z u g v y g', 'w u g y y g')]"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "translation_results"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
