{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "5efe74c5",
   "metadata": {},
   "source": [
    "### 使用seq2seq模型架构实现英译法任务\n",
    "\n",
    "![](./pics/seq2seq.png)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "317f8fca",
   "metadata": {},
   "outputs": [],
   "source": [
    "from io import open\n",
    "# 用于获得常见字符及字符规范化\n",
    "import unicodedata\n",
    "import re\n",
    "import random\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torch import optim\n",
    "# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "device = torch.device('cpu')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "cc7c8e12",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Map vocabulary words to numeric indices\n",
    "\n",
    "SOS_index = 0  # start-of-sequence token index\n",
    "EOS_index = 1  # end-of-sequence token index\n",
    "\n",
    "class Lang():\n",
    "    '''Vocabulary for one language: word <-> index mappings plus a running word count.'''\n",
    "    def __init__(self, name):\n",
    "        # name: language identifier, e.g. 'eng' or 'fra'\n",
    "        self.name = name\n",
    "        self.word2index = {}\n",
    "        self.index2word = {0: 'SOS', 1: 'EOS'}\n",
    "        self.n_words = 2  # SOS and EOS are pre-registered, so counting starts at 2\n",
    "    def add_word(self, word):\n",
    "        # Register a word if unseen, assigning it the next free index.\n",
    "        if word not in self.word2index:\n",
    "            self.word2index[word] = self.n_words\n",
    "            self.index2word[self.n_words] = word\n",
    "            self.n_words += 1\n",
    "    def add_sentence(self, sentence):\n",
    "        # Sentences are expected to be pre-tokenized with single spaces.\n",
    "        for word in sentence.split(' '):\n",
    "            self.add_word(word)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "84e74123",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "are you kidding me ?\n"
     ]
    }
   ],
   "source": [
    "# Text normalization\n",
    "\n",
    "def unicode2ascii(s):\n",
    "    '''Drop combining accent marks (Unicode category Mn) after NFD decomposition.'''\n",
    "    return ''.join(c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn')\n",
    "\n",
    "def norm_string(s):\n",
    "    '''Lowercase, de-accent, and space-separate punctuation so split(' ') tokenizes cleanly.'''\n",
    "    s = unicode2ascii(s.strip().lower())\n",
    "    # Insert a space before . ! ? so punctuation becomes its own token\n",
    "    s = re.sub(r'([.!?])', r' \\1', s)\n",
    "    # Collapse any run of characters that is not a letter or . ! ? into one space\n",
    "    s = re.sub(r'[^a-zA-Z.!?]+', r' ', s)\n",
    "    return s\n",
    "\n",
    "# Quick check\n",
    "s = \"Are you kidding me?\"\n",
    "print(norm_string(s))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "c148196d",
   "metadata": {},
   "outputs": [],
   "source": [
    "data_path = 'data/eng-fra.txt'\n",
    "\n",
    "def read_lines(src_lang, tgt_lang):\n",
    "    '''Read tab-separated sentence pairs from data_path, normalize both sides,\n",
    "    and create two empty Lang objects (vocabularies are filled later by\n",
    "    prepare_data, after filtering).\n",
    "\n",
    "    Returns (input_lang, output_lang, pairs).\n",
    "    '''\n",
    "    lines = open(data_path, encoding='utf-8').read().strip().split('\\n')\n",
    "    pairs = [[norm_string(s) for s in l.split('\\t')] for l in lines]\n",
    "    input_lang = Lang(src_lang)\n",
    "    output_lang = Lang(tgt_lang)\n",
    "    return input_lang, output_lang, pairs\n",
    "# Quick check\n",
    "src_lang = 'eng'\n",
    "tgt_lang = 'fra'\n",
    "input_lang, output_lang, pairs = read_lines(src_lang, tgt_lang)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "f6c450ab",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[['go .', 'va !'], ['run !', 'cours !'], ['run !', 'courez !'], ['wow !', 'ca alors !'], ['fire !', 'au feu !']]\n"
     ]
    }
   ],
   "source": [
    "print(pairs[:5])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "2790b187",
   "metadata": {},
   "outputs": [],
   "source": [
    "MAX_LENGTH = 10\n",
    "\n",
    "# Keep only English sentences beginning with one of these simple prefixes\n",
    "eng_prefixes = (\n",
    "    'i am ', 'i m ',\n",
    "    'he is', 'he s ',\n",
    "    'she is', 'she s ',\n",
    "    'you are', 'you re ',\n",
    "    'we are', 'we re ',\n",
    "    'they are', 'they re '\n",
    ")\n",
    "\n",
    "def filter_pair(p):\n",
    "    '''True when both sides have fewer than MAX_LENGTH tokens and the English side matches a prefix.'''\n",
    "    src, tgt = p[0], p[1]\n",
    "    short_enough = len(src.split(' ')) < MAX_LENGTH and len(tgt.split(' ')) < MAX_LENGTH\n",
    "    return short_enough and src.startswith(eng_prefixes)\n",
    "\n",
    "def filter_pairs(pairs):\n",
    "    '''Keep only the pairs that pass filter_pair.'''\n",
    "    return list(filter(filter_pair, pairs))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "7c24e45d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "过滤后的pairs前五个： [['i m .', 'j ai ans .'], ['i m ok .', 'je vais bien .'], ['i m ok .', 'ca va .'], ['i m fat .', 'je suis gras .'], ['i m fat .', 'je suis gros .']]\n"
     ]
    }
   ],
   "source": [
    "fpairs = filter_pairs(pairs)\n",
    "print('过滤后的pairs前五个：', fpairs[:5])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "4f41cd88",
   "metadata": {},
   "outputs": [],
   "source": [
    "def prepare_data(lang1, lang2):\n",
    "    '''Read, filter, and index the parallel corpus.\n",
    "\n",
    "    Returns (input_lang, output_lang, pairs) with both vocabularies populated\n",
    "    from the filtered sentence pairs.\n",
    "    '''\n",
    "    input_lang, output_lang, pairs = read_lines(lang1, lang2)\n",
    "    pairs = filter_pairs(pairs)\n",
    "    for src_sentence, tgt_sentence in pairs:\n",
    "        input_lang.add_sentence(src_sentence)\n",
    "        output_lang.add_sentence(tgt_sentence)\n",
    "    return input_lang, output_lang, pairs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "ba34f6e2",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "input_n_words:  2803\n",
      "output_n_words:  4345\n",
      "['i m sorry .', 'desole .']\n"
     ]
    }
   ],
   "source": [
    "input_lang, output_lang, pairs = prepare_data('eng', 'fra')\n",
    "print('input_n_words: ', input_lang.n_words)\n",
    "print('output_n_words: ', output_lang.n_words)\n",
    "print(random.choice(pairs))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "329931ad",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(tensor([[2],\n",
      "        [3],\n",
      "        [4],\n",
      "        [1]]), tensor([[2],\n",
      "        [3],\n",
      "        [4],\n",
      "        [5],\n",
      "        [1]]))\n"
     ]
    }
   ],
   "source": [
    "def sentence2tensor(lang, sentence):\n",
    "    '''Convert a space-tokenized sentence to a (seq_len, 1) LongTensor of word indices, with EOS appended.'''\n",
    "    indexes = [lang.word2index[word] for word in sentence.split(' ')]\n",
    "    indexes.append(EOS_index)\n",
    "    return torch.tensor(indexes, dtype=torch.long, device=device).view(-1, 1)\n",
    "\n",
    "def pair2tensor(pair):\n",
    "    '''Convert a (source, target) sentence pair to index tensors using the global Lang objects.'''\n",
    "    input_tensor = sentence2tensor(input_lang, pair[0])\n",
    "    output_tensor = sentence2tensor(output_lang, pair[1])\n",
    "    \n",
    "    return (input_tensor, output_tensor)\n",
    "\n",
    "# Quick check\n",
    "pair = pairs[0]\n",
    "print(pair2tensor(pair))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a304a2b6",
   "metadata": {},
   "source": [
    "#### 构建编码器\n",
    "\n",
    "![](./pics/encoder.png)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "05f03921",
   "metadata": {},
   "outputs": [],
   "source": [
    "class Encoder(nn.Module):\n",
    "    '''Single-layer GRU encoder; consumes one token index per forward call.'''\n",
    "    def __init__(self, input_size, hidden_size):\n",
    "        '''\n",
    "        input_size: source-language vocabulary size (number of embedding rows)\n",
    "        hidden_size: GRU hidden units, also used as the embedding dimension\n",
    "        '''\n",
    "        super(Encoder, self).__init__()\n",
    "        self.hidden_size = hidden_size\n",
    "        self.embedding = nn.Embedding(input_size, hidden_size)  # embedding dim equals hidden_size\n",
    "        self.gru = nn.GRU(input_size = hidden_size, hidden_size = hidden_size)\n",
    "    \n",
    "    def forward(self, x, hidden):\n",
    "        output = self.embedding(x).view(1, 1, -1)  # one token at a time; GRU needs a 3-D (seq, batch, feat) input\n",
    "        output, hidden = self.gru(output, hidden)\n",
    "        return output, hidden\n",
    "    \n",
    "    def init_hidden(self):\n",
    "        # Zero initial hidden state, shape (num_layers=1, batch=1, hidden_size)\n",
    "        return torch.zeros(1, 1, self.hidden_size, device=device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "29094c4f",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[[-0.0225, -0.1874,  0.3417, -0.2659,  0.0954,  0.0306, -0.2194,\n",
      "          -0.0333,  0.5661,  0.0891, -0.2912, -0.1238,  0.1182,  0.4186,\n",
      "           0.1189, -0.1347, -0.1015, -0.0724,  0.2176,  0.0935, -0.1461,\n",
      "          -0.1305,  0.0278,  0.0721,  0.0485]]], grad_fn=<StackBackward>)\n",
      "tensor([[[-0.0225, -0.1874,  0.3417, -0.2659,  0.0954,  0.0306, -0.2194,\n",
      "          -0.0333,  0.5661,  0.0891, -0.2912, -0.1238,  0.1182,  0.4186,\n",
      "           0.1189, -0.1347, -0.1015, -0.0724,  0.2176,  0.0935, -0.1461,\n",
      "          -0.1305,  0.0278,  0.0721,  0.0485]]], grad_fn=<StackBackward>)\n"
     ]
    }
   ],
   "source": [
    "# Quick check of the encoder on the first token of the first pair\n",
    "input_size = 20\n",
    "hidden_size = 25\n",
    "encoder = Encoder(input_size, hidden_size).to(device)\n",
    "\n",
    "pair_tensor = pair2tensor(pairs[0])\n",
    "\n",
    "# NOTE(review): input_size=20 is far smaller than the real vocabulary (2803);\n",
    "# this only works because the first pair's word indices happen to be small.\n",
    "x = pair_tensor[0][0].to(device)\n",
    "hidden = encoder.init_hidden()\n",
    "\n",
    "output, hidden = encoder(x, hidden)\n",
    "print(output)\n",
    "print(hidden)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "65c35b1b",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Parameter containing:\n",
      "tensor([[ 0.3274, -0.1059,  0.4267, -1.5469,  0.2529],\n",
      "        [-1.0401, -1.1065, -0.0505, -0.8051,  1.3302],\n",
      "        [-0.2504, -0.3548, -0.9850,  0.4371,  1.4371],\n",
      "        [-2.3268,  0.5805,  0.9877, -0.9873, -0.2463],\n",
      "        [-0.3329,  0.0450,  0.4488, -0.3771, -0.0337],\n",
      "        [ 2.0380, -0.5655,  0.1938, -1.0148,  0.4328],\n",
      "        [ 1.2841,  1.6338, -0.0274,  0.1830, -1.2926],\n",
      "        [-0.4389, -1.3914,  0.9053,  0.8655, -0.5254],\n",
      "        [-1.4992,  2.5682,  0.4470, -1.2894,  0.2397],\n",
      "        [-0.6729,  1.6130, -0.7612,  1.8608, -0.8260]], requires_grad=True)\n",
      "tensor([[-1.0401, -1.1065, -0.0505, -0.8051,  1.3302],\n",
      "        [-0.2504, -0.3548, -0.9850,  0.4371,  1.4371],\n",
      "        [-2.3268,  0.5805,  0.9877, -0.9873, -0.2463],\n",
      "        [-0.3329,  0.0450,  0.4488, -0.3771, -0.0337]],\n",
      "       grad_fn=<EmbeddingBackward>)\n"
     ]
    }
   ],
   "source": [
    "# Demo: nn.Embedding is a lookup table; embed(x) selects rows of embed.weight\n",
    "embed = nn.Embedding(10, 5)\n",
    "\n",
    "x = torch.tensor([1,2,3,4])\n",
    "y = embed(x)\n",
    "print(embed.weight)\n",
    "print(y)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "274791be",
   "metadata": {},
   "source": [
    "#### 构建基于GRU的解码器\n",
    "\n",
    "![](./pics/decoder.png)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "8613ada1",
   "metadata": {},
   "outputs": [],
   "source": [
    "class Decoder(nn.Module):\n",
    "    '''GRU decoder without attention: embeds one target token, runs the GRU,\n",
    "    and projects to log-softmax scores over the target vocabulary.'''\n",
    "    def __init__(self, hidden_size, output_size):\n",
    "        # hidden_size: GRU hidden units and target embedding dimension\n",
    "        # output_size: target-language vocabulary size\n",
    "        super(Decoder, self).__init__()\n",
    "        self.hidden_size = hidden_size\n",
    "        self.embedding = nn.Embedding(output_size, hidden_size)  # hidden_size is the target embedding dim\n",
    "        self.gru = nn.GRU(hidden_size, hidden_size)\n",
    "        self.out = nn.Linear(hidden_size, output_size)\n",
    "        self.softmax = nn.LogSoftmax(dim=1)\n",
    "    def forward(self, x, hidden):\n",
    "        output = self.embedding(x).view(1, 1, -1)\n",
    "        output = F.relu(output)\n",
    "        output, hidden = self.gru(output, hidden)\n",
    "        output = self.softmax(self.out(output[0]))  # (1, output_size) log-probabilities\n",
    "        return output, hidden\n",
    "    def init_hidden(self):\n",
    "        # Zero initial hidden state; in training the encoder's final hidden is passed instead\n",
    "        return torch.zeros(1, 1, self.hidden_size, device=device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "840b719a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[-2.2321, -2.4028, -2.2764, -2.1604, -2.3741, -2.2094, -2.4448, -2.3519,\n",
       "         -2.3694, -2.2444]], grad_fn=<LogSoftmaxBackward>)"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Quick check of the plain GRU decoder\n",
    "hidden_size = 25\n",
    "output_size = 10\n",
    "decoder = Decoder(hidden_size, output_size).to(device)\n",
    "\n",
    "x = pair_tensor[1][0].to(device)\n",
    "hidden = decoder.init_hidden()\n",
    "\n",
    "output, hidden = decoder(x, hidden)\n",
    "output"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a97d4da5",
   "metadata": {},
   "source": [
    "#### 构建基于GRU和Attention的解码器\n",
    "\n",
    "![](./pics/attention_decoder.png)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "d3e07884",
   "metadata": {},
   "outputs": [],
   "source": [
    "class AttnDecoder(nn.Module):\n",
    "    '''GRU decoder with attention over the encoder outputs.\n",
    "\n",
    "    Attention weights are computed from the embedded input token and the\n",
    "    current hidden state, then applied (via bmm) to the fixed-size matrix of\n",
    "    encoder outputs (max_length rows).\n",
    "    '''\n",
    "    def __init__(self, hidden_size, output_size, dropout_p=0.1, max_length=MAX_LENGTH):\n",
    "        super(AttnDecoder, self).__init__()\n",
    "        self.hidden_size = hidden_size\n",
    "        self.output_size = output_size\n",
    "        self.dropout_p = dropout_p\n",
    "        self.max_length = max_length\n",
    "        \n",
    "        self.embedding = nn.Embedding(self.output_size, self.hidden_size)\n",
    "        self.attn = nn.Linear(self.hidden_size*2, self.max_length)  # one score per encoder time step\n",
    "        self.attn_combine = nn.Linear(self.hidden_size*2, self.hidden_size)\n",
    "        self.dropout = nn.Dropout(self.dropout_p)\n",
    "        self.gru = nn.GRU(self.hidden_size, self.hidden_size)\n",
    "        self.out = nn.Linear(self.hidden_size, self.output_size)\n",
    "        \n",
    "    def forward(self, x, hidden, encoder_outputs):\n",
    "        # x: current target token index (single-element tensor); encoder_outputs: (max_length, hidden_size)\n",
    "        embedded = self.embedding(x).view(1, 1, -1)\n",
    "        embedded = self.dropout(embedded)\n",
    "        # Attention weights from [embedded ; hidden] -> (1, max_length)\n",
    "        attn_weights = F.softmax(self.attn(torch.cat((embedded[0], hidden[0]),1)), dim=1)\n",
    "        # Weighted sum of encoder outputs -> (1, 1, hidden_size)\n",
    "        attn_applied = torch.bmm(attn_weights.unsqueeze(0), encoder_outputs.unsqueeze(0))\n",
    "        output = torch.cat((embedded[0], attn_applied[0]), 1)\n",
    "        output = self.attn_combine(output).unsqueeze(0)\n",
    "        output = F.relu(output)\n",
    "        output, hidden = self.gru(output, hidden)\n",
    "        output = F.log_softmax(self.out(output[0]), dim=1)\n",
    "        return output, hidden, attn_weights\n",
    "    \n",
    "    def init_hidden(self):\n",
    "        return torch.zeros(1, 1, self.hidden_size, device=device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "379d8d40",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[-2.1971, -2.2124, -2.2406, -2.5928, -2.3315, -2.2437, -2.2981, -2.4328,\n",
       "         -2.2495, -2.2895]], grad_fn=<LogSoftmaxBackward>)"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Quick check of the attention decoder with random encoder outputs\n",
    "hidden_size = 25\n",
    "output_size = 10\n",
    "A_decoder = AttnDecoder(hidden_size, output_size)\n",
    "\n",
    "x = pair_tensor[1][0]\n",
    "hidden = A_decoder.init_hidden()\n",
    "encoder_outputs = torch.randn(10, 25)  # stand-in for the stacked per-step encoder outputs\n",
    "\n",
    "output, hidden, attn_weights = A_decoder(x, hidden, encoder_outputs)\n",
    "output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "d483dc2e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Training\n",
    "teacher_forcing_ratio = 0.5  # probability of using the ground-truth token as the next decoder input\n",
    "\n",
    "def train(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, max_length=MAX_LENGTH):\n",
    "    '''One optimization step on a single sentence pair; returns average per-token loss.\n",
    "\n",
    "    decoder is called as decoder(input, hidden, encoder_outputs), i.e. the\n",
    "    attention decoder defined above.\n",
    "    '''\n",
    "    encoder_hidden = encoder.init_hidden()\n",
    "    encoder_optimizer.zero_grad()\n",
    "    decoder_optimizer.zero_grad()\n",
    "    input_length = input_tensor.size()[0]\n",
    "    target_length = target_tensor.size()[0]\n",
    "    # Fixed-size buffer of per-step encoder outputs; rows past input_length stay zero\n",
    "    encoder_outputs = torch.zeros(max_length, encoder.hidden_size, device=device)\n",
    "    \n",
    "    loss = 0\n",
    "    \n",
    "    # Encode the source sentence one token at a time\n",
    "    for ei in range(input_length):\n",
    "        encoder_output, encoder_hidden = encoder(input_tensor[ei], encoder_hidden)\n",
    "        encoder_outputs[ei] = encoder_output[0,0]\n",
    "        \n",
    "    # Decoding starts from SOS, seeded with the encoder's final hidden state\n",
    "    decoder_input = torch.tensor([[SOS_index]], device=device)\n",
    "    decoder_hidden = encoder_hidden\n",
    "    \n",
    "    use_teacher_forcing = True if random.random() < teacher_forcing_ratio else False\n",
    "    \n",
    "    if use_teacher_forcing:\n",
    "        # Teacher forcing: next decoder input is the ground-truth target token\n",
    "        for di in range(target_length):\n",
    "            decoder_output, decoder_hidden, decoder_attention = decoder(\n",
    "                decoder_input, decoder_hidden, encoder_outputs\n",
    "            )\n",
    "            loss += criterion(decoder_output, target_tensor[di])\n",
    "            decoder_input = target_tensor[di]\n",
    "    else:\n",
    "        # Free running: next decoder input is the model's own top-1 prediction\n",
    "        for di in range(target_length):\n",
    "            decoder_output, decoder_hidden, decoder_attention = decoder(\n",
    "                decoder_input, decoder_hidden, encoder_outputs\n",
    "            )\n",
    "            topv, topi = decoder_output.topk(1)\n",
    "            loss += criterion(decoder_output, target_tensor[di])\n",
    "            if topi.squeeze().item() == EOS_index:\n",
    "                break  # stop once the model predicts EOS\n",
    "            decoder_input = topi.squeeze().detach()  # detach: do not backprop through the sampled input\n",
    "    loss.backward()\n",
    "    encoder_optimizer.step()\n",
    "    decoder_optimizer.step()\n",
    "    \n",
    "    return loss.item()/target_length"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "50848981",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline\n",
    "\n",
    "def train_iter(encoder, decoder, n_iters, print_every=1000, plot_every=100, learning_rate=0.01):\n",
    "    '''Train on n_iters randomly sampled pairs with SGD + NLLLoss.\n",
    "\n",
    "    Prints the running average loss every print_every iterations and plots\n",
    "    the loss curve (averaged over plot_every iterations) at the end.\n",
    "    Uses the global pairs / pair2tensor defined in earlier cells.\n",
    "    '''\n",
    "    plot_losses = []\n",
    "    print_loss_total = 0\n",
    "    plot_loss_total = 0\n",
    "    encoder_optimizer = optim.SGD(encoder.parameters(), lr=learning_rate)\n",
    "    decoder_optimizer = optim.SGD(decoder.parameters(), lr=learning_rate)\n",
    "    \n",
    "    criterion = nn.NLLLoss()\n",
    "    \n",
    "    for i in range(1, n_iters+1):\n",
    "        training_pair = pair2tensor(random.choice(pairs))\n",
    "        input_tensor = training_pair[0]\n",
    "        target_tensor = training_pair[1]\n",
    "        \n",
    "        loss = train(input_tensor, target_tensor, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion)\n",
    "        \n",
    "        print_loss_total += loss\n",
    "        plot_loss_total += loss\n",
    "        \n",
    "        if i%print_every == 0:\n",
    "            print_loss_avg = print_loss_total/print_every\n",
    "            print(print_loss_avg)\n",
    "            print_loss_total = 0\n",
    "            \n",
    "        if i%plot_every == 0:\n",
    "            plot_loss_avg = plot_loss_total/plot_every\n",
    "            plot_losses.append(plot_loss_avg)\n",
    "            plot_loss_total = 0\n",
    "            \n",
    "    plt.figure()\n",
    "    plt.plot(plot_losses)\n",
    "    plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "4e038ab6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4.065329728179508\n",
      "3.47404532235123\n",
      "3.284263515142028\n",
      "3.1336777385072088\n",
      "3.026122686354317\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAAv0UlEQVR4nO3dd3zT173/8deRZMt7ygbjgfEAA2abDUmAzIaMJimZN6XNapu26W1vm+SOpk1vf7ftvWlzm3TcJE1Ls9OSNJS2JARIQtjGjAA2YAwG2+C9jZd8fn9INh6yLWNLsuTP8/HwA1k6ks4XzFvH53vO56u01gghhPB+Bk93QAghxMiQQBdCCB8hgS6EED5CAl0IIXyEBLoQQvgIk6fe2GKx6OTkZE+9vRBCeKX9+/dXaK1jHD3msUBPTk4mOzvbU28vhBBeSSlV2N9jTk25KKXOKKU+U0odVEr1SWFl80ulVL5S6rBSau5wOiyEEGLohjJCX6G1rujnsRuAdPvXQuA39j+FEEK4yUidFL0F+KO22Q1EKKXiRui1hRBCOMHZQNfAB0qp/Uqphx08Hg+c6/Z9kf0+IYQQbuLslMsyrXWxUioW2KyUytNafzLUN7N/GDwMkJSUNNSnCyGEGIBTI3StdbH9zzLgXWBBrybFQGK37xPs9/V+nRe01lla66yYGIerboQQQlymQQNdKRWslArtvA1cCxzp1WwDcL99tcsioFZrfX7EeyuEEKJfzozQxwGfKqUOAXuBv2mtNymlvqKU+oq9zd+BAiAfeBH4mkt6C+w7U8VPN+UhZX+FEKKnQefQtdYFwCwH9/+2220NPDqyXXPs0LkafvPRKR65IoWIIH93vKUQQngFr6vlEhNqBqCiocXDPRFCiNHF+wI9xBboZfUS6EII0Z3XBbqla4Te6uGeCCHE6OJ1gd45Qq+QEboQQvTgdYEeHuiHyaAolzl0IYTowesC3WBQRIf4ywhdCCF68bpAB9tKFxmhCyFET14Z6JYQsyxbFEKIXrwy0GNCzFTUyyoXIYTozisD3RJqG6F3dMj2fyGE6OSdgR5ipr1DU3uxzdNdEUKIUcMrA122/wshRF9eGeiWEFtRrnJZuiiEEF28MtBj7SN0WboohBCXeGWgW+zb/2WELoQQl3hloIcH+uFnVFKgSwghuvHKQFdKyeYiIYToxSsDHWzTLjLlIoQQl3htoMeEyghdCCG689pAt4T4ywhdCCG68eJAN1PZ2Crb/4UQws5rAz0m1Iy1Q1Mj2/+FEALw4kCXtehCCNGT1wa61HMRQoievDbQO0foEuhCCGHjtYEeI1MuQgjRg9cGeligCX+jQQp0CSGEndcGum37v6xFF0KITl4b6NC5W1QKdAkhBHh5oFtCzFTICF0IIQAfCHSZQxdCCBuvDvSYUDNVja1YZfu/EEJ4d6BbQvyxdmiqm2QeXQghvDrQY0IDANlcJIQQ4OWBbgnxB6CiXkboQgjh3YFur+dS3tDs4Z4IIYTnOR3oSimjUuqAUmqjg8fWKqXKlVIH7V8Pjmw3Hesq0CUjdCGEwDSEto8BuUBYP4+/pbX++vC75LxQswl/k0Hm0IUQAidH6EqpBOBG4CXXdmdolFLEyMWihRACcH7K5Vnge0DHAG1uV0odVkr9WSmV6KiBUuphpVS2Uiq7vLx8iF11zBIqm4uEEAKcCHSl1GqgTGu9f4BmfwWStdYzgc3AOkeNtNYvaK2ztNZZMTExl9Xh3mKkQJcQQgDOjdCXAjcrpc4AbwIrlVKvdm+gta7UWnem6kvAvBHt5QCkQJcQQtgMGuha6ye11gla62TgLmCr1vq+7m2UUnHdvr0Z28lTt7CEmKlqbJHt/0KIMW8oq1x6UEo9DWRrrTcA31RK3Qy0A1XA2pHp3uBiQs10aKhqbO1axiiEEGPRkAJda/0R8JH99ve73f8k8ORIdsxZ3a8tKoEuhBjLvHqnKFwKdDkxKoQY
67w+0Lt2i8rSRSHEGOf1gd5VoEsCXQgxxnl9oIeYTQT4GWTKRQgx5nl9oCulbNcWlbXoQogxzusDHewXi5YpFyHEGOcTgR4TKgW6hBDCJwJdRuhCCOEjgR4TaqaysZV260DFIIUQwrf5RqCH+KM1VDXJiVEhxNjlE4Hetf1fLkUnhBjDfCLQY7ouFi3z6EKIscsnAv3SCF0CXQgxdvlEoMsIXQghfCTQg80mAv2MMkIXQoxpPhHoAJZQf1mLLoQY03wm0GNCzDLlIoQY03wm0C0hZlm2KIQY03wm0GNCHW//r29u4+1956TWixDC5132RaJHG0uImaom2/Z/k9FAZUMLf9h5hnU7z1DX3M7dCxL5r9tmerqbQgjhMr4T6KFmtIYjJXW8d7CYN/aepbmtg+unj6epzcrGQ+d56qbpBPgZPd1VIYRwCZ8J9Bj75qJbf7UDo0Fx6+x4vnpVCmmxoew8VcE9L+7h/aMXuGV2vId7KoQQruEzgZ4ZH0ZiVCArpsTy0PIUEqOCuh5bNCma+IhA1ucUS6ALIXyWzwR6QmQQ27+30uFjBoPitrnx/GpbPhdqmxkfHuDm3gkhhOv5zCqXwdw+N4EODe8eKPZ0V4QQwiXGTKAnW4LJmhjJ+pwitNae7o4QQoy4MRPoALfPSyC/rIHDRbWe7ooQQoy4MRXoN86Mw2wy8Of9RZ7uihBCjLgxFehhAX5cN308Gw6V0NJu9XR3hBBiRI2pQAfbtEvtxTa25pZ5uitCCDGixlygL0uzMC7MzPocmXYRQviWMRfoRoPi83MS2Ha8fMgFu6wdmqpGqegohBidxlygA9wxLx5rh+a9g86tSe/o0Pzjs/Pc8L+fsOi/tlBY2ejiHgohxNCNyUBPiw1lVmIE63MGDnStNR8eK2X1c5/y1ddyaO/QWDs0r+8966aeCiGE88ZkoAPcMTee3PN1HC3puyZda83HJ8q59dc7efCP2TS2tvPzNbPY/M9XcvXUWP6UXSSrZIQQo47TtVyUUkYgGyjWWq/u9ZgZ+CMwD6gE7tRanxnBfo64m2ZN4Ecbc/nS7/cRbDbR0malpb3D/mWlzaqJjwjkp7fP4La5CfgZbZ999y6cyPtHS3n/aCk3z5rg4aMQQohLhlKc6zEgFwhz8NgDQLXWOk0pdRfwU+DOEeify0QE+fP4DRnsPV2J2WTEbDJg9jN03U62BHPr7Hj8TT1/iVmWZiEpKojXdhdKoAshRhWnAl0plQDcCPwY+LaDJrcAP7Df/jPwvFJK6VFeNOWBZZN4YNmkIT3HYFDcszCJn/wjj/yyetJiQ13UOyGEGBpn59CfBb4HdPTzeDxwDkBr3Q7UAtG9GymlHlZKZSulssvLy4fe21HijnkJ+BkVr+0Z+ORoc5uVB9dl8/7RC27qmRBiLBs00JVSq4EyrfX+4b6Z1voFrXWW1jorJiZmuC/nMZYQM9dnxrF+fxHNbf2fHP3ZpuN8mFvKG7IqRgjhBs6M0JcCNyulzgBvAiuVUq/2alMMJAIopUxAOLaToz7r3oVJ1DW3s/HweYeP78yv4OUdpwkxm9hTUCWrYoQQLjdooGutn9RaJ2itk4G7gK1a6/t6NdsAfNF++w57m1E9fz5cCydFkRoTzGt7Cvs8Vtfcxr/86RAplmB+/PlMLrZZOXC2xv2dFEKMKZe9Dl0p9bRS6mb7t78DopVS+dhOmj4xEp0bzZRS3LNwIgfO1nCspK7HYz/YcJTS+haeWTOLFRmxGA2KT09WeKinQoixYkiBrrX+qHMNutb6+1rrDfbbzVrrL2it07TWC7TWBa7o7Ghz+9x4zCYDr++9NErfdOQ87+QU8+hVqcxJiiQswI9ZCeF8mi+BLoRwrTG7U3QkRAT5s3rmBN7NKaahpZ2y+mb+9d0jZMaH8Y1V6V3tlqVZOFxUQ21Tmwd7K4TwdRLow3TPwiQaW61sOFjCk+s/o6GlnV+smd21sxRgWXoMHRp2Ffj0eWIhhIdJoA/T
3KQIMsaH8l9/z2VLXhmPX59B+riem41mJ0YQ5G/k03zvXXsvhBj9JNCHSSnFvYsmUt/SzuKUaL60JLlPG3+TgUUp0ezIlxG6EMJ1hlLLRfTjjrkJVDa0cNf8JAwG5bDN0jQLW/PKKKpuIiEyyM09FEKMBTJCHwGB/ka+dfVkxocH9NtmeboFgB2y2kUI4SIS6G6SHhtCbKiZ7bIeXQjhIhLobqKUYlmahZ2nKuno8OlNtEIID5FAd6Nl6RaqGls5dr5u8MZCCDFEEuhutDRN5tGFEK4jge5G48ICmDwuRMoACCFcQgLdzZamWdh7umrAOupCCHE5JNDdbHm6hZb2DvYXVnu6K0IIHyOB7mYLJkVjMiiZdhFCjDgJdDcLMZuYmxQp9dGFECNOAt0DlqZZOFJSS3Vjq6e7IoTwIRLoHrAs3YLWsPOUFOsSQowcCXQPmJUQTqjZJOV0hRAjSgLdA0xGA1dMiWHTkQu0tMvyRSHEyJBA95C75idS3dTG+0dLPd0VIYSPkED3kKWpFhKjAnljz1lPd0UI4SMk0D3EYFDcNT+JXQWVnK5o9HR3hBA+QALdg74wLwGjQfHmvuGN0k+W1ktJXiGEBLonxYYFsCojlvX7i2ht77is1zhZWs+1z37C7z49PcK9E0J4Gwl0D7t7QRIVDa18mHt5J0c/OFaK1vDSpwWyYkaIMU4C3cOumBzDhPAA3th7edMuH+aWEhZgorSuhfcOlIxw74QQ3kQC3cOMBsWa+YlsP1nBuaqmIT23vL6Fg+dqeHB5ClPjwvi/T07JXLoQY5gE+iiwJisRg4K39p0b0vO25ZWhNayaGstXrkzhVHkjW/LKBn3ezvwKtlzmFI8QYvSSQB8FJkQEctWUWN7OPke71fmTox/mljIhPIBpcWHcOCOO+IhAfvvxqQGfk1/WwJfX7eOrr+ZwRpZLCuFTJNBHibsXJFFW38JWJ0bYAM1tVrafrGDV1HEopTAZDTy0fBL7C6vJPlPl8Dmt7R089uYBAv2M+BkVP9p4bCQPQQjhYRLoo8SKKTGMCzM7fXJ0V0ElF9usrJoa23XfmvmJRAb58duPCxw+55nNxzlaUsdPb5/JY1ensyWvTKZehPAhEuijhMloYE1WIh+fKKe45uKg7bfklhLkb2RRSnTXfUH+Ju5fnMyHuaWcLK3v0X5nfgUvfFLAPQuTuHb6eNYumURqTDBPbzwm1zcVwkdIoI8ia7IS0cDbg5wc1VqzJbeM5ekWAvyMPR774pJkAvwMvPDJpVF6TVMr3377EJMswfz7jVMB8DcZ+MHN0ymsbOKl7Y5H9EII7yKBPookRgWxPD2GN/ed5WJr/6PmoyV1nK9t5uqp4/o8FhXsz51ZifzlYDEXapvRWvPkO59R2djCL++aQ5C/qavt8vQYbsgcz/Pb8p36rcATOjo07x+9gFWWYwoxKAn0UebRq1IprWvhV9vy+22zJbcMpWBFRqzDxx9cnkKHhpd3nOZP+4v4x5ELfOfaKWTGh/dp+2/2EfuP/zY6T5B+fKKcR17ZzzYnTxYLMZZJoI8yC1Oi+fyceF74pICC8gaHbbbklTInMQJLiNnh44lRQdw4I47Xdhfygw1HWZwSzcPLUxy2TYgM4tGr0vj7ZxdG5YWrd56y9el4r3MCQoi+Bg10pVSAUmqvUuqQUuqoUuqHDtqsVUqVK6UO2r8edE13x4YnP5eB2WTgqQ1H0brnVENpXTOHi2pZ5WC6pbuHr0ihsdWKn9HAM2tmYTCofts+dEUKSVFBPLXhyGUXCXOV3QW2JZj5ZY4/3IQQlzgzQm8BVmqtZwGzgeuVUosctHtLaz3b/vXSSHZyrIkNDeDb105m+8kKNh250OOxLbm2qYdrpg0c6Jnx4fzgpmm8eH8WEyICB2wb4GfkqZumcaq8kXU7zwyr7yOp9mIbR0tqAQl0IZwxaKBrm87/TX72LzlD5WL/tGgiU+PCeHrjMRpb2rvu35JbSmJUIOmxIYO+
xtqlk1gwKcqp91s1dRwrM2L5+eYT7CmovOx+j6R9p6vo0DA1LoxT5Q1Sp0aIQTg1h66UMiqlDgJlwGat9R4HzW5XSh1WSv1ZKZXYz+s8rJTKVkpll5fLFe8HYjIa+NEt0zlf28xzW20nSC+2Wvk0v4JVGbbdoSPtJ7fNYEJEAF/8/V62n/T8v8+ugkr8TQbumJdAU6uV83XNnu6SEKOaU4GutbZqrWcDCcACpVRmryZ/BZK11jOBzcC6fl7nBa11ltY6KyYmZhjdHhuykqO4Y14CL20vIL+snk/zK2hp73C4XHEkxIYF8NYji0mODuaBP2Tz4THP7iLdXVDJ3KQIpk8IA2TaRYjBDGmVi9a6BtgGXN/r/kqtdYv925eAeSPSO8ETN2QQ5G/k++8dZUtuKaFmk9PTKJfDEmLmzYcXMTUulK+8up+/HT7vsvcaSG1TG8fO17E4xdI1vdR796sQoidnVrnEKKUi7LcDgWuAvF5t4rp9ezOQO4J9HNMsIWa+e90Udp6qZH1OEVdMicHf5NrVphFB/rz64ELmJEXwjTdyWL+/aEjPr2tuY09BJVWNrZfdhz2nK9EaFqVEER1iJjLIj1P9LOMUQtiYBm9CHLBOKWXE9gHwttZ6o1LqaSBba70B+KZS6magHagC1rqqw2PRPQsn8lb2OY4U13H1VMebiUZaaIAf6768gIf+mM13/nSIhpZ2Vk2NxWQwYDIq/Ox/ApworedwUS2HztVwqKiGU+W2srxx4QG88dAiki3BQ37/3QVVmE0GZidFAJAWGyJTLkIMQvVe5+wuWVlZOjs72yPv7Y2OltTyzAcnePau2YQF+LntfZvbrHzttRynyvpaQszMTgxnVkIESdFB/PCvx/AzKt54aBEpMYOvyunuc/+7nYggP15/yLZC9sl3DrPpyAUOfP/ayzoOIXyFUmq/1jrL0WPOjNDFKDB9Qjgvr53v9vcN8DPy2/vmsTWvlLrmdtqtmvaODtqsmnZrB1atmRQdzKzECOLCA3qsvskYH8Y9L+7mzhd288ZDi0hzYqkl2IqJ5V6o45+vntx1X1psKNVN56hsaCG6nx2yQox1EuhiUP4mA9dnxg3esJcp40N58+FF3P3iHu56YTdvPLSQ9HGhgz5vz+kq+/z5pdLAnR8GJ8saxnygHymuJTbMTGxogKe7IkYZqeUiXCp9nC3UDQruemE3xy8MvlJld0ElAX4GZiVeKibWGehjfR79XFUTt/1mJ//vb7LuQPQlgS5cLi02hDcfXoTJqLj7xd0cK6kbsP2uU5XMmxiJ2XSp1vuE8ACC/I1jPtB/uimP1vYOdhdU9anzI4QEunCLlJgQ3np4MWaTgbW/30ttU5vDdtWNreRdqGfRpOge9yulSI0JGdNLF/cXVrHx8HkmRgdxoa6ZourRWcNeeI4EunCbZEswL96fRWVjKz/+u+P663tO26orLk6N7vNYupuXLlY2tPDgumwOnK1223v2p6ND8/TGXGJDzfx8zWwA9p52fDFwMXZJoAu3yowP5+ErUng7u4hPTvStF9M5fz4zIaLPY6mxIZyvbaa+2fHofqS9sruQD3NL+cqr+6loaBn8CZfJmZLFGw6VcOhcDd+9bgpzEiMICzCx74wEuuhJAl243WOr0kmJCebJdz6joVslSbAFetbEKIe7YTtPjHZuXHKllnYrr+4uJDM+jJqmNh5784BLLoP38qenyXzqfd7ce7bfNhdbrfx0Ux6Z8WHcPjcBg0ExPzmKvRLoohcJdOF2AX5G/vuOmZTUXuRnmy5Vkaiyz587mm4B9650+euh81Q0tPLkDVP50S2Z7Miv5NkPT4zoe2w6coEf/e0Ygf5GnnjnM379Ub7DE50vbi/gfG0z/3HjtK4LlcyfFEVBeSPl9a77zUF4Hwl04RHzJkaxdkkyf9xV2DUX3FmHfVGK4+JjE6OC8DMqlwe61prffXqaKeNCWZIazZr5iazJSuC5rfkjdm3Tg+dq+NZbB5iV
EMH2x1dw86wJ/GzTcX78t9wedd9L65r5zUenuCFzPAu7rcufn2z7O8qWUbroRgJdeMx3r5tCYlQgj68/THObld0FlQT6GZkRH+GwvcloYJIl2OWBvrugitzzdXx5WXLXztenb8lkalwY33rrIEXVTcN6/XNVTTy4bh8xoWZe+mIWYQF+PHvnbNYuSealT0/zL386RJvVNq/+3+8fx9qhefKGqT1eY0Z8OAF+Bpl2ET1IoAuPCfI38ZPbZnK6opFfbD7B7oIqspIjB6wmmRbr/NLFi61WTpbWsyW3lD/sOM3Tfz3Gup1nBl2//fKO00QF+3PL7Piu+wL8jPzm3rl0dGi+9loOLe1W5w6yl9qmNtb+fi9tVs3v1y7outC3waB46qZpfOeaybxzoJhHXtnPvjNVrM8p4ktLk0mKDurxOv4mA3MSI+XEqOhBtv4Lj1qaZuHuBYm8uL2ADg03z54wYPu0mBA2HblAc5uVAD+jwza/2HyC1/ee7TO/7G800GrtwGBQ/NOiiQ6fW1jZyIe5pXx9RVqf10+2BPM/a2bxyCv7+dHGY/znrTOGcKS2E62PvJrN2aomXnlgYZ/aNkopvrEqnagQf/79L0f4+EQ5UUH+PLoyzeHrzZ8UxfNbT1Lf3EaoGwu2idFLAl143JOfm8q2vHIu1DX3qN/iSGpsCB0azlQ2kjE+rM/jxTUXeW7rSeYnR/HFxRNJjAoiMSqIpKggIoP8eWDdPp7+61EyJ4QxJymyz/P/sPMMpgEC/7rp43nkihT+75MCFqdYuHGmczVutNY8sf4zdhdU8eydswc8znsXTiQqyJ/v/OkQT9yQ0W91zQXJUXRo2F9YzVVT3FNWeTia26wUVV+kuOYiRdVN1F5sY+2SZIL8JYZGivxNCo8LC/DjF3fO5tXdhcxMCB+wbXqsrbhXflmDw0B/fU8hAM+smUVCZFCfx5+9czarn/uUr72Ww8ZvLOtR6KuuuY23953jppkTiA3rv/DVd6+bwvaTFfzs/Tyumz4Ok3HwmcuXd5zh3QPFfOeaydw6J37Q9jfMiOOaaQO/9pykCIwGxb4zVaM20I8U1/If7x3hXFUTFQ19L3gSHujHvQsdf3j29u23DxIR6M/3b5o20t3skn2miqSooAH//UczmUMXo8Li1Gh+de9c/AYJx5SYYJRyvHSxpd3KW/vOsTJjnMMwB9vVmH573zwqG1v5Zq+15W/vO0djq5UvLZ00YB9MRgOPXZ1OYWUTfz1cMuix1V5s45dbTnLF5Bi+3s/0SX/vM5Bgs4nMCWHsO+35naz9efdAMUeL67h66ji+c81kfnHnLP70lcXsfGIlCZGBbMtz7mLktRfb2HCwhHcOFPVYBTSSGlrauefFPfzs/eMueX13kEAXXiXAz0hiZJDDQN905AIVDa3cv3jgEV9mfDj/aV9b/swHtv+81g7NH3aeYUFyFDMG+S0B4Jqp48gYH8rzW/MH3XD00vYCai+28b3rpvSoFz8SFkyK4mBRDc1tl3eS1tVyzlYzMyGcn9w+k2+sSufzcxKYnxzFhIhAVkyJZUd+hVN9/+REOe0dmpqmNnIvDFzc7XJ9erKcVmsHO/IrvLbwmQS68Dr9XY7u1d2FJEcHsSzNMuhrrJmfyN0LEvn1R6f44OgFNh8rpaj6Il9eluxUHwwGxddXpnGqvJG/f9b/hbQrGlr43aenuXFmHJnxg39QDNX85Cha2zs4XFQ74q89XC3tVo4W1zF3Yt9zFQArM2K52GZ1qibNltxSgv1tJ6l3naoc0X5eeg/bHoPztc2cqRze0lRPkUAXXic9NoSCisYeI+Pc83XsO1PNfYsmdu2mHMxTN01nRnw433n7EM9+eIKEyECumTbe6X7ckBlHWmwIz2/N73ca4NfbTtHcZuXb10x2+PhwdW4wGo3LF48U19Fq7WCu/bqwvS1OjcZsMgx6ecN2awcfnSjnuszxpFiC2emCQO/o0Gw7XtZ1DmdHfsWIv4c7SKAL
r5MaG0Jrewfnqi6Nol7ZXYjZZOCOeQlOv06An5Hf3DcXo1GRd6GetUuSMTr5YQBgNCi+viKN46X1fHCstM/jJTUXeXV3IXfMSyB1iNdUdVZksD/psSGjsvJiZ5XKuQ5WE4Ht739JajTbjpcNOMWRc7aGmqY2rp46jsWp0ewpqOzaeDVSDhfXUtHQypeWJhMXHuCy3wJcTQJdeJ3eNV3qmtv4y4Fibp41gYgg/yG9VkJkEL++dy7XThvHmvmJQ+7L6plxJEcH8dzWk31C6bmtJwH45qr0Ib/uUMyfFEVOYbVLiocNR87ZauIjAgdcMbIyI5bCyiZOV/RfcG1LXil+RsXydAtLUi00tlr5rHhkp5i25pZiUHDV5FiWpFrYearCZSdfXUkCXXidrkC37xh9N6eYplYr9y9OvqzXW5Jq4YX7s/pd7z0Qk9HA11akcbSkrsfUwemKRt7OLuKehUn9rrgZKQsnRVHf0k7uedecLLxcOYU1/c6fd+pcbjnQtMuW3DIWToomNMCvq87PSI+gt+SVMW9iJJHB/ixJjaa6qY08Jy6XONpIoAuvExbgR2yomZOlDWiteWV3IbMSI5xaneIKn58TT0JkIL/ceqla4i82n8DfaODRFc4vU7xcnfPoo2napaTmIhfqmvudP++UGBVEemwIHx13vHyxsLKR/LIGVmbYgj86xEzG+FB2nhq5Oe4Ltc0cLaljZcY4AJak2TZ9jeR7uIsEuvBK6eNCyC9vYFdBJfllDf3u7HQHP6OBr12VxqFzNWw/WcGxkjo2HCrhy8uSiQk1D/4CwzQhIpD4iMBRdWI0Z5D58+5WZsSy53Rln9r4cGnlyaqplzZOLUm1kH2mesSWanb+dtD5HnHhgS47+epqEujCK6XFhHCqrIFXdhUSEeTHaie34LvK7fPiiQsP4JdbTvLMB8cJCzDx8PJUt73/gklR7Dszei4cnVNYg9lkYGpc3928vV01JZY2q3a4smRrXhlpsSFMjA7uum9JajQt7R0cOFszIn3dmldKQmQg6d1q6yxJc83JV1eTQBdeKS02hIaWdjYdvcCdWYn9FupyF7PJyFeuTCW7sJoteWU8cmUq4UHuK5i1YFIUFQ2tA55cdKfODUUDVc7slJUcSajZ1KfWfH1zG3tOV/YYnQMsSInCoGDXCEyJNLdZ2ZFfycqM2B6bvjpPvo7G9f0DkUAXXim122jqnoVJHuzJJXfOTyQm1IwlxJ8vLU1263uPpvXozW1WjpbUOjXdArYpq+WTLX2WL24/WUGbVbPKPrfdKSzAjxkJESMyJbKroJKLbdauOfpOi+3F03Z62Xp0CXThlTqLdF05OabHr+OeFOBn5Pdr5/OHLy1wewXB1JhgooP92TMKToweLamlzaodVrPsz4opsZTWtXCs20qdD3NLCQ/0c3hidUlqNAfP1dDoYN59KLbmlhHoZ+xT/TIy2J9pcWFeN48ugS68kiXEn3+5djJP3JDh6a70kBkf7pIt/oNRSrEoJZqPj5fT2u7Zed+cwhoA5k6McPo5ncsXO6ddrB2aj46Xs2JKjMMiZUtSo2nv0MP6jURrzda8MpalWxxO2S1Ni2b/2ZE7+eoOEujCKyml+PrKdIcldMeqL2QlUNnYyqajFzzaj5yz1SREBhIb6nwJ2phQMzMTwtlmX7548Fw1VY2trJw6zmH7rIlR+BnVsNajHy+tp7jmIqsyHJceXpJqobW9g/2Fo7eaZW8S6EL4iCvSY0iKCuLV3YVOtT94roas/9zM4aKaEeuD1pqcs9VOz593t2JKLAfOVlPd2MqW3DKMBsWVk2Mctg30NzInKXJYUyKdSyJX9BPo8ydFYTKoy1qP3txm5Z0c15X67Y8EuhA+wmBQ3Lswib2nqzjuxC7HZz44TkVDKz/867ERW+5YUttMaV3LoBuKHFmZEUuHhk9OlrMlt4z5yZGEB/a/UmhJajRHSmqpbWq7rL5uzStjRnw44/opTRBiNjErMYId+UP/
0Hh9z1m+/fYht5/TkEAXwod8ISsRf5OB1/YMPEo/aN8ENScpgv2F1Ww83H8J4O4OnavpURSttxz79MRgW/4dmREfjiXEn1d2FXK8tJ6r+5lu6bQ0zYLWsPv00AO3qrGVnLPVfVa39LYkNZrDRTXUNQ/tQ+MvB4sB2xWb3EkCXQgfEhXsz+oZcbyTUzzgCpDntpwkMsiPP355AdPiwvjJP/IGPfl3tKSWL/x2F3e/uJumVsevnXO2mgA/5zYU9WYwKK6cHEu2/UNhsLCdlRBBoJ/xsubRPzpehtaDv8eSVAsdGvYWOD/Szi9r6Fq/fqREAl0IMQz3LppIQ0t71yixtyPFtWzJK+PB5SmEBvjx76unUlxzkd99errf12xoaefrrx8gJMBEUfVF/uf9Ew7b5ZytYWZ8xKCXEuzPigzbnHmKJZiUQUoO+5sMzJ8UdVlz3FvzyrCEmJkxyIqkOUkRmE2GIc3Vv3ewGIOCWYkRo2+ErpQKUErtVUodUkodVUr90EEbs1LqLaVUvlJqj1Iq2SW9FUIMam5SBNPiwnhlV6HDufHntp4kLMDUdam+JakWrpk2jl9vy6esvrlPe601//buZxRWNvKbe+dy/+KJ/H7n6T6rP5rbrBwrqWXOEJYr9rY8PQazycA10weebum0JDWaE6UNlNe3OP0e+WX1fHyinJUZMYNeDCXAz8j8ZOc/NLTW/OVgMUvTLKycEktBReOw18oPhTMfoy3ASq31LGA2cL1SalGvNg8A1VrrNOAXwE9HtJdCCKcppbhv0UTyLtR3FcnqlHu+jvePlvKlpZMI7VYu+F8/N5VWawfPOBh5/ym7iPcOlvCtqyezMCWa712fQVxYAI+vP0xL+6VpmiPFtg1Fl7PCpVN4oB9/f2w531rl3BWelqTaNgTtKuh/BK215nBRDT/blMeqZz7i6p9/QlOrldvmOncxlMWp0eRdqKeiYfAPjZyz1Zyrusits+PJjA9Da3pslnK1QQNd23RewNHP/tX7Y/8WYJ399p+BVWqkr4YrhHDaLbMnEGo28cqunidHn9+WT4jZxJeXTupx/yRLMPcvTubt/ec42m3e92RpPd/fcIQlqdFdpYBDzCZ+fNsM8ssa+NXW/K62Q6mwOJDUmBAC/Z2rzTN9QjihASZ2naqgqbWdwspG9hdWsenIeV7ZdYan3jvC0p9s5ebnd/B/nxQwPjyAp2+Zzo7HV/bZHdqfpfZr1DozV//ugWIC/Axclzm+azrHndMuTu1PVkoZgf1AGvArrfWeXk3igXMAWut2pVQtEA1U9Hqdh4GHAZKSRkf9DSF8UbDZxG1z43lj7zn+Y3UL0SFm8svq+ftn5/naVY4Lh31zZTrv5BTxnxtzef2hhTS3dfDo6zmEmE08e+fsHpfnWzElltvmxPPrj05xw4w4psaFkVNYQ2JUoFtKBncyGmw7ZN/Ye4439p7r87jZZGB5egzfvnYKqzJiiQwe2hWtADInhBFqNrHzVAU3zZrQb7vW9g42Hj7PNdPGE2I2EWI2ERNq5kix+0boTgW61toKzFZKRQDvKqUytdZHhvpmWusXgBcAsrKyRkedTyF81H2LJrJuVyFvZxfx1atSeX5rPoF+Rh5YluKwfXiQH9+6ejJPbTjK5mOlbM0r42RZA3/88gKHl5H7j9XT+ORkOY+vP8w7X11CztlqFqc6N+odSY+tSmdiVBDRIeau4mgxobbbUUH+DksHDIXJaOCqjFjeO1jCV69MIyna8RWoPj5RTk1TG5+fcyn0MyeEjb4ReietdY1SahtwPdA90IuBRKBIKWUCwgHvqmojhI9JHxfKwklRvLankGumjWPDoRIeWp5C1ACj1HsWJvHK7kK+++fD1F5s49EVqSxPd7xbMzLYnx/enMmjr+fwo43HKKtvGfZ0y+VwR/2cJ2/I4KO8Mr63/hCvP7jI4cnUvxwoJirYv8ff14z4cD4+Uc7FVqvT00jD4cwqlxj7yBylVCBwDZDXq9kG
4Iv223cAW/VoqbQvxBj2T4snUlR9kUdeycbfZODB5Y5H5538jAb+7cap1F5sI2tiJP989cAnJz83YzzXThvHOvtcvScC3R0mRATy76unsrugyuGmrbrmNj7MLeWmmXE9lmxOjw+nQ0PeBfdMuzjzu0gcsE0pdRjYB2zWWm9USj2tlLrZ3uZ3QLRSKh/4NvCEa7orhBiKa6eNxxJi5lR5I3cvSHJqfnvFlFheXpvFi/dnDTpdoZTiR7dmEhpgIsDPQEZc6Eh1fdRZk5XI8nQL//WPvD67ZTcduUBLewe3zInvcX/nbw5HSkZJoGutD2ut52itZ2qtM7XWT9vv/77WeoP9drPW+gta6zSt9QKtdYGrOy6EGJy/ycD9iycS5G/kkSucvyTeyoxxTp9AHBcWwPP3zOX7q6df9oYib6CU4ie3z8SgFN/78+Eehbf+cqCYidFBzEmM6PGcCeEBRAb5ccRNVz7y3b99IQQAj65IY8fjKxkf7nw526G6cnLMqLlylCvFRwTybzdOZVdBJa/tPQvA+dqL7Cqo5NbZ8fRera2UIjM+3G0lACTQhfBxRoO6rOV6wrG75tunXv6ey7mqJjYcLEFruLXXdEunzPhwTpTW99iE5SoS6EIIMQTdp14eX3+Ydw8UMzsxgkkWx5dCzJwQTptVc7K0weHjI0kCXQghhig+IpB//dxUdp6qJO9CPbfO7n/DUWa8rfLkZ25Yjy6BLoQQl+HuBYksS7NgMihWD7CDNCkqiNAAk1s2GLn30uRCCOEjlFL8+r65nKloxBLS/3JQpRSZE8LdsnRRRuhCCHGZwgL8mJkQMWi7zPgwcs/X0WbtcGl/JNCFEMLFMuPDaW3vIL/MtSdGJdCFEMLFMt1USlcCXQghXGxSdDDB/kaOungeXQJdCCFczGBQTJsQ5vKlixLoQgjhBtMnhHOspA5rh+sK0UqgCyGEG8yID+dim5XTFa47MSqBLoQQbtB5YtSV0y4S6EII4QapMcGYTQaXXmNUAl0IIdzAZDQwNc611xiVQBdCCDeZEW87MdrhohOjEuhCCOEmmfFh1Le0U9jrEnYjRQJdCCHcZPoE1+4YlUAXQgg3mTwulJUZsYQF+rnk9aV8rhBCuIm/ycDLa+e77PVlhC6EED5CAl0IIXyEBLoQQvgICXQhhPAREuhCCOEjJNCFEMJHSKALIYSPkEAXQggfobR23dUzBnxjpcqBwst8ugWoGMHueIuxetwwdo9djntscea4J2qtYxw94LFAHw6lVLbWOsvT/XC3sXrcMHaPXY57bBnuccuUixBC+AgJdCGE8BHeGugveLoDHjJWjxvG7rHLcY8twzpur5xDF0II0Ze3jtCFEEL0IoEuhBA+wusCXSl1vVLquFIqXyn1hKf74ypKqZeVUmVKqSPd7otSSm1WSp20/xnpyT66glIqUSm1TSl1TCl1VCn1mP1+nz52pVSAUmqvUuqQ/bh/aL9/klJqj/3n/S2llL+n++oKSimjUuqAUmqj/XufP26l1Bml1GdKqYNKqWz7fcP6OfeqQFdKGYFfATcA04C7lVLTPNsrl/kDcH2v+54Atmit04Et9u99TTvwHa31NGAR8Kj939jXj70FWKm1ngXMBq5XSi0Cfgr8QmudBlQDD3iuiy71GJDb7fuxctwrtNazu609H9bPuVcFOrAAyNdaF2itW4E3gVs83CeX0Fp/AlT1uvsWYJ399jrgVnf2yR201ue11jn22/XY/pPH4+PHrm0a7N/62b80sBL4s/1+nztuAKVUAnAj8JL9e8UYOO5+DOvn3NsCPR441+37Ivt9Y8U4rfV5++0LwDhPdsbVlFLJwBxgD2Pg2O3TDgeBMmAzcAqo0Vq325v46s/7s8D3gA7799GMjePWwAdKqf1KqYft9w3r51wuEu2ltNZaKeWza06VUiHAeuBbWus626DNxlePXWttBWYrpSKAd4EMz/bI9ZRSq4EyrfV+pdRVHu6Ouy3TWhcrpWKB
zUqpvO4PXs7PubeN0IuBxG7fJ9jvGytKlVJxAPY/yzzcH5dQSvlhC/PXtNbv2O8eE8cOoLWuAbYBi4EIpVTnwMsXf96XAjcrpc5gm0JdCfwvvn/caK2L7X+WYfsAX8Awf869LdD3Aen2M+D+wF3ABg/3yZ02AF+03/4i8J4H++IS9vnT3wG5Wuufd3vIp49dKRVjH5mjlAoErsF2/mAbcIe9mc8dt9b6Sa11gtY6Gdv/561a63vx8eNWSgUrpUI7bwPXAkcY5s+51+0UVUp9DtucmxF4WWv9Y8/2yDWUUm8AV2Erp1kKPAX8BXgbSMJWeniN1rr3iVOvppRaBmwHPuPSnOq/YptH99ljV0rNxHYSzIhtoPW21vpppVQKtpFrFHAAuE9r3eK5nrqOfcrlX7TWq339uO3H9679WxPwutb6x0qpaIbxc+51gS6EEMIxb5tyEUII0Q8JdCGE8BES6EII4SMk0IUQwkdIoAshhI+QQBdCCB8hgS6EED7i/wOVtcJ9uqzl0gAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Build the seq2seq model and run training\n",
    "hidden_size = 256  # GRU hidden-state size shared by encoder and decoder\n",
    "\n",
    "# Encoder / AttnDecoder are defined in earlier cells; vocabulary sizes\n",
    "# come from the Lang objects built from the parallel corpus\n",
    "encoder1 = Encoder(input_lang.n_words, hidden_size).to(device)\n",
    "attn_decoder1 = AttnDecoder(hidden_size, output_lang.n_words, dropout_p=0.1).to(device)\n",
    "\n",
    "n_iters = 5000  # number of training iterations (one sentence pair each)\n",
    "train_iter(encoder1, attn_decoder1, n_iters)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "5bd5290d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 评估\n",
    "def evaluate(encoder, decoder, sentence, max_length=MAX_LENGTH):\n",
    "    with torch.no_grad():\n",
    "        input_tensor = sentence2tensor(input_lang, sentence)\n",
    "        input_length = input_tensor.size()[0]\n",
    "        \n",
    "        encoder_hidden = encoder.init_hidden()\n",
    "        \n",
    "        encoder_outputs = torch.zeros(max_length, encoder.hidden_size, device=device)\n",
    "        \n",
    "        for ei in range(input_length):\n",
    "            encoder_output, encoder_hidden = encoder(input_tensor[ei], encoder_hidden)\n",
    "            encoder_outputs[ei] += encoder_output[0, 0]\n",
    "            \n",
    "        decoder_input = torch.tensor([[SOS_index]], device=device)\n",
    "        decoder_hidden = encoder_hidden\n",
    "        \n",
    "        decoded_words = []\n",
    "        decoder_attentions = torch.zeros(max_length, max_length)\n",
    "        \n",
    "        for di in range(max_length):\n",
    "            decoder_output, decoder_hidden, decoder_attention = decoder(\n",
    "                decoder_input, decoder_hidden, encoder_outputs\n",
    "            )\n",
    "            decoder_attentions[di] = decoder_attention.data\n",
    "            topv, topi = decoder_output.data.topk(1)\n",
    "            \n",
    "            if topi.item() == EOS_index:\n",
    "                decoded_words.append('<EOS>')\n",
    "                break\n",
    "            else:\n",
    "                decoded_words.append(output_lang.index2word[topi.item()])\n",
    "            decoder_input = topi.squeeze().detach()\n",
    "    return decoded_words, decoder_attentions[: di+1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "4f8852bb",
   "metadata": {},
   "outputs": [],
   "source": [
    "def evl_random(encoder, decoder, n=6):\n",
    "    for i in range(n):\n",
    "        pair = random.choice(pairs)\n",
    "        print('>', pair[0])\n",
    "        print('=', pair[1])\n",
    "        output_words, attentions = evaluate(encoder, decoder, pair[0])\n",
    "        output_sentence = ' '.join(output_words)\n",
    "        print('<', output_sentence)\n",
    "        print('')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "86f59545",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "> i m also thinking of going to korea .\n",
      "= je pense aussi aller en coree .\n",
      "< je suis de de mes . <EOS>\n",
      "\n",
      "> we re finally alone .\n",
      "= on est enfin seules .\n",
      "< nous sommes tous de . <EOS>\n",
      "\n",
      "> i m resilient .\n",
      "= je suis tenace .\n",
      "< je suis . . <EOS>\n",
      "\n",
      "> you re the one who trained me .\n",
      "= vous etes celui qui m a entrainee .\n",
      "< tu etes plus plus que moi . <EOS>\n",
      "\n",
      "> i m fairly busy actually .\n",
      "= je suis assez occupe en realite .\n",
      "< je suis de votre . <EOS>\n",
      "\n",
      "> we re like brothers .\n",
      "= nous sommes comme des freres .\n",
      "< nous sommes tous de . <EOS>\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Spot-check the trained model on a few random training pairs\n",
    "evl_random(encoder1, attn_decoder1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "1e58c5d7",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['nous', 'sommes', 'tous', 'de', '.', '<EOS>']\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY8AAAECCAYAAAAGtFvhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAAALlElEQVR4nO3dW6il9X3G8efnzOjEA03a2FJnBvUipEggWjeS1lKoaRtTJblVSAqlMDdNa0ogJL3rfQnpRSgMxrYQG1uMgSBWY9EQQluT8RCrjiFiTXRi8NBaTS88jL9ezBaMe+J+/+le+333zOcDg3uvWSwe3pntd951rO4OAIw4be4BAOw84gHAMPEAYJh4ADBMPAAYJh4ADFtsPKrqyqr6XlU9VlWfmXvPElXVgaq6u6oeqaqHq+q6uTctWVXtqqr7q+rWubcsWVW9s6purqpHq+pIVf3G3JuWqKr+fP3n7qGq+nJV7Z1703ZaZDyqaleSLyT5cJKLklxbVRfNu2qRXkvyqe6+KMkHkvyJ4/S2rktyZO4RO8BfJ7m9u38tyfvjmG1QVfuS/FmSte5+X5JdSa6Zd9X2WmQ8klyW5LHufry7X0lyU5KPzrxpcbr76e6+b/3rl3L8h3zfvKuWqar2J7kqyfVzb1myqvqFJL+d5ItJ0t2vdPcLs45art1J3lFVu5OcmeRHM+/ZVkuNx74kT77p+6fif4pvq6ouSHJJkntmnrJUn0/y6SSvz7xj6S5M8mySv12/i+/6qjpr7lFL091Hk/xVkh8meTrJ/3T31+ddtb2WGg8GVNXZSb6S5JPd/eLce5amqq5O8kx33zv3lh1gd5JfT/I33X1Jkv9N4jHHt6iqd+X4vSEXJjkvyVlV9bF5V22vpcbjaJIDb/p+//plvEVV7cnxcNzY3bfMvWehLk/ykap6IsfvAr2iqr4076TFeirJU939xhnszTkeE37a7yb5z+5+trtfTXJLkt+cedO2Wmo8vpPkPVV1YVWdnuMPRH1t5k2LU1WV4/dNH+nuz829Z6m6+7Pdvb+7L8jxv0t3dfcp9a/Eqbr7x0merKr3rl/0wSSPzDhpqX6Y5ANVdeb6z+EHc4o9sWD33ANOpLtfq6pPJLkjx5/FcEN3PzzzrCW6PMnHk/xHVT2wftlfdPdt803iJPCnSW5c/4fb40n+aOY9i9Pd91TVzUnuy/FnPd6f5NC8q7ZXeUt2AEYt9W4rABZMPAAYJh4ADBMPAIaJBwDDFh2Pqjo494adwrGaxnGaxnGa7lQ9VouOR5JT8g/l5+RYTeM4TeM4TXdKHqulxwOABVrJiwTf/Yu7+vwD//8Xrz/7/LGc+0u7tmBR8v0HT+43Bn01L2dPzph7xuI5TtM4TtOd7Mfqpfz3c9197lsvX8nbk5x/YHf+9fZlvYP61fsunXvCRqdtTRi33OvH5l4ALMS/9M0/ONHl7rYCYJh4ADBMPAAYJh4ADBMPAIaJBwDDxAOAYeIBwDDxAGCYeAAwTDwAGCYeAAwTDwCGTYpHVV1ZVd+rqseq6jOrHgXAsm0aj6raleQLST6c5KIk11bVRaseBsByTTnzuCzJY939eHe/kuSmJB9d7SwAlmxKPPYlefJN3z+1fhkAp6gte8C8qg5W1eGqOvzs8z6JDuBkNiUeR5MceNP3+9cv+yndfai717p7bas+dxyAZZoSj+8keU9VXVhVpye5JsnXVjsLgCXbvdkVuvu1qvpEkjuS7EpyQ3c/vPJlACzWpvFIku6+LcltK94CwA7hFeYADBMPAIaJBwDDxAOAYeIBwDDxAGCYeAAwTDwAGCYeAAwTDwCGiQcAw8QDgGGT3hhx1PcfPCtX77t0FTd9Urn1yW/PPeGEFvlnVzX3go26514As3HmAcAw8QBgmHgAMEw8ABgmHgAMEw8AhokHAMPEA4Bh4gHAMPEAYJh4ADBMPAAYJh4ADBMPAIaJBwDDNo1HVd1QVc9U1UPb
MQiA5Zty5vF3Sa5c8Q4AdpBN49Hd30zyX9uwBYAdwmMeAAzbss8wr6qDSQ4myd6cuVU3C8ACbdmZR3cf6u617l7bkzO26mYBWCB3WwEwbMpTdb+c5N+SvLeqnqqqP179LACWbNPHPLr72u0YAsDO4W4rAIaJBwDDxAOAYeIBwDDxAGCYeAAwTDwAGCYeAAwTDwCGiQcAw8QDgGHiAcAw8QBg2JZ9kiDjrt536dwTdow7jt4/94QNPnTexXNPgNk48wBgmHgAMEw8ABgmHgAMEw8AhokHAMPEA4Bh4gHAMPEAYJh4ADBMPAAYJh4ADBMPAIaJBwDDNo1HVR2oqrur6pGqeriqrtuOYQAs15TP83gtyae6+76qOifJvVV1Z3c/suJtACzUpmce3f10d9+3/vVLSY4k2bfqYQAs19BjHlV1QZJLktyzkjUA7AiTP4a2qs5O8pUkn+zuF0/w+weTHEySvTlzywYCsDyTzjyqak+Oh+PG7r7lRNfp7kPdvdbda3tyxlZuBGBhpjzbqpJ8McmR7v7c6icBsHRTzjwuT/LxJFdU1QPrv/5gxbsAWLBNH/Po7m8lqW3YAsAO4RXmAAwTDwCGiQcAw8QDgGHiAcAw8QBgmHgAMEw8ABgmHgAMEw8AhokHAMPEA4Bh4gHAMPEAYJh4ADBMPAAYJh4ADBMPAIaJBwDDxAOAYeIBwDDxAGCYeAAwTDwAGCYeAAwTDwCGiQcAw8QDgGGbxqOq9lbVt6vqu1X1cFX95XYMA2C5dk+4zstJrujun1TVniTfqqp/7u5/X/E2ABZq03h0dyf5yfq3e9Z/9SpHAbBskx7zqKpdVfVAkmeS3Nnd96x0FQCLNike3X2suy9Osj/JZVX1vrdep6oOVtXhqjr8al7e4pkALMnQs626+4Ukdye58gS/d6i717p7bU/O2KJ5ACzRlGdbnVtV71z/+h1Jfi/JoyveBcCCTXm21a8m+fuq2pXjsfmn7r51tbMAWLIpz7Z6MMkl27AFgB3CK8wBGCYeAAwTDwCGiQcAw8QDgGHiAcAw8QBgmHgAMEw8ABgmHgAMEw8AhokHAMPEA4BhU96SHWb3ofMunnvCBnf86IG5J2ywxOPEycmZBwDDxAOAYeIBwDDxAGCYeAAwTDwAGCYeAAwTDwCGiQcAw8QDgGHiAcAw8QBgmHgAMEw8ABgmHgAMmxyPqtpVVfdX1a2rHATA8o2ceVyX5MiqhgCwc0yKR1XtT3JVkutXOweAnWDqmcfnk3w6yes/6wpVdbCqDlfV4Vfz8lZsA2ChNo1HVV2d5Jnuvvftrtfdh7p7rbvX9uSMLRsIwPJMOfO4PMlHquqJJDcluaKqvrTSVQAs2qbx6O7Pdvf+7r4gyTVJ7uruj618GQCL5XUeAAzbPXLl7v5Gkm+sZAkAO4YzDwCGiQcAw8QDgGHiAcAw8QBgmHgAMEw8ABgmHgAMEw8AhokHAMPEA4Bh4gHAMPEAYNjQu+pOVaedltPOPmcVN/1zq6q5J2xw7MUX555wYqftmnvBRq8fm3vBBldd/tG5J2zwwh+eN/eEDd71j/fNPeGEfnzw0rknbHDeXc/PPWGjh058sTMPAIaJBwDDxAOAYeIBwDDxAGCYeAAwTDwAGCYeAAwTDwCGiQcAw8QDgGHiAcAw8QBgmHgAMGzSW7JX1RNJXkpyLMlr3b22ylEALNvI53n8Tnc/t7IlAOwY7rYCYNjUeHSSr1fVvVV1cJWDAFi+qXdb/VZ3H62qX05yZ1U92t3ffPMV1qNyMEn21llbPBOAJZl05tHdR9f/+0ySrya57ATXOdTda929dnrt3dqVACzKpvGoqrOq6pw3vk7y+/mZH4kOwKlgyt1Wv5Lkq1X1xvX/obtvX+kqABZt03h09+NJ3r8NWwDYITxVF4Bh4gHAMPEAYJh4ADBMPAAYJh4ADBMPAIaJBwDDxAOAYeIBwDDxAGCYeAAwTDwAGFbdvfU3WvVskh9swU29O8lzW3A7
pwLHahrHaRrHabqT/Vid393nvvXClcRjq1TV4e5em3vHTuBYTeM4TeM4TXeqHit3WwEwTDwAGLb0eByae8AO4lhN4zhN4zhNd0oeq0U/5gHAMi39zAOABRIPAIaJBwDDxAOAYeIBwLD/A0f7Tvuo1z76AAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 480x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Visualize the decoder's attention weights for one example sentence\n",
    "sentence = 'we re both teachers .'\n",
    "\n",
    "# attentions has shape (decoded_steps, max_length), one row per output token\n",
    "output_words, attentions = evaluate(encoder1, attn_decoder1, sentence)\n",
    "print(output_words)\n",
    "\n",
    "# NOTE(review): assumes matplotlib.pyplot was imported as `plt` in an\n",
    "# earlier cell -- the import is not visible in this part of the notebook\n",
    "plt.matshow(attentions)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2e1398b9",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "torchX",
   "language": "python",
   "name": "torchx"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
