{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"auto encoder.ipynb","provenance":[],"collapsed_sections":[],"toc_visible":true,"authorship_tag":"ABX9TyOVIFrFbtBcOk936NgR/ySu"},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"id":"DJcL9vud9eD_","executionInfo":{"status":"ok","timestamp":1605468883934,"user_tz":-480,"elapsed":1051,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"b0325299-b6e4-4b8c-8155-0e8f6da21b3d","colab":{"base_uri":"https://localhost:8080/"}},"source":["from google.colab import drive\n","drive.mount('/content/drive/')"],"execution_count":1,"outputs":[{"output_type":"stream","text":["Drive already mounted at /content/drive/; to attempt to forcibly remount, call drive.mount(\"/content/drive/\", force_remount=True).\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"znHh4Yez9rMm","executionInfo":{"status":"ok","timestamp":1605468884836,"user_tz":-480,"elapsed":1947,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"34cd31b8-9cd6-4663-b969-6a9814d799e2","colab":{"base_uri":"https://localhost:8080/"}},"source":["%cd './drive/My Drive/Movie_lens/'"],"execution_count":2,"outputs":[{"output_type":"stream","text":["/content/drive/My Drive/Movie_lens\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"fw7NPdkwJtKA"},"source":["## **0. 加载包**"]},{"cell_type":"code","metadata":{"id":"ndprszpRrTgi","executionInfo":{"status":"ok","timestamp":1605468885318,"user_tz":-480,"elapsed":2423,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["import numpy as np\n","import pandas as pd\n","import torch\n","import torch.nn as nn\n","import torch.optim as optim\n","import torch.nn.functional as F\n","from torch.autograd import Variable\n","from torch.autograd import Variable\n","from torch.utils.data import Dataset, DataLoader, TensorDataset\n","from sklearn.model_selection import train_test_split\n","from operator import itemgetter"],"execution_count":3,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"f_Adr_TNGsTI"},"source":["## **1. Data Preparing**"]},{"cell_type":"markdown","metadata":{"id":"dywrS71fIIZf"},"source":["### 1.1 将评分数据按照80%, 10%, 10%的比例划分为训练集，验证集和测试集"]},{"cell_type":"code","metadata":{"id":"5pPrfSml9i1w","executionInfo":{"status":"ok","timestamp":1605468990884,"user_tz":-480,"elapsed":7766,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["ratings_title = ['UserID','MovieID', 'Rating', 'timestamps']\n","ratings = pd.read_csv('./ml-1m/ratings.dat', sep='::', header=None, names=ratings_title, engine = 'python')\n","\n","ratings.sample(frac=1.0)\n","train_set, test_set = train_test_split(ratings,test_size = 0.2)\n","dev_set, test_set = train_test_split(test_set,test_size = 0.5)\n","train_set = np.array(train_set, dtype = 'int')\n","dev_set = np.array(dev_set, dtype = 'int')\n","test_set = np.array(test_set, dtype = 'int')"],"execution_count":12,"outputs":[]},{"cell_type":"code","metadata":{"id":"17SrgRlcIVeW","executionInfo":{"status":"ok","timestamp":1605468889977,"user_tz":-480,"elapsed":7072,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"78cc3363-fb33-4b70-9713-18770f4f7847","colab":{"base_uri":"https://localhost:8080/","height":206}},"source":["ratings.head()"],"execution_count":5,"outputs":[{"output_type":"execute_result","data":{"text/html":["<div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>UserID</th>\n","      <th>MovieID</th>\n","      <th>Rating</th>\n","      <th>timestamps</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>1</td>\n","      <td>1193</td>\n","      <td>5</td>\n","      <td>978300760</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>1</td>\n","      <td>661</td>\n","      <td>3</td>\n","      <td>978302109</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>1</td>\n","      <td>914</td>\n","      <td>3</td>\n","      <td>978301968</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>1</td>\n","      <td>3408</td>\n","      <td>4</td>\n","      <td>978300275</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>1</td>\n","      <td>2355</td>\n","      <td>5</td>\n","      <td>978824291</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>"],"text/plain":["   UserID  MovieID  Rating  timestamps\n","0       1     1193       5   978300760\n","1       1      661       3   978302109\n","2       1      914       3   978301968\n","3       1     3408       4   978300275\n","4       1     2355       5   978824291"]},"metadata":{"tags":[]},"execution_count":5}]},{"cell_type":"code","metadata":{"id":"qvwIHbpLHhuL","executionInfo":{"status":"ok","timestamp":1605468889979,"user_tz":-480,"elapsed":7070,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"cf79bc2b-ffb9-42b0-9142-b40db8f777f0","colab":{"base_uri":"https://localhost:8080/"}},"source":["print(train_set.shape)\n","print(dev_set.shape)\n","print(test_set.shape)"],"execution_count":6,"outputs":[{"output_type":"stream","text":["(800167, 4)\n","(100021, 4)\n","(100021, 4)\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"nI0ZtxKwIYJX"},"source":["### 1.2 将数据转化列表形式（[ [用户1对所有电影的评分], [用户2对所有电影的评分] ...]，并把用户未打分的电影的评分设为0"]},{"cell_type":"code","metadata":{"id":"wG3ke3SbAJJk","executionInfo":{"status":"ok","timestamp":1605469006329,"user_tz":-480,"elapsed":12839,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["nb_users = int(max(max(train_set[:,0]), max(dev_set[:,0]), max(test_set[:,0])))\n","nb_movies = int(max(max(train_set[:,1]), max(dev_set[:,1]), max(test_set[:,1])))\n","\n","# Converting the data into an array with users in lines and movies in columns\n","def convert(data):\n","    new_data = []\n","    #匹配相对应的 local data里面 user与movie相匹配，若user没有评价过该movie，then mark ‘0’\n","    for id_users in range(1, nb_users + 1):\n","        #create 第一层list，id_movies为某user看过的movie的id\n","        id_movies = data[:,1][data[:,0] == id_users]\n","        #id_ratings为某user评价过某movie的rate\n","        id_ratings = data[:,2][data[:,0] == id_users]\n","        #首先创建全部为0的list，再将user 评价movie的rating分数替换0，那么就能mark user没看过的movie为0\n","        ratings = np.zeros(nb_movies)\n","        #由于movieID由1开始，而python由0开始，因此要rating匹配python则-1\n","        ratings[id_movies - 1] = id_ratings\n","        #将以上创建的list合并到一个list，以被torch提取\n","        new_data.append(list(ratings))\n","    return new_data\n","    \n","train_set = convert(train_set)\n","dev_set = convert(dev_set)\n","test_set = convert(test_set)"],"execution_count":13,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"LNs_PpoSH3OY"},"source":["### 1.3 数据载入pytorch自带的DataLoader"]},{"cell_type":"code","metadata":{"id":"L8WNeUI3ASnK","executionInfo":{"status":"ok","timestamp":1605469037387,"user_tz":-480,"elapsed":3080,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["train_set = torch.FloatTensor(train_set)\n","train_set = train_set[torch.sum(train_set!=0,axis=1)!=0] # 删除评分全为0的行\n","dev_set = torch.FloatTensor(dev_set)\n","dev_set = dev_set[torch.sum(dev_set!=0,axis=1)!=0] # 删除评分全为0的行\n","test_set = torch.FloatTensor(test_set)\n","test_set = test_set[torch.sum(test_set!=0,axis=1)!=0] # 删除评分全为0的行\n","\n","train_dataset = TensorDataset(train_set)\n","dev_dataset = TensorDataset(dev_set)\n","test_dataset = TensorDataset(test_set)\n","\n","train_loader = DataLoader(dataset = train_dataset, batch_size = 16, shuffle = True)\n","dev_loader = DataLoader(dataset = dev_dataset, batch_size = 16, shuffle = False)\n","test_loader = DataLoader(dataset = test_dataset, batch_size = 16, shuffle = False)"],"execution_count":15,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"6tJCUIK0Ikv9"},"source":["## **2. Auto Encoder**"]},{"cell_type":"markdown","metadata":{"id":"HUqUdgN0PC0o"},"source":["### 2.1 定义模型和损失函数"]},{"cell_type":"code","metadata":{"id":"J9A3zguDIsE3","executionInfo":{"status":"ok","timestamp":1605468943936,"user_tz":-480,"elapsed":1352,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["class AE(nn.Module):\n","  \"\"\"\n","  Auto Encoder(AE)最初用于学习数据的表示(编码)。其可被分解为两部分:\n","\n","  encoder：减少了数据的维度大小;\n","  decoder：它将编码转换回其原始形式。由于存在降维，神经网络需要学习输入(潜在空间)的低维表示，以便能够重构输入。\n","\n","  它们可以用来预测新的推荐。为了做到这一点，输入和输出都是点击向量(通常AE的输入和输出是相同的)，我们将在输入层之后使用大的dropout。\n","  这意味着模型将不得不重构点击向量，因为输入中的某个元素将会丢失，因此要学会预测给定的点击向量的推荐值。\n","  \"\"\"\n","  def __init__(self, nb_movies, device=\"cuda:0\"):\n","    super(AE, self).__init__()\n","    self.nb_movies = nb_movies\n","    self.encoder = nn.Sequential(\n","        nn.Linear(self.nb_movies, 512),\n","        nn.Sigmoid(),\n","        nn.Dropout(0.9), # 这里需要一个大的dropout\n","        nn.Linear(512, 80),\n","        nn.Sigmoid(),\n","        nn.Linear(80, 32),\n","        nn.Sigmoid()\n","        )\n","    self.decoder = nn.Sequential(\n","        nn.Linear(32, 80),\n","        nn.Sigmoid(),\n","        nn.Linear(80, 512),\n","        nn.Sigmoid(),\n","        nn.Linear(512, self.nb_movies)\n","        )\n","\n","  def forward(self, x):\n","    x = self.encoder(x)\n","    x = self.decoder(x)\n","    return x\n","\n","def loss_func(recon_x, x):\n","  \"\"\"\n","  对于一个用户而言，他只对部分电影进行了打分，因此在计算MSE时，只考虑他打过分的电影，而忽略他没打过分的电影\n","  这里MSE的计算原理是，例如2个用户对5个电影的打分数据为：[[1, 2, 0, 3, 4], [2, 3, 0, 0, 1]]\n","  经过AE重构后的打分数据为：[[1.1, 2.3, 0, 3.3, 4.7], [2.1, 3.2, 0, 0, 1.2]]\n","  则先计算两打分数据的2范数的平方，再除以每个用户打过分的电影数，得到每个用户的MSE，再用torch.mean求平均得到每个batch的MSE\n","  \"\"\"\n","  MSE = torch.mean(torch.norm((x - recon_x), p=2, dim=1, keepdim=False)**2/torch.sum(recon_x!=0,axis=1))\n","  return MSE"],"execution_count":10,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"t3tz3gEIPKON"},"source":["### 2.2 定义训练，验证和测试函数"]},{"cell_type":"code","metadata":{"id":"WOqhTH7AIssW","executionInfo":{"status":"ok","timestamp":1605470100696,"user_tz":-480,"elapsed":384823,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"401a2f39-803e-43c0-f8e2-84fd70f7d5a4","colab":{"base_uri":"https://localhost:8080/"}},"source":["def train(train_loader, dev_loader=None, is_validate=True, device=\"cuda:0\"):\n","  ae.train()\n","  total_loss = 0\n","  for _, data in enumerate(train_loader, 0):\n","    data = Variable(data[0]).to(device)\n","    target = data.clone()\n","    optimizer.zero_grad()\n","    recon_x = ae.forward(data)\n","    # 在优化过程中，我们只想考虑用户打过分的电影，\n","    # 虽然我们之前将用户未打分的电影的评分设为0，但是我们还需要将模型预测的用户未打分的电影的评分也设为0，\n","    # 这样才不会累加到loss里导致影响了权重更新\n","    recon_x[target == 0] = 0 \n","    loss = loss_func(recon_x, data)\n","    loss.backward()\n","    total_loss += loss.item()\n","    optimizer.step()\n","    epoch_train_loss = total_loss / len(train_loader)\n","\n","  if (is_validate == True):\n","    ae.eval()\n","    total_loss = 0\n","    with torch.no_grad():\n","      for _, data in enumerate(dev_loader, 0):\n","        data = Variable(data[0]).to(device)\n","        target = data.clone()\n","        recon_x = ae.forward(data)\n","        recon_x[target == 0] = 0\n","        loss = loss_func(recon_x, data)\n","        total_loss += loss.item()\n","        epoch_dev_loss = total_loss / len(dev_loader)\n","    print('====> Epoch: {} Training Average loss: {:.4f}, Validating Average loss: {:.4f}'.format(epoch, epoch_train_loss, epoch_dev_loss))\n","    return epoch_train_loss, epoch_dev_loss\n","\n","  else:\n","    print('====> Epoch: {} Training Average loss: {:.4f}'.format(epoch, epoch_train_loss))\n","    return epoch_train_loss\n","\n","def test(test_loader, device=\"cuda:0\"):\n","  ae.eval()\n","  total_loss = 0\n","  with torch.no_grad():\n","    for _, data in enumerate(test_loader, 0):\n","      data = Variable(data[0]).to(device)\n","      target = data.clone()\n","      recon_x = ae.forward(data)\n","      recon_x[target == 0] = 0\n","      loss = loss_func(recon_x, data)\n","      total_loss += loss.item()\n","  print('Average test loss: {:.4f}'.format(total_loss / len(test_loader)))\n","\n","if __name__ == \"__main__\":\n","  device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n","  ae = AE(nb_movies = nb_movies).to(device)\n","  optimizer =  optim.Adam(ae.parameters(), lr=1e-4, betas=(0.9, 0.999), eps=1e-08, weight_decay=0)\n","  EPOCH = 200\n","  epoches_train_loss = []\n","  epoches_dev_loss = []\n","\n","  print(\"\\n\", 20 * \"=\", \"Training Auto Encoder on device: {}\".format(device), 20 * \"=\")\n","  for epoch in range(1, EPOCH + 1):\n","    epoch_train_loss, epoch_dev_loss = train(train_loader, dev_loader = dev_loader)\n","    epoches_train_loss.append(epoch_train_loss)\n","    epoches_dev_loss.append(epoch_dev_loss)\n","  print(\"\\n\", 20 * \"=\", \"Testing Auto Encoder on device: {}\".format(device), 20 * \"=\")\n","  test(test_loader)"],"execution_count":23,"outputs":[{"output_type":"stream","text":["\n"," ==================== Training Auto Encoder on device: cuda:0 ====================\n","====> Epoch: 1 Training Average loss: 3.8287, Validating Average loss: 1.0679\n","====> Epoch: 2 Training Average loss: 1.0207, Validating Average loss: 1.0108\n","====> Epoch: 3 Training Average loss: 0.9985, Validating Average loss: 1.0099\n","====> Epoch: 4 Training Average loss: 0.9965, Validating Average loss: 1.0129\n","====> Epoch: 5 Training Average loss: 0.9960, Validating Average loss: 1.0109\n","====> Epoch: 6 Training Average loss: 0.9957, Validating Average loss: 1.0121\n","====> Epoch: 7 Training Average loss: 0.9961, Validating Average loss: 1.0124\n","====> Epoch: 8 Training Average loss: 0.9954, Validating Average loss: 1.0130\n","====> Epoch: 9 Training Average loss: 0.9946, Validating Average loss: 1.0126\n","====> Epoch: 10 Training Average loss: 0.9934, Validating Average loss: 1.0098\n","====> Epoch: 11 Training Average loss: 0.9924, Validating Average loss: 1.0138\n","====> Epoch: 12 Training Average loss: 0.9907, Validating Average loss: 1.0138\n","====> Epoch: 13 Training Average loss: 0.9891, Validating Average loss: 1.0145\n","====> Epoch: 14 Training Average loss: 0.9881, Validating Average loss: 1.0112\n","====> Epoch: 15 Training Average loss: 0.9853, Validating Average loss: 1.0136\n","====> Epoch: 16 Training Average loss: 0.9838, Validating Average loss: 1.0123\n","====> Epoch: 17 Training Average loss: 0.9815, Validating Average loss: 1.0161\n","====> Epoch: 18 Training Average loss: 0.9807, Validating Average loss: 1.0107\n","====> Epoch: 19 Training Average loss: 0.9779, Validating Average loss: 1.0197\n","====> Epoch: 20 Training Average loss: 0.9758, Validating Average loss: 1.0120\n","====> Epoch: 21 Training Average loss: 0.9732, Validating Average loss: 1.0168\n","====> Epoch: 22 Training Average loss: 0.9729, Validating Average loss: 1.0215\n","====> Epoch: 23 Training Average loss: 0.9700, Validating Average loss: 1.0179\n","====> Epoch: 24 Training Average loss: 0.9697, Validating Average loss: 1.0241\n","====> Epoch: 25 Training Average loss: 0.9677, Validating Average loss: 1.0191\n","====> Epoch: 26 Training Average loss: 0.9661, Validating Average loss: 1.0284\n","====> Epoch: 27 Training Average loss: 0.9657, Validating Average loss: 1.0189\n","====> Epoch: 28 Training Average loss: 0.9656, Validating Average loss: 1.0225\n","====> Epoch: 29 Training Average loss: 0.9624, Validating Average loss: 1.0258\n","====> Epoch: 30 Training Average loss: 0.9616, Validating Average loss: 1.0197\n","====> Epoch: 31 Training Average loss: 0.9617, Validating Average loss: 1.0252\n","====> Epoch: 32 Training Average loss: 0.9599, Validating Average loss: 1.0232\n","====> Epoch: 33 Training Average loss: 0.9577, Validating Average loss: 1.0288\n","====> Epoch: 34 Training Average loss: 0.9570, Validating Average loss: 1.0198\n","====> Epoch: 35 Training Average loss: 0.9563, Validating Average loss: 1.0388\n","====> Epoch: 36 Training Average loss: 0.9554, Validating Average loss: 1.0252\n","====> Epoch: 37 Training Average loss: 0.9548, Validating Average loss: 1.0257\n","====> Epoch: 38 Training Average loss: 0.9514, Validating Average loss: 1.0248\n","====> Epoch: 39 Training Average loss: 0.9502, Validating Average loss: 1.0331\n","====> Epoch: 40 Training Average loss: 0.9502, Validating Average loss: 1.0305\n","====> Epoch: 41 Training Average loss: 0.9492, Validating Average loss: 1.0368\n","====> Epoch: 42 Training Average loss: 0.9470, Validating Average loss: 1.0314\n","====> Epoch: 43 Training Average loss: 0.9465, Validating Average loss: 1.0357\n","====> Epoch: 44 Training Average loss: 0.9449, Validating Average loss: 1.0312\n","====> Epoch: 45 Training Average loss: 0.9421, Validating Average loss: 1.0346\n","====> Epoch: 46 Training Average loss: 0.9411, Validating Average loss: 1.0322\n","====> Epoch: 47 Training Average loss: 0.9397, Validating Average loss: 1.0309\n","====> Epoch: 48 Training Average loss: 0.9385, Validating Average loss: 1.0283\n","====> Epoch: 49 Training Average loss: 0.9361, Validating Average loss: 1.0344\n","====> Epoch: 50 Training Average loss: 0.9330, Validating Average loss: 1.0343\n","====> Epoch: 51 Training Average loss: 0.9313, Validating Average loss: 1.0379\n","====> Epoch: 52 Training Average loss: 0.9316, Validating Average loss: 1.0494\n","====> Epoch: 53 Training Average loss: 0.9294, Validating Average loss: 1.0467\n","====> Epoch: 54 Training Average loss: 0.9288, Validating Average loss: 1.0483\n","====> Epoch: 55 Training Average loss: 0.9274, Validating Average loss: 1.0404\n","====> Epoch: 56 Training Average loss: 0.9243, Validating Average loss: 1.0409\n","====> Epoch: 57 Training Average loss: 0.9257, Validating Average loss: 1.0360\n","====> Epoch: 58 Training Average loss: 0.9223, Validating Average loss: 1.0446\n","====> Epoch: 59 Training Average loss: 0.9221, Validating Average loss: 1.0428\n","====> Epoch: 60 Training Average loss: 0.9213, Validating Average loss: 1.0405\n","====> Epoch: 61 Training Average loss: 0.9203, Validating Average loss: 1.0428\n","====> Epoch: 62 Training Average loss: 0.9184, Validating Average loss: 1.0462\n","====> Epoch: 63 Training Average loss: 0.9172, Validating Average loss: 1.0450\n","====> Epoch: 64 Training Average loss: 0.9158, Validating Average loss: 1.0437\n","====> Epoch: 65 Training Average loss: 0.9147, Validating Average loss: 1.0403\n","====> Epoch: 66 Training Average loss: 0.9165, Validating Average loss: 1.0472\n","====> Epoch: 67 Training Average loss: 0.9144, Validating Average loss: 1.0447\n","====> Epoch: 68 Training Average loss: 0.9122, Validating Average loss: 1.0444\n","====> Epoch: 69 Training Average loss: 0.9112, Validating Average loss: 1.0475\n","====> Epoch: 70 Training Average loss: 0.9115, Validating Average loss: 1.0489\n","====> Epoch: 71 Training Average loss: 0.9108, Validating Average loss: 1.0459\n","====> Epoch: 72 Training Average loss: 0.9080, Validating Average loss: 1.0443\n","====> Epoch: 73 Training Average loss: 0.9080, Validating Average loss: 1.0424\n","====> Epoch: 74 Training Average loss: 0.9094, Validating Average loss: 1.0427\n","====> Epoch: 75 Training Average loss: 0.9073, Validating Average loss: 1.0391\n","====> Epoch: 76 Training Average loss: 0.9071, Validating Average loss: 1.0383\n","====> Epoch: 77 Training Average loss: 0.9071, Validating Average loss: 1.0356\n","====> Epoch: 78 Training Average loss: 0.9050, Validating Average loss: 1.0445\n","====> Epoch: 79 Training Average loss: 0.9051, Validating Average loss: 1.0471\n","====> Epoch: 80 Training Average loss: 0.9029, Validating Average loss: 1.0390\n","====> Epoch: 81 Training Average loss: 0.9036, Validating Average loss: 1.0453\n","====> Epoch: 82 Training Average loss: 0.9017, Validating Average loss: 1.0452\n","====> Epoch: 83 Training Average loss: 0.9016, Validating Average loss: 1.0407\n","====> Epoch: 84 Training Average loss: 0.9006, Validating Average loss: 1.0398\n","====> Epoch: 85 Training Average loss: 0.9000, Validating Average loss: 1.0466\n","====> Epoch: 86 Training Average loss: 0.9012, Validating Average loss: 1.0414\n","====> Epoch: 87 Training Average loss: 0.8979, Validating Average loss: 1.0505\n","====> Epoch: 88 Training Average loss: 0.8980, Validating Average loss: 1.0517\n","====> Epoch: 89 Training Average loss: 0.8973, Validating Average loss: 1.0487\n","====> Epoch: 90 Training Average loss: 0.8950, Validating Average loss: 1.0506\n","====> Epoch: 91 Training Average loss: 0.8953, Validating Average loss: 1.0508\n","====> Epoch: 92 Training Average loss: 0.8959, Validating Average loss: 1.0457\n","====> Epoch: 93 Training Average loss: 0.8949, Validating Average loss: 1.0484\n","====> Epoch: 94 Training Average loss: 0.8941, Validating Average loss: 1.0453\n","====> Epoch: 95 Training Average loss: 0.8935, Validating Average loss: 1.0430\n","====> Epoch: 96 Training Average loss: 0.8921, Validating Average loss: 1.0497\n","====> Epoch: 97 Training Average loss: 0.8924, Validating Average loss: 1.0508\n","====> Epoch: 98 Training Average loss: 0.8918, Validating Average loss: 1.0487\n","====> Epoch: 99 Training Average loss: 0.8911, Validating Average loss: 1.0488\n","====> Epoch: 100 Training Average loss: 0.8900, Validating Average loss: 1.0429\n","====> Epoch: 101 Training Average loss: 0.8903, Validating Average loss: 1.0504\n","====> Epoch: 102 Training Average loss: 0.8879, Validating Average loss: 1.0543\n","====> Epoch: 103 Training Average loss: 0.8899, Validating Average loss: 1.0510\n","====> Epoch: 104 Training Average loss: 0.8884, Validating Average loss: 1.0538\n","====> Epoch: 105 Training Average loss: 0.8868, Validating Average loss: 1.0498\n","====> Epoch: 106 Training Average loss: 0.8865, Validating Average loss: 1.0534\n","====> Epoch: 107 Training Average loss: 0.8845, Validating Average loss: 1.0568\n","====> Epoch: 108 Training Average loss: 0.8851, Validating Average loss: 1.0552\n","====> Epoch: 109 Training Average loss: 0.8849, Validating Average loss: 1.0528\n","====> Epoch: 110 Training Average loss: 0.8853, Validating Average loss: 1.0579\n","====> Epoch: 111 Training Average loss: 0.8816, Validating Average loss: 1.0621\n","====> Epoch: 112 Training Average loss: 0.8844, Validating Average loss: 1.0624\n","====> Epoch: 113 Training Average loss: 0.8817, Validating Average loss: 1.0542\n","====> Epoch: 114 Training Average loss: 0.8832, Validating Average loss: 1.0588\n","====> Epoch: 115 Training Average loss: 0.8814, Validating Average loss: 1.0536\n","====> Epoch: 116 Training Average loss: 0.8801, Validating Average loss: 1.0671\n","====> Epoch: 117 Training Average loss: 0.8801, Validating Average loss: 1.0579\n","====> Epoch: 118 Training Average loss: 0.8800, Validating Average loss: 1.0584\n","====> Epoch: 119 Training Average loss: 0.8778, Validating Average loss: 1.0629\n","====> Epoch: 120 Training Average loss: 0.8802, Validating Average loss: 1.0596\n","====> Epoch: 121 Training Average loss: 0.8764, Validating Average loss: 1.0680\n","====> Epoch: 122 Training Average loss: 0.8772, Validating Average loss: 1.0656\n","====> Epoch: 123 Training Average loss: 0.8762, Validating Average loss: 1.0651\n","====> Epoch: 124 Training Average loss: 0.8775, Validating Average loss: 1.0640\n","====> Epoch: 125 Training Average loss: 0.8752, Validating Average loss: 1.0657\n","====> Epoch: 126 Training Average loss: 0.8756, Validating Average loss: 1.0691\n","====> Epoch: 127 Training Average loss: 0.8749, Validating Average loss: 1.0717\n","====> Epoch: 128 Training Average loss: 0.8737, Validating Average loss: 1.0688\n","====> Epoch: 129 Training Average loss: 0.8726, Validating Average loss: 1.0711\n","====> Epoch: 130 Training Average loss: 0.8728, Validating Average loss: 1.0727\n","====> Epoch: 131 Training Average loss: 0.8710, Validating Average loss: 1.0755\n","====> Epoch: 132 Training Average loss: 0.8700, Validating Average loss: 1.0651\n","====> Epoch: 133 Training Average loss: 0.8698, Validating Average loss: 1.0670\n","====> Epoch: 134 Training Average loss: 0.8674, Validating Average loss: 1.0675\n","====> Epoch: 135 Training Average loss: 0.8669, Validating Average loss: 1.0797\n","====> Epoch: 136 Training Average loss: 0.8676, Validating Average loss: 1.0731\n","====> Epoch: 137 Training Average loss: 0.8650, Validating Average loss: 1.0679\n","====> Epoch: 138 Training Average loss: 0.8638, Validating Average loss: 1.0674\n","====> Epoch: 139 Training Average loss: 0.8640, Validating Average loss: 1.0687\n","====> Epoch: 140 Training Average loss: 0.8610, Validating Average loss: 1.0856\n","====> Epoch: 141 Training Average loss: 0.8606, Validating Average loss: 1.0728\n","====> Epoch: 142 Training Average loss: 0.8606, Validating Average loss: 1.0739\n","====> Epoch: 143 Training Average loss: 0.8584, Validating Average loss: 1.0721\n","====> Epoch: 144 Training Average loss: 0.8591, Validating Average loss: 1.0855\n","====> Epoch: 145 Training Average loss: 0.8572, Validating Average loss: 1.0783\n","====> Epoch: 146 Training Average loss: 0.8564, Validating Average loss: 1.0699\n","====> Epoch: 147 Training Average loss: 0.8552, Validating Average loss: 1.0783\n","====> Epoch: 148 Training Average loss: 0.8545, Validating Average loss: 1.0747\n","====> Epoch: 149 Training Average loss: 0.8545, Validating Average loss: 1.0813\n","====> Epoch: 150 Training Average loss: 0.8514, Validating Average loss: 1.0839\n","====> Epoch: 151 Training Average loss: 0.8521, Validating Average loss: 1.0797\n","====> Epoch: 152 Training Average loss: 0.8515, Validating Average loss: 1.0765\n","====> Epoch: 153 Training Average loss: 0.8490, Validating Average loss: 1.0732\n","====> Epoch: 154 Training Average loss: 0.8479, Validating Average loss: 1.0838\n","====> Epoch: 155 Training Average loss: 0.8505, Validating Average loss: 1.0708\n","====> Epoch: 156 Training Average loss: 0.8488, Validating Average loss: 1.0773\n","====> Epoch: 157 Training Average loss: 0.8466, Validating Average loss: 1.0848\n","====> Epoch: 158 Training Average loss: 0.8469, Validating Average loss: 1.0822\n","====> Epoch: 159 Training Average loss: 0.8458, Validating Average loss: 1.0795\n","====> Epoch: 160 Training Average loss: 0.8466, Validating Average loss: 1.0759\n","====> Epoch: 161 Training Average loss: 0.8432, Validating Average loss: 1.0793\n","====> Epoch: 162 Training Average loss: 0.8430, Validating Average loss: 1.0851\n","====> Epoch: 163 Training Average loss: 0.8430, Validating Average loss: 1.0932\n","====> Epoch: 164 Training Average loss: 0.8423, Validating Average loss: 1.0892\n","====> Epoch: 165 Training Average loss: 0.8422, Validating Average loss: 1.0878\n","====> Epoch: 166 Training Average loss: 0.8420, Validating Average loss: 1.0805\n","====> Epoch: 167 Training Average loss: 0.8406, Validating Average loss: 1.0844\n","====> Epoch: 168 Training Average loss: 0.8384, Validating Average loss: 1.0801\n","====> Epoch: 169 Training Average loss: 0.8373, Validating Average loss: 1.0882\n","====> Epoch: 170 Training Average loss: 0.8385, Validating Average loss: 1.0817\n","====> Epoch: 171 Training Average loss: 0.8368, Validating Average loss: 1.0780\n","====> Epoch: 172 Training Average loss: 0.8349, Validating Average loss: 1.0856\n","====> Epoch: 173 Training Average loss: 0.8361, Validating Average loss: 1.0778\n","====> Epoch: 174 Training Average loss: 0.8337, Validating Average loss: 1.0881\n","====> Epoch: 175 Training Average loss: 0.8340, Validating Average loss: 1.0947\n","====> Epoch: 176 Training Average loss: 0.8355, Validating Average loss: 1.0801\n","====> Epoch: 177 Training Average loss: 0.8316, Validating Average loss: 1.0809\n","====> Epoch: 178 Training Average loss: 0.8325, Validating Average loss: 1.0858\n","====> Epoch: 179 Training Average loss: 0.8322, Validating Average loss: 1.0913\n","====> Epoch: 180 Training Average loss: 0.8310, Validating Average loss: 1.0820\n","====> Epoch: 181 Training Average loss: 0.8284, Validating Average loss: 1.0944\n","====> Epoch: 182 Training Average loss: 0.8292, Validating Average loss: 1.0845\n","====> Epoch: 183 Training Average loss: 0.8292, Validating Average loss: 1.0940\n","====> Epoch: 184 Training Average loss: 0.8274, Validating Average loss: 1.0808\n","====> Epoch: 185 Training Average loss: 0.8285, Validating Average loss: 1.0843\n","====> Epoch: 186 Training Average loss: 0.8252, Validating Average loss: 1.0898\n","====> Epoch: 187 Training Average loss: 0.8269, Validating Average loss: 1.1029\n","====> Epoch: 188 Training Average loss: 0.8260, Validating Average loss: 1.0856\n","====> Epoch: 189 Training Average loss: 0.8266, Validating Average loss: 1.0854\n","====> Epoch: 190 Training Average loss: 0.8248, Validating Average loss: 1.0832\n","====> Epoch: 191 Training Average loss: 0.8239, Validating Average loss: 1.0987\n","====> Epoch: 192 Training Average loss: 0.8221, Validating Average loss: 1.0865\n","====> Epoch: 193 Training Average loss: 0.8223, Validating Average loss: 1.0863\n","====> Epoch: 194 Training Average loss: 0.8210, Validating Average loss: 1.0831\n","====> Epoch: 195 Training Average loss: 0.8204, Validating Average loss: 1.0938\n","====> Epoch: 196 Training Average loss: 0.8213, Validating Average loss: 1.0905\n","====> Epoch: 197 Training Average loss: 0.8184, Validating Average loss: 1.0970\n","====> Epoch: 198 Training Average loss: 0.8188, Validating Average loss: 1.0919\n","====> Epoch: 199 Training Average loss: 0.8186, Validating Average loss: 1.1003\n","====> Epoch: 200 Training Average loss: 0.8185, Validating Average loss: 1.0938\n","\n"," ==================== Testing Auto Encoder on device: cuda:0 ====================\n","Average test loss: 1.0837\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"mXU81_oWRz3S"},"source":["### 2.3 保存与加载模型"]},{"cell_type":"code","metadata":{"id":"87X-Lh5RRwNz"},"source":["# save the training model\n","torch.save(ae.state_dict(), 'AutoEncoder.pkl')\n","\n","# load the trained model\n","ae = torch.load('AutoEncoder.pkl')"],"execution_count":null,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"4-6IRUWzP0Wd"},"source":["### 2.4 画图展示训练和验证过程中的loss变化情况：可以发现验证集的loss出现了先下降后微升的情况，考虑是模型过拟合+泛化能力差"]},{"cell_type":"code","metadata":{"id":"SORypM92IsnS","executionInfo":{"status":"ok","timestamp":1605470312644,"user_tz":-480,"elapsed":1547,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"a146157e-ecfa-46eb-ba2a-29d5528ee693","colab":{"base_uri":"https://localhost:8080/","height":515}},"source":["import matplotlib.pyplot as plt\n","plt.figure(figsize = (12,8))\n","plt.plot(np.arange(len(epoches_train_loss)), epoches_train_loss, color='r', label='training loss')\n","plt.plot(np.arange(len(epoches_dev_loss)), epoches_dev_loss, color='b', label='validating loss')\n","plt.xlabel('Steps')\n","plt.ylabel('MSE-loss')\n","plt.legend()"],"execution_count":24,"outputs":[{"output_type":"execute_result","data":{"text/plain":["<matplotlib.legend.Legend at 0x7f59a44fbda0>"]},"metadata":{"tags":[]},"execution_count":24},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAtEAAAHgCAYAAABjBzGSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdeZwV5Z3v8e+v9266oRFQVoG4ILIvUdHRuESjmGjUuAYnGqMZJ5nEOzOOy2Q0yZ3MmLmOMRkTjcYkxmtQr0vGiajRiFETN0BFEQREkE1oQJqGbpZunvvHr4rTNL1UQR9Ow/m8X696nXPq1KnznOrq7u/51VNPWQhBAAAAAJIryHUDAAAAgH0NIRoAAABIiRANAAAApESIBgAAAFIiRAMAAAApEaIBAACAlIpy3YC0evfuHYYMGZLrZgAAAGA/N3PmzDUhhD6tPbfPheghQ4ZoxowZuW4GAAAA9nNmtqSt5+jOAQAAAKREiAYAAABSIkQDAAAAKe1zfaIBAAD2Bdu2bdOyZcu0efPmXDcFHSgrK9PAgQNVXFyc+DWEaAAAgCxYtmyZqqqqNGTIEJlZrpuDNoQQtHbtWi1btkxDhw5N/Dq6cwAAAGTB5s2b1atXLwJ0F2dm6tWrV+ojBoRoAACALCFA7xt25+dEiAYAANgPrV+/Xj/72c9267WTJ0/W+vXr213mpptu0nPPPbdb629pyJAhWrNmTaesa28hRAMAAOyH2gvRjY2N7b522rRpqq6ubneZ73//+/rsZz+72+3b1xGiAQAA9kPXX3+9PvjgA40dO1bXXnutXnjhBR1//PE666yzdOSRR0qSvvjFL2rChAkaMWKE7r777h2vjSvDixcv1vDhw3XllVdqxIgROu2009TQ0CBJuuyyy/TII4/sWP7mm2/W+PHjNWrUKM2bN0+SVFNTo1NPPVUjRozQ1772NQ0ePLjDivNtt92mkSNHauTIkbr99tslSZs2bdKZZ56pMWPGaOTIkXrooYd2fMYjjzxSo0eP1j/+4z927gbsAKNzAAAAZNs110hvvdW56xw7VopCZmtuueUWvfvuu3oret8XXnhBs2bN0rvvvrtjFIpf/vKXOuCAA9TQ0KBPf/rTOu+889SrV6+d1rNgwQJNnTpV99xzjy644AI9+uijmjJlyi7v17t3b82aNUs/+9nPdOutt+oXv/iFvve97+nkk0/WDTfcoKefflr33ntvux9p5syZ+tWvfqXXXntNIQQdffTR+sxnPqNFixapf//+evLJJyVJtbW1Wrt2rR5//HHNmzdPZtZh95PORiUaAAAgTxx11FE7DeP2k5/8RGPGjNExxxyjpUuXasGCBbu8ZujQoRo7dqwkacKECVq8eHGr6z733HN3Webll1/WRRddJEk6/fTT1bNnz3bb9/LLL+ucc85Rt27dVFlZqXPPPVcvvfSSRo0apWeffVbXXXedXnrpJfXo0UM9evRQWVmZrrjiCj322GOqqKhIuzn2CJVoAACAbGunYrw3devWbcf9F154Qc8995xeeeUVVVRU6MQTT2x1mLfS0tId9wsLC3d052hrucLCwg77XKd1+OGHa9asWZo2bZq+853v6JRTTtFNN92k119/XX/84x/1yCOP6I477tDzzz/fqe/bHirRAAAA+6GqqirV1dW1+Xxtba169uypiooKzZs3T6+++mqnt+G4447Tww8/LEn6wx/+oE8++aTd5Y8//nj97ne/U319vTZt2qTHH39cxx9/vFasWKGKigpNmTJF1157rWbNmqWNGzeqtrZWkydP1o9+9CO9/fbbnd7+9lCJBgAA2A/16tVLxx13nEaOHKkzzjhDZ5555k7Pn3766brrrrs0fPhwDRs2TMccc0ynt+Hmm2/WxRdfrPvvv1+TJk1S3759VVVV1eby48eP12WXXaajjjpKkvS1r31N48aN0zPPPKNrr71WBQUFKi4u1p133qm6ujqdffbZ2rx5s0IIuu222zq9/e2xEMJefcM9NXHixDBjxoxcNwMAAKBdc+fO1fDhw3PdjJzasmWLCgsLVVRUpFdeeUVXX331jhMdu5rWfl5mNjOEMLG15alEJ7F9u7Rhg1ReLjXrFwQAAIC2ffTRR7rgggu0fft2lZSU6J577sl1kzoNITqJZcukwYOle++VvvrVXLcGAABgn3DYYYfpzTffzHUzsoITC5MoiDbT9u25bQcAAAC6BEJ0EoRoAAAANEOIToIQDQAAgGYI0UkQogEAANAMIToJQjQAAMgDlZWVkqQVK1boS1/6UqvLnHjiiepouOHbb79d9fX1Ox5PnjxZ69ev3+P2LV68WCNHjtzj9XQGQnQScYhuasptOwAAAPaC/v3765FHHtnt17cM0dOmTVN1dXVnNK3LIEQnQSUaAADsY66//nr99Kc/3fH4u9/9rm699VZt3LhRp5xyisaPH69Ro0bpv//7v3d5bfOKb0NDgy666CINHz5c55xzjhoaGnYsd/XVV2vixIkaMWKEbr75ZknST37yE61YsUInnXSSTjrpJEnSkCFDtGbNGi1evFjDhw/XlVdeqREjRui0007bsb433nhDo0eP1tixY3Xttdd2WHHevHmzLr/8co0aNUrjxo3T9OnTJUlz5szRUUcdpbFjx2r06NFasGCBNm3apDPPPFNjxozRyJEj9dBDD+3BlnWME51EYaHfEqIBAMBuuOYaqbMv1Dd2rHT77W0/f+GFF+qaa67RN77xDUnSww8/rGeeeUZlZWV6/PHH1b17d61Zs0bHHHOMzjrrLJlZq+u58847VVFRoblz52r27NkaP378jud+8IMf6IADDlBTU5NOOeUUzZ49W9/61rd02223afr06erdu/cu61uwYIGmTp2qe+65RxdccIEeffRRTZkyRZdffrnuueceTZo0Sddff32Hn/+nP/2pzEzvvPOO5s2bp9NOO03z58/XXXfdpW9/+9v68pe/rK1bt6qpqUnTpk1T//799eSTT0qSamtrO1x/R6hEJ0ElGgAA7GPGjRun1atXa8WKFXr77bfVs2dPDRo0SCEE3XjjjRo9erQ++9nPavny5Vq1alWb63nxxRc1ZcoUSdLo0aM1evToHc89/PDDGj9+vMaNG6c5c+bovffe67BdQ4cO1dixYyVJEyZM0OLFi7V+/XrV1dVp0qRJkqRLLrmkw/W8/PLLO9p1xBFHaPDgwZo/f74mTZqkf/u3f9MPf/hDLVmyROXl5Ro1apSeffZZXXfddXrppZfUo0ePDtffESrRSRCiAQDAHmivYpxN559/vh555BF9/PHHuvDCCyVJDzzwgGpqajRz5kwVFxdryJAh2rx5c+p1f/jhh7r11lv1xhtvqGfPnrrssssSrae0tHTH/cLCwp26h3SGSy65REcffbSefPJJTZ48WT//+c918skna9asWZo2bZq+853v6JRTTtFNN920R+9DJToJQjQAANgHXXjhhXrwwQf1yCOP6Pzzz5fkXRkOPPBAFRcXa/r06VqyZEm76zjhhBP029/+VpL07rvvavbs2ZKkDRs2qFu3burRo4dWrVqlp556asdrqqqqVFdXl7id1dXVqqqq0muvvSZJevDBBzt8zfHHH68HHnhAkjR//nx99NFHGjZsmBYtWqRPfepT+ta3vqWzzz5bs2fP1ooVK1RRUaEpU6bo2muv1axZsxK3rS1UopMgRAMAgH3QiBEjVFdXpwEDBqhfv36SpC9/+cv6whe+oFGjRmnixIk64ogj2l3H1Vdfrcsvv1zDhw/X8OHDNWHCBEnSmDFjNG7cOB1xxBEaNGiQjjvuuB2vueqqq3T66aerf//+O07468i9996rK6+8UgUFBfrMZz7TYZeLv/3bv9XVV1+tUaNGqaioSL/+9a9VWlqqhx9+WPfff7+Ki4vVt29f3XjjjXrjjTd07bXXqqCgQMXFxbrzzjsTtak9FkLY45XsTRMnTgwdjU3Y6ZqapKIi6fvfl/7lX/buewMAgH3S3LlzNXz48Fw3Y5+xcePGHeNU33LLLVq5cqV+/OMf77X3b+3nZWYzQwgTW1ueSnQSjBMNAACQVU8++aT+/d//XY2NjRo8eLB+/etf57pJ7SJEJxEP+UJ3DgAAgKy48MILd5z8uC/gxMKkCgsJ0QAAAJBEiE6uoIAQDQAAUtnXzj3LV7vzcyJEJ0WIBgAAKZSVlWnt2rUE6S4uhKC1a9eqrKws1evoE50UIRoAAKQwcOBALVu2TDU1NbluCjpQVlamgQMHpnoNITopQjQAAEihuLhYQ4cOzXUzkCV050iKEA0AAIAIITopQjQAAAAihOikCgq42AoAAAAkEaKTY5xoAAAARAjRSdGdAwAAABFCdFKEaAAAAEQI0UkRogEAABAhRCdFiAYAAECEEJ0UIRoAAAARQnRShGgAAABECNFJMU40AAAAIoTopBgnGgAAABFCdFJ05wAAAECEEJ0UIRoAAAARQnRShGgAAABECNFJEaIBAAAQIUQnRYgGAABAhBCdFCEaAAAAEUJ0UowTDQAAgEjWQrSZlZnZ62b2tpnNMbPvtbLMZWZWY2ZvRdPXstWePcY40QAAAIgUZXHdWySdHELYaGbFkl42s6dCCK+2WO6hEMI3s9iOzkF3DgAAAESyFqJDCEHSxuhhcTSFbL1f1hGiAQAAEMlqn2gzKzSztyStlvRsCOG1VhY7z8xmm9kjZjYom+3ZI4RoAAAARLIaokMITSGEsZIGSjrKzEa2WOR/JA0JIYyW9Kyk+1pbj5ldZWYzzGxGTU1NNpvcNkI0AAAAIntldI4QwnpJ0yWd3mL+2hDClujhLyRNaOP1d4cQJoYQJvbp0ye7jW0LIRoAAACRbI7O0cfMqqP75ZJOlTSvxTL9mj08S9LcbLVnjxGiAQAAEMnm6Bz9JN1nZoXysP5wCOH3ZvZ9STNCCE9I+paZnSWpUdI6SZdlsT17hnGiAQAAEMnm6ByzJY1rZf5Nze7fIOmGbLWhUzFONAAAACJcsTApunMAAAAgQohOihANAACACCE6KUI0AAAAIoTopAjRAAAAiBCikyJEAwAAIEKITooQDQAAgAghOinGiQYAAECEEJ0U40QDAAAgQohOiu4cAAAAiBCikyJEAwAAIEKITooQDQAAgAghOilCNAAAACKE6KQI0QAAAIgQopMiRAMAACBCiE6KcaIBAAAQIUQnxTjRAAAAiBCik6I7BwAAACKE6KQI0QAAAIgQopMiRAMAACBCiE6KEA0AAIAIITopQjQAAAAihOikCNEAAACIEKKTYpxoAAAARAjRSTFONAAAACKE6KTozgEAAIAIITopQjQAAAAihOikCqJNFUJu2wEAAICcI0QnFYdoqtEAAAB5jxCdFCEaAAAAEUJ0UoRoAAAARAjRScUhmrGiAQAA8h4hOqnCQr+lEg0AAJD3CNFJ0Z0DAAAAEUJ0UoRoAAAARAjRSRGiAQAAECFEJ0WIBgAAQIQQnRQhGgAAABFCdFKEaAAAAEQI0UkxTjQAAAAihOikGCcaAAAAEUJ0UnTnAAAAQIQQnRQhGgAAABFCdFKEaAAAAEQI0UkRogEAABAhRCdFiAYAAECEEJ0UIRoAAAARQnRShGgAAABECNFJxeNEc7EVAACAvEeITopKNAAAACKE6KQI0QAAAIgQopMiRAMAACBCiE6KEA0AAIAIITopQjQAAAAihOikCNEAAACIEKKTIkQDAAAgQohOinGiAQAAECFEJ0UlGgAAAJGshWgzKzOz183sbTObY2bfa2WZUjN7yMwWmtlrZjYkW+3ZY4RoAAAARLJZid4i6eQQwhhJYyWdbmbHtFjmCkmfhBAOlfQjST/MYnv2DCEaAAAAkayF6OA2Rg+Loym0WOxsSfdF9x+RdIqZWbbatEcI0QAAAIhktU+0mRWa2VuSVkt6NoTwWotFBkhaKkkhhEZJtZJ6ZbNNu40QDQAAgEhWQ3QIoSmEMFbSQElHmdnI3VmPmV1lZjPMbEZNTU3nNjIpQjQAAAAie2V0jhDCeknTJZ3e4qnlkgZJkpkVSeohaW0rr787hDAxhDCxT58+2W5u6wjRAAAAiGRzdI4+ZlYd3S+XdKqkeS0We0LSV6L7X5L0fAihZb/proFxogEAABApyuK6+0m6z8wK5WH94RDC783s+5JmhBCekHSvpPvNbKGkdZIuymJ79gyVaAAAAESyFqJDCLMljWtl/k3N7m+WdH622tCpCNEAAACIcMXCpAjRAAAAiBCikyJEAwAAIEKITooQDQAAgAghOilCNAAAACKE6KQI0QAAAIgQopNinGgAAABECNFJUYkGAABAhBCdFCEaAAAAEUJ0UoRoAAAARAjRSRGiAQAAECFEJ0WIBgAAQIQQnRQhGgAAABFCdFKEaAAAAEQI0UkxTjQAAAAihOikqEQDAAAgQohOihANAACACCE6KUI0AAAAIoTopAjRAAAAiBCikyJEAwAAIEKITooQDQAAgAghOikzvyVEAwAA5D1CdFJmXo1mnGgAAIC8R4hOo6CASjQAAAAI0akQogEAACBCdDqEaAAAAIgQnQ4hGgAAACJEp0OIBgAAgAjR6RCiAQAAIEJ0OoRoAAAAiBCdTmEh40QDAACAEJ0KlWgAAACIEJ0OIRoAAAAiRKdDiAYAAIAI0ekQogEAACBCdDqEaAAAAIgQnQ4hGgAAACJEp0OIBgAAgAjR6TBONAAAAESITodKNAAAAESITocQDQAAABGi0yFEAwAAQITodAjRAAAAECE6HUI0AAAARIhOhxANAAAAEaLTIUQDAABAhOh0GCcaAAAAIkSnQyUaAAAAIkSnQ4gGAACACNHpEKIBAAAgQnQ6hGgAAACIEJ0OIRoAAAAiRKdDiAYAAIAI0ekQogEAACBCdDoFBYwTDQAAAEJ0KoWFVKIBAABAiE6F7hwAAABQFkO0mQ0ys+lm9p6ZzTGzb7eyzIlmVmtmb0XTTdlqT6cgRAMAAEBSURbX3SjpH0IIs8ysStJMM3s2hPBei+VeCiF8Povt6DyEaAAAACiLlegQwsoQwqzofp2kuZIGZOv99gpCNAAAALSX+kSb2RBJ4yS91srTk8zsbTN7ysxG7I327DZCNAAAAJTd7hySJDOrlPSopGtCCBtaPD1L0uAQwkYzmyzpd5IOa2UdV0m6SpIOPvjgLLe4HYRoAAAAKMuVaDMrlgfoB0IIj7V8PoSwIYSwMbo/TVKxmfVuZbm7QwgTQwgT+/Tpk80mt48QDQAAAGV3dA6TdK+kuSGE29pYpm+0nMzsqKg9a7PVpj1WWMjFVgAAAJDV7hzHSbpU0jtm9lY070ZJB0tSCOEuSV+SdLWZNUpqkHRRCCFksU17hko0AAAAlDBEm9lxkt4KIWwysymSxkv6cQhhSVuvCSG8LMnaW28I4Q5Jd6Rob24RogEAAKDk3TnulFRvZmMk/YOkDyT9Jmut6qoI0QAAAFDyEN0YdbM4W9IdIYSfSqrKXrO6KEI0AAAAlLxPdJ2Z3SBpiqQTzKxAUnH2mtVFEaIBAACg5JXoCyVtkXRFCOFjSQMl/Z+staqrIkQDAABAKSrR8hMJm8zscElHSJqavWZ1UYRoAAAAKHkl+kVJpWY2QNIf5EPX/TpbjeqyGCcaAAAASh6iLYRQL+lcST8LIZwvaWT2mtVFUYkGAACAUoRoM5sk6cuSnkz52v0HIRoAAABKHoSvkXSDpMdDCHPM7FOSpmevWV0UIRoAAABKeGJhCOFPkv5kZpVmVhlCWCTpW9ltWhdEiAYAAIASVqLNbJSZvSlpjqT3zGymmY3IbtO6IEI0AAAAlLw7x88l/X0IYXAI4WD5pb/vyV6zuihCNAAAAJQ8RHcLIezoAx1CeEFSt6y0qCsjRAMAAEDJL7ayyMz+RdL90eMpkhZlp0ldGONEAwAAQMkr0V+V1EfSY9HUJ5qXX6hEAwAAQMlH5/hE+TgaR0sFBVIIPpnlujUAAADIkXZDtJn9j6TQ1vMhhLM6vUVdWUFUuCdEAwAA5LWOKtG37pVW7CviEL19e+Y+AAAA8k67ITq6yMpOzGx8CGFW9prUhTUP0QAAAMhbu1NO/UWnt2JfQYgGAACAdi9E529nYEI0AAAAtHsh+nud3op9RWGh3zJWNAAAQF5rN0Sb2ZRm94+TpBDC76LH38xu07ogKtEAAABQx5Xov292/79aPJefF1uRCNEAAAB5rqMQbW3cb+3x/o8QDQAAAHUcokMb91t7vP8jRAMAAEAdX2zlCDObLa86HxLdV/T4U1ltWVdEiAYAAIA6DtHD90or9hWEaAAAAKjjKxYuaf7YzHpJOkHSRyGEmdlsWJdEiAYAAIA6HuLu92Y2MrrfT9K78lE57jeza/ZC+7oWxokGAACAOj6xcGgI4d3o/uWSng0hfEHS0WKIOwAAAOSpjkL0tmb3T5E0TZJCCHWS8i9JEqIBAACgjk8sXGpmfydpmaTxkp6WJDMrl1Sc5bZ1PYRoAAAAqONK9BWSRki6TNKFIYT10fxjJP0qi+3qmgjRAAAAUMejc6yW9DetzJ8uaXq2GtVlEaIBAACgDkK0mT3R3vMhhLM6tzldHCEaAAAA6rhP9CRJSyVNlfSa/EqF+YsQDQAAAHUcovtKOlXSxZIukfSkpKkhhDnZbliXxDjRAAAAUAcnFoYQmkIIT4cQviI/mXChpBfM7Jt7pXVdDZVoAAAAqONKtMysVNKZ8mr0EEk/kfR4dpvVRRGiAQAAoI5PLPyNpJHyi6x8r9nVC/MTIRoAAADquBI9RdImSd+W9C2zHecVmqQQQuiexbZ1PYRoAAAAqONxoju6GEt+IUQDAABAHV+xEM0RogEAACBCdDqEaAAAAIgQnQ7jRAMAAECE6HSoRAMAAECE6HQI0QAAABAhOh1CNAAAAESITocQDQAAABGi0yFEAwAAQITodAjRAAAAECE6HUI0AAAARIhOh3GiAQAAIEJ0OlSiAQAAIEJ0OoRoAAAAiBCdDiEaAAAAymKINrNBZjbdzN4zszlm9u1WljEz+4mZLTSz2WY2Plvt6RSEaAAAAEgqyuK6GyX9QwhhlplVSZppZs+GEN5rtswZkg6LpqMl3Rnddk2EaAAAACiLlegQwsoQwqzofp2kuZIGtFjsbEm/Ce5VSdVm1i9bbdpjhGgAAABoL/WJNrMhksZJeq3FUwMkLW32eJl2DdpdByEaAAAA2gsh2swqJT0q6ZoQwobdXMdVZjbDzGbU1NR0bgPTYJxoAAAAKMsh2syK5QH6gRDCY60sslzSoGaPB0bzdhJCuDuEMDGEMLFPnz7ZaWwSVKIBAACg7I7OYZLulTQ3hHBbG4s9Iemvo1E6jpFUG0JYma027TFCNAAAAJTd0TmOk3SppHfM7K1o3o2SDpakEMJdkqZJmixpoaR6SZdnsT17jhANAAAAZTFEhxBelmQdLBMkfSNbbeh0hGgAAACIKxamQ4gGAACACNHpEKIBAAAgQnQ6hGgAAACIEJ1OPE40IRoAACCvEaLTiCvRXGwFAAAgrxGi06A7BwAAAESITocQDQAAABGi0yFEAwAAQITodAjRAAAAECE6HUI0AAAARIhOx6KrmBOiAQAA8hohOq3CQkI0AABAniNEp1VQwDjRAAAAeY4QnVZBAZVoAACAPEeITosQDQAAkPcI0WkRogEAAPIeITotQjQAAEDeI0SnRYgGAADIe4TotAjRAAAAeY8QnRbjRAMAAOQ9QnRajBMNAACQ9wjRadGdAwAAIO8RotMiRAMAAOQ9QnRahGgAAIC8R4hOixANAACQ9wjRaRGiAQAA8h4hOi1CNAAAQN4jRKfFONEAAAB5jxCdFuNEAwAA5D1CdFp05wAAAMh7hOi0CNEAAAB5jxCdFiEaAAAg7xGi0yJEAwAA5D1CdFqEaAAAgLxHiE6LEA0AAJD3CNFpMU40AABA3iNEp8U40QAAAHmPEJ0W3TkAAADyHiE6LUI0AABA3iNEp0WIBgAAyHuE6LQI0QAAAHmPEJ0WIRoAACDvEaLTIkQDAADkPUJ0WowTDQAAkPcI0WkxTjQAAEDeI0SnRXcOAACAvEeITosQDQAAkPcI0WkRogEAAPIeITotQjQAAEDeI0SnRYgGAADIe4TotAjRAAAAeY8QnRbjRAMAAOQ9QnRajBMNAACQ9wjRadGdAwAAIO8RotMiRAMAAOQ9QnRahGgAAIC8R4hOixANAACQ97IWos3sl2a22szebeP5E82s1szeiqabstWWTkWIBgAAyHtFWVz3ryXdIek37SzzUgjh81lsQ+cjRAMAAOS9rFWiQwgvSlqXrfXnDONEAwAA5L1c94meZGZvm9lTZjYix21JhnGiAQAA8l42u3N0ZJakwSGEjWY2WdLvJB3W2oJmdpWkqyTp4IMP3nstbA3dOQAAAPJezirRIYQNIYSN0f1pkorNrHcby94dQpgYQpjYp0+fvdrOXRCiAQAA8l7OQrSZ9TUzi+4fFbVlba7akxghGgAAIO9lrTuHmU2VdKKk3ma2TNLNkoolKYRwl6QvSbrazBolNUi6KIQQstWeTkOIBgAAyHtZC9EhhIs7eP4O+RB4+xZCNAAAQN7L9egc+x5CNAAAQN4jRKfFONEAAAB5jxCdFuNEAwAA5D1CdFp05wAAAMh7hOi0CqJNtg8MJAIAAIDsIESnFYdoqtEAAAB5ixCdFiEaAAAg7xGi0yJEAwAA5D1CdFqEaAAAgLxHiE6rsNBvCdEAAAB5ixCdVlyJZqxoAACAvEWITovuHAAAAHmPEJ0WIRoAACDvEaLTIkQDAADkPUJ0WoRoAACAvEeITosQDQAAkPcI0WkRogEAAPIeITotxokGAADIe4TotKhEAwAA5D1CdFpcbAUAACDvEaLTohINAACQ9wjRaRGiAQAA8h4hOi1CNAAAQN4jRKdFiAYAAMh7hOi0CNEAAAB5jxCdFuNEAwAA5D1CdFpUogEAAPIeITotxokGAADIe4TotKhEAwAA5D1CdFqEaAAAgLxHiE6LEA0AAJD3CNFpEaIBAADyHiE6LUI0AABA3iNEp0WIBgAAyHuE6LS42AoAAEDeI0SnxTjRAAAAeY8QnRbdOQAAAPIeITotQiBZTpEAACAASURBVDQAAEDeI0SnRYgGAADIe4TotAjRAAAAeY8QnRYhGgAAIO8RotMiRAMAAOQ9QnRajBMNAACQ9wjRaTFONAAAQN4jRKdFdw4AAIC8R4hOixANAAD2I1u35roF+yZCdFqEaAAAsB9Ys0b65jelbt2kr39damjo+DVNTdJvfys995wUwu6/dwjSggXSSy9JdXWZ+QsXSt/9rnTeedKPfyy9//6evU82FeW6AfscQjQAAF1aY6M0fbo0dar0wQfS8OHSyJHSYYdJPXtKPXpIffv6bRIhSKtWSQcemIkBaaxdKz39tE9//KN05JHST38qDRvW9mu2bpXefFN65RV//0sukQ46yJ97/33pO9+R3npLmjLFA/BBB0kzZkh33eWv+7u/k77ylV3bG4L04YfSo49KP/iBtHGjdMop0t13+3s9/LB0xBGtt2nuXOnyy6XXXvPHn/mM9O//Lk2a1PryIXhIvu02D8eHHCIdeqhUXy8984y3Q/I2jhwplZZKb7whmUkDB0qPPebPDxkiPfusv7YrsdBV430bJk6cGGbMmJG7Bixa5HvBffdJf/3XuWsHAGC/EoKHh7RWrJD+1//yoHH99R4S21NTI/3rv0pLl0pnnil94QseDpOqq/Ow9tFH0rJl0vr10uGHS6NHexDq1q3t1374oYe0k0+WPv3pXZ9vaPBw9cQTHoQHDdp1qq5ufTt99JH0pz9JL7wg/f730urVUlWVNGKEh85PPtl5+YIC6bjjpC9+0W8XL5bee09avtzD7ZgxHrSfeMIrr3PnelA97TTpc5+Txo3zUFdS4q999FEPyYcdJl1wgXT88dKcOdKPfuSv37pV6t1bOukkD4T19dINN0hXXCFt2eKffeFC6S9/8TA7Y4bPjxUVeVt79JB+/WupvFyaMME/c3GxR5N583z7Dxni7z1unPQv/+Lrnj/f5/3lL77PSNIZZ0i33uqh/umnpUsv9XYNH+7bp7BQOuAA/9wlJf6+lZXS7bf7z/1f/9W/XPTu7T+TggJf/pBDfHrlFen11/35SZP85//BB77cySf7dhw82IPza6/5z+hLX5IuvthD9Icf+v7w/PPS//2/3oa9zcxmhhAmtvocITqlxYuloUOlX/1Kuuyy3LUDANBlbNkiffyxtHJlZjKTLrqo41C7fLmHkfvu8xB6+uk+TZzYcWj4/e/9X9GmTd6G6mqvUJ5xhs/buNEDVu/e3o6pU/1QeV2d1K+fh2AzD3xTpniA6d7dQ8v990vvvONB8YgjPLQ995z08svStm2ZNpSU7NyndsCATMXx0EP9fo8e0i9/KT3ySOZA7iWXSP/2bx6onnkmU6ndtMnbWlXl26blYFjV1d6eYcP8tfPne7eA1av9+Z49pc9+1rf95MlSWZl/QVm50gNcba1P8+dLv/udNHt2Zt1mUq9e3s2hueOP93XNni394Q9eWZY82Pbr519IJA+fixd7aK2u9qBZUeE/o8su89BbUODB8+//3sN1SyUlvtyxx3rwnDRJ2rDBK8X33ec/u6uvlv75n/3Lz4IFXtV+803pwgv951hVJT34oHTddZm2mXm4PuYY/zwnnOBfMJpbvtxD9+rVvt0bG6V163zfXrNGOuss6Y47MhXxTZukn//cw38I/rOtqfHt/MEHUv/+/gXvK1/x7SD5MiFkRgzu6gjRe2jdOulnP/NvRoeULJUOPli6917pq1/dq+0AgHxTWyv9+c/SX/2Vh7s0PvrIQ1JBgQe7AQN8HeXlO09FRR4K163zcLRsmQehDz/ceWps9BAyZIhX+5oH5pZVzlhFhf+ruPxyf4+aGl92yxZ/PH++H35vbPQA9OGH0quvetAoK/Nq7THHeAguKvKpocFD1aJFHpTGjvXbLVs8ND39dPvb5bTTvDo6fLj09tu+jR56yKuYJSUe/lav9uB79NGZ6uH27V5tPv10ryIecohv09JSackSD5jvvuuBKp4+/jjzvj16SH/zNx4m77/fD/Fv25YJyf37e0g77zzvJlBc7M99/LH/LJcu9WnhQq8sx31lDzvMK+GjRkknnuhfRNJ0uVi0yAPoIYd4MC8v9/1g9mx/35NO8n/7saamzGedO9dfP2GCt/tTn/JgOW2a9OSTvo2vvNKrs6156SXf7mVl/r4DB3r1uLS09eU3b/afc9JuKA0N/qWnf3//fGVlybdLS2mPlOzukZWuhhC9h1as8F+gf/gH6YffWu57+d13+28GAOSZ7dv9n2N7/yDXrfNgMnu2T8uXe1BsavJK3/nne2Dq1s3Xt3ChV+fGjvUqWlOTHzq+8UYPdN26eWXx0ks9CG/d6hW5d97x95k3z6tyhx7qlcGnn/bD+kkUFXnbWiou9r/9Q4f6VFjoYTGuNPbr54f7+/Xbderb1z/P7bd7tbF55bY5M68cfve7HsAkD9nPP+9fHv7yF2nWrF1fX1joQeorX/E+qc1D15//7GGzstK329atXkVcs8ZD3amn7vqzC8Hf5/77/UvB+edLn/98JnRt2eLhsK0w2JaNGz1kLl/uX4SqqjLPLV0q/dd/eVXzc5/zquj+ELqwfyFEd4LzzpNefFFa9sZKlQ7t76WDr399r7cD+WHbNv/Hzj+Uri8OCHV1PvXq5YFr0CB/vqbGp4aGzOHOAQO8mpn257t9u1flNm70ads2D44HHZQ5VNrea2fP9tA5Z463+6CDPHQOHerBbckSr8L27evVvCOP9Irn/Pk+vf9+5tB5CFKfPv7+8dSnjwe1V1/15WJ9+3q/x+Ji368XLPBt1q2bVw/nzMmcnR+fYBSCt/XYY/1w8FNPebW1vn7Xz9avnwewmhoP45s2eXXy0ku9y0Blpb9f/HNqaPCKXkNDZior859dr15etRs61G8745DzihV+klt1tW+jAw7w0Ftc7NugebBsTWOjh9jGRp/iCjp/H4DsI0R3guee82/vD/ysVpf8bbX377j66r3eDuzfli2TbrlF+sUvPEwMGuQHPuJDzbW1Xqm74ALp7LPbP7zd2Oj77YoVHpbisHXggZnq0ubNXnX66KNMOFqzxkNH9+4+VVX5bXyyyNCh3rY33vD+mK+95sFj4EAPM6Wl/nwIXmlatMgPB3/yiQeyujo/ND1kiAervn39cRxg4vslJV6NbGry9+7bt+PQEII0c6ZXIJct8/c3k845x7dXZaVvw+nTvXoZH24sK/MQNm6cf47m77NsmR+WffllrxROmuQh76mnvFdXfJZ6S2btD8vUp48fKu/ZM3OYuqnJf+YHH7zz7aZNXll96imvbramtNTfM+4GcMwxfhh6xAg/iemxxzxESh7eBg/2Cu+GDbu2a+3aXQcgKirKHO4+7DAPlzU1vo7Vq/3+qlW+rxxzjE8TJ/rh/5YnrW3f7oexH3jAK8ijR0vjx/v++cYbfjLSqlXSP/2Td6OLfx4bNvjPNgTfP8rLvbIa98+M94F163yfIWQC2FOE6E6wfbv/8+jba5teeq3Ee9Z/4xt7vR3Y1Zo1flLO5Mne12931dX50YamJq9+9e7t85uavLq1ZYufzNL8RJ/4kHLPnsn74NXWZg4db9vmfQ3ff98P295/v+9rl17qISA++72kxB936+ZnYi9d6qGpd29fx7ZtHv6OOsr7UM6f7wGlrcDVvbuHonXrdp5fWuohauNGDyytjeRYWOhVz7o6/8xjxvjyS5d6KG+uuNjD8tCh3tbu3T3I1tT4IfHFiz2AJRmbtHt33/79+2cqcT17ZoLm4sV+vu877/jylZU+f8MGD4/l5f762bMzfTBbC7o9enjFsFs3X+79931+a+FyxAjv7zp6tH/Z6NbN98cPP/T2FBVlKrQVFb69zPyLxauv+lRfnxl1oLDQt2P8c2/exaBnT9+//+qvMiddFRT4tvz440yfXDP/zC++6CMNSB6qTz/dv0x8+tNefS4u9s8e/ywOOMDbUFrqP8d58/z1PXr4374hQ/zzAEA+IUR3kltvla69VnpHIzXyJ1/3QRjRqmXLPGQNH9456wvB/9HPmeNDEcVnu7/9tg/5s3ixPz77bD9ZJe5b2NJHH/nZ6Q895NW9gQP90PrixV79ah5ahg3z4Pbuu5mQV1LiwalPHw/WS5Z40Cou9krpQQd56OjRw0PJgAH+HmVlXsl8/nmv9ramtNT7N95wgweWtmzf7tXPxx7zEBwfIl+0yIcSWrvW533+876+0aMz1cJVq3xavdqD94ABHkoHDvTqYhzk4m0en8C0YYOHw4ULM9XqE07wfoxxH8kQfLnmJwr17p3scHhDg7d7zZrMbdylpaDA2ztvnk+rVmUOwa9du/MQUEcd5aH2vPO8mh1XZv/yF+8KMGeOh9BTT/VKafyFaONGD9dvvZXpWlBf7/vDscf6thw+3Jd7/XXf74491ivJ2ap2NjX5Z43PrJ8wIX2IXbXKg/CnP+1fKgAA6eQkRJvZLyV9XtLqEMLIVp43ST+WNFlSvaTLQgizOlpvLkP0mjXSwIFBX9vyU91xe5P07W/npB2dbc0ar7y19g86Dq+zZnngGDbMA+rq1T5g/B//6EHjqKM8UKxc6edcPv20v/boo/1s7JNO8upc3LcynurqPBSfcopXCWfM8MO8773nbYq7Hrz2mgdzyR+fe64fKv7Od3y5hx7yk2n+9//O9BPdutWnkhKvApaUZPppTprkwXHZMp8OPNCD1amnegD98599qq/3EDpmjL/vm2/6tG6dV/MOO8zD2qpV/tlXrcoMn7R2rW+n+Fese3c/4/zYYzPjqBYUeJV22DA/vL6nlb745xWH+P1dCL7/Ll3q27S9CxcAAJBWrkL0CZI2SvpNGyF6sqS/k4fooyX9OIRwdEfrzfU40Zde0qj/nlqvFUefq8o/PCZ1764Q/HDqkiWZw7cHHugVqvhkoy1bMmMjbt++6xSCH24++GAPQPE658zxgLBtW+akkngqLPRAe+ihXlFcvdr7v65Z46Gxe3cPjh984MPwLFniQfW44zwYPvWU97194QWvGJ5zjofTzZs9zMZTPB5mrPmZ7L17+3vEA7dLXtm84goPcXff7e/dXFlZZkiisjJ//7ivpuSfZ+xY326rVnn1b8IEH9dy2DDp8ce9q0JtrYfhRx/1vriSt+M//9PH5iwp8UC8bZuH4fp6D8OXXNJ2pbqzbd3q4XrDBq9kcjgcAIB9R866c5jZEEm/byNE/1zSCyGEqdHj9yWdGEJY2d46cx2i4/FKK7RJ3Yq2qOygHqpZW7hLX9A90b27B+S2xh3dHYWFmYH1mzvkEOnLX/ZD9E884V0c4uVHjfJq78SJHmLj/qHz5mUGsx8zxqupy5Z5tbiiwqu5cViML/k5Z46H48MP9y4DzfsPh5A5qW3ChEwgbk9Dg1eEJ0xoezxNAACAPdFeiM5lXWyApKXNHi+L5rUbonPt2GP9nMIPnl2lhv95TpvrD1Svz43U0EMLNXhYmbaXlGnVumKtXlckKyxQVQ+fSkulgu2NKghNKiguUEFpiQoKTQUFmZON6uoyJxVt3erDS40c6Yf7S0oyA+3H09atXmVesMCrnQce6BXpPn08ZMZDOQ0Z4gG2tNSD+SuveAA99ljvXhAH2oYGP2mtutrDcXn5rp//6DaOFQwc6FNLZt539oQT2t6mZl5hTnMovrzc2w8AAJALuaxE/17SLSGEl6PHf5R0XQhhlzKzmV0l6SpJOvjggycsWbIka21O5Y9/9DPZ4vJtWhUV3pGzoqL9+1VV3jciHgOs+f1evfbsEkQAAABoVVetRC+XNKjZ44HRvF2EEO6WdLfk3Tmy37SETjnFS8cffJAZ/qChwTsMN+/EvG2bl1uLizPXMa2v9/Add9Ztfn/tWl9vPD8uKbelvHznUN0yZPfr5x2V+/XzU/Tj64u2VmoGAABAh3IZop+Q9E0ze1B+YmFtR/2hu6SePb3TcLY1NPiQEGvX7jy1Nu/ddzPPxWONtaay0vt/tJwGDswMXFtV5X1HCguTD4QMAACwn8taiDazqZJOlNTbzJZJullSsSSFEO6SNE0+MsdC+RB3l2erLfuF8vJMyE1q+3YfwmLlSh+24uOPvbq9ebPffvxx5lq4f/qTL9N8oOSWevbMXGauZ08P1oWFHsZ79951qqrKhO/4tqDAP0vzK5YAAADsY7IWokMIF3fwfJDEJf+yqaDAw27Pnn6WYke2b/cuKfH1kuMuJU1N3iUlvrTZggUezpuaPHTHXVGSMvNq96GH+ph+FRUerEtLfaiO+FrM1dWZ9h9wwM7348u1AQAA5ACj1iKjoMAvu9e3b/ouKnFf7jVrMlNdXWYg7KamzP0NG7wf+cKFfnJmfPm5rVs9PBcUZAJ6e22trvbwXVbW+lRa6reDBvnYeocf7pcUrKryKb7usZTpsgIAAJAAIRqdIx5JZNCgjpdNIr7m9Lp1Pi7fJ5/sen/9el9m82aftmzJ3F+/3h/X10uPPOIBvSNlZT4qSs+efhJm374e1IuLM5c9jE/aPPBAr6IPHuyBHAAA5BVCNLoms0wwb20A6jQaG33w7fnzMxXyurpMpdvMQ/amTT6tW+f9yGfP9jC+bVvmsoetnahZWelXyKmszFS5W049emRGSYn7th9wgL83AADY5xCisf8rKvLrfO/ptb5D8K4o69b5SZkffeTXUl+5MhPM42np0p0ftzZEYWlpJlTHQxAWFHi4b2rKDE/Yr5+H9Hjs8Oa3lZUEcQAAcoAQDSRl5hXlHj38MpKTJiV/7ZYtmVFSli/P3Mb333xTeuopX7aoyN9r/XrvQ96eysrMF4TiYn9Nba1Xv5v3A4+7pMRjlRcXe4ivrPQwXlXlXVeK+JMAAEAS/McE9obSUh8ecMiQ5K9pbPTRUlaulDZu3PXiPJs2+UgqH3wgvf++B+7qap9qa6WpUz1Up1FVtfNoKNXVXgXv3t2fi2/Lyrxv+po1Xp0/4IDMSal9+3pwP+ggKuUAgP0WIRroqoqKvJtH//679/oQMiOlxP26m0+bN2fCeF3dridvfvKJ9yOPu6Rs2LBrn/DiYg/V7VXN464nIfjrm5oylfCyMg/bgwf7VFmZaXt9vX8ZqK315eNwXlXlwTweySW+X16eCe/V1ZnhEktK/AsBQyICADoRIRrYX5lJffr41BlC8OC9YYP38Y7H6zbzYLxmjfcVX7Uqc1tXl6mgm2WGEty2LTN6ysqV0uuvS48+6vNjRUWZ7jNbt/r6mj+fRlFRJlxv3ervHfc779PHb8vKMl1eSkp2nkpLfX59vX/++nrvq37ooT517+6fK/588W2PHv46AMB+hxANIJm42ltevutzhYWZKvDuiscRb77O5l1BQvCK98aNmSpzCP6aEDysr17tYbu2NlOh3rLF561a5dX1uAJu5mOb19R4t5itW3edtmzZtcJeWurbIElXGTO/eme/fv66lkcDGhu9Qt7aSaOt3VZU+Hriin4IrYf+5lPz50tL/WfUvXtm227b5kcgysszX4oAAB0iRAPoGuLLwrfFLHPVyr2pqcnD9LZtHmLjyvKmTX7BoA8+yAx/GE+NjT7FwyWuXOmhvPmJnfHU1LRzf/eaGh/1JZ6X9oqgSZSX+1jndXXexlhh4a5XCS0tzVTWq6v9dX36eNvjsdkbGzNfaqTM/epq6YgjpOHDPbgvWeJTfb2PKT9okL9Hba1/Kdm82av3cX98qvgAujBCNAC0p7DQw3NL3bpJY8b4lG3xxYfq6z24FhZmrrAZV7WbV9DbetzQ4BX5FSu8at+jh4fiXr38ueb94det80r91q2ZLwW1tR7yWxsvPRviix/16JHp/y5lbgsLPdD37euf4ZNPMp+tf3/pyCM9xBcXZy7MVFLi6+3WzT/bhg0+lZVlzkEoL88cKTDb+Qqoze+XltLXHshjhGgA6OqaX3wo17Zv94Dd2JgJkvGwjC2nmhpp7lyfNm3KXOWzosK70Cxd6sG3utrDclmZB/XmYf6TTzzkxt1qmle8Gxsz71FT49Xz+GqjH30kPftssquV7oni4p3DdUVFpi9/dXXmtqrKQ39BgQf5vn19jPh+/fy1cbU/7kbU2Jj5gkMXG6BLIkQDAJIrKPB+3knEJ7aecMKuz40Y0bntak1jo3cf2b49059/27bMkJHFxR5yu3f3Kv+KFT5t2ZLpbrN9e6bbSnzb/H7LeZs2ZUaVWb4801Vld7vklJd72O7Z04N4ZaX/DOI+8X36+DjxQ4dmKvYtp6qqzOg2JSWZ0XniLwAAdgshGgCwfyoqkg45JNmyVVUeMseNy05b4hNn4z72K1d6yF61KlN5bmzMnABaWOjPLV3qVfvaWu/Dvnath+e4O8/Mmb6upAoLd+6OU1npFe+4/3vL7iqtzSsr8+4wgwd7gB84cOdzB4qK/HOUle18EiuwnyFEAwCQbXFVuKjIg2j37tKwYZ2z7oYGafFir4LHYb15aN+wYecTXMvLPeBu3erdYGpqvFoeV9Xr6lqvtm/Z4lPcnSaJ5n3N424t3br5kJhLl3rlv7w8c9QiPnG1Tx8P4vGJuiUlmSusxp+5oWHnq7NWV3fO9gQSIkQDALAvKy/3EVD2hhC82rxhgwf3RYs8CMdfDoqLPfTGJ7J+/LFX3Fes8Ip6PHZ8r14+OsvYsR7QV6/259980+/vzpjwcQW/oCBz8m1r91ubV1yc6ZvffHSa5lP37t7WjRu9e0737t61qVcvv215hda4Kk8lfr9FiAYAAMmYeVW4d2+fJk7s/PcIwUN6HELjEy43bfIAK2X6uK9f71dWnT8/M3JMXIGPb9u633ze1q2+rnnzMqPTbNmSrt3FxR6ot23zLwtbt+5ciS8t9S8YTU3+Odav96l7dz9HYMQI36b19f4FZNu2TNgvKfHlunf3gD9ggHej6dVr52E141FlJH+uW7dMiI+/AJWUdN7PKs8RogEAQNdh5idJttSr167z+vb1YQyzoeWwjxs2eHCvrPTbDRu8j/ratd49Jb5fXJzptrJhQ+aE1c2bM1XvQYOkUaO8+r1unTRnjvT885ngXl7uXyDisJ+2G02stNS3ZTzmfAi+zQ49VBoyxOevXethvrg48+WksdHfc+tW/yxxF5v4s1dUeNvjL1OlpZkvJcXFu14kqrzcq/6bN/t2jbfRPo4QDQAA0FIcKPv33zvvF5+cWVa26/jj8VVZN2zwwL58uU/r1nnYbj7Fo8rEY72vX+9BtqrKn1+8WFqwQHrpJQ/FvXr5SaKNjR6qa2sz3XMqK72qPmuWv+/GjbvX1aY11dX+vgceuPPnbO3Lgpn0wAM7L9sFEKIBAAByra0LO0keIisrferfXxo9eu+2rbk4bK9f78G6psaDddy/vLFx56uwxvfjoSYrKrwiHV/BdM2azNjy8Wdt3o+8+ZVQuxhCNAAAAJIpKsr0zz744Fy3Jqe4XikAAACQEiEaAAAASIkQDQAAAKREiAYAAABSIkQDAAAAKRGiAQAAgJQI0QAAAEBKhGgAAAAgJUI0AAAAkBIhGgAAAEiJEA0AAACkRIgGAAAAUiJEAwAAACkRogEAAICUCNEAAABASoRoAAAAICVCNAAAAJASIRoAAABIyUIIuW5DKmZWI2lJjt6+t6Q1OXrvfRHbKz22WTpsr/TYZumwvdJhe6XHNktnb2+vwSGEPq09sc+F6FwysxkhhIm5bse+gu2VHtssHbZXemyzdNhe6bC90mObpdOVthfdOQAAAICUCNEAAABASoTodO7OdQP2MWyv9Nhm6bC90mObpcP2SoftlR7bLJ0us73oEw0AAACkRCUaAAAASIkQnYCZnW5m75vZQjO7Ptft6WrMbJCZTTez98xsjpl9O5r/XTNbbmZvRdPkXLe1KzGzxWb2TrRtZkTzDjCzZ81sQXTbM9ft7ArMbFiz/egtM9tgZtewj+3MzH5pZqvN7N1m81rdp8z9JPq7NtvMxueu5bnTxjb7P2Y2L9ouj5tZdTR/iJk1NNvf7spdy3Ojje3V5u+hmd0Q7WPvm9nnctPq3Gpjmz3UbHstNrO3ovnsY21nii73t4zuHB0ws0JJ8yWdKmmZpDckXRxCeC+nDetCzKyfpH4hhFlmViVppqQvSrpA0sYQwq05bWAXZWaLJU0MIaxpNu8/JK0LIdwSfWHrGUK4Lldt7Iqi38nlko6WdLnYx3YwsxMkbZT0mxDCyGheq/tUFHT+TtJk+bb8cQjh6Fy1PVfa2GanSXo+hNBoZj+UpGibDZH0+3i5fNTG9vquWvk9NLMjJU2VdJSk/pKek3R4CKFprzY6x1rbZi2e/09JtSGE77OPtZspLlMX+1tGJbpjR0laGEJYFELYKulBSWfnuE1dSghhZQhhVnS/TtJcSQNy26p91tmS7ovu3yf/w4GdnSLpgxBCri661GWFEF6UtK7F7Lb2qbPl/9RDCOFVSdXRP6+80to2CyH8IYTQGD18VdLAvd6wLqqNfawtZ0t6MISwJYTwoaSF8v+peaW9bWZmJi84Td2rjerC2skUXe5vGSG6YwMkLW32eJkIiG2KvkWPk/RaNOub0eGVX9I1YRdB0h/MbKaZXRXNOyiEsDK6/7Gkg3LTtC7tIu38D4d9rH1t7VP8bUvmq5KeavZ4qJm9aWZ/MrPjc9WoLqi130P2sY4dL2lVCGFBs3nsY5EWmaLL/S0jRKPTmFmlpEclXRNC2CDpTkmHSBoraaWk/8xh87qivwohjJd0hqRvRIf8dgje14r+Vs2YWYmksyT9v2gW+1gK7FPpmNk/S2qU9EA0a6Wkg0MI4yT9vaTfmln3XLWvC+H3cPddrJ2LAuxjkVYyxQ5d5W8ZIbpjyyUNavZ4YDQPzZhZsXxnfyCE8JgkhRBWhRCaQgjbJd2jPDyM154QwvLodrWkx+XbZ1V8GCq6XZ27FnZJZ0iaFUJYJbGPJdTWPsXftnaY2WWSPi/py9E/bEXdEtZG92dK+kDS4Tlr6ofSQAAAA/RJREFUZBfRzu8h+1g7zKxI0rmSHornsY+51jKFuuDfMkJ0x96QdJiZDY2qYBdJeiLHbepSoj5d90qaG0K4rdn85n2SzpH0bsvX5isz6xadMCEz6ybpNPn2eULSV6LFviLpv3PTwi5rp6oN+1gibe1TT0j66+jM9mPkJzatbG0F+cbMTpf0T5LOCiHUN5vfJzqxVWb2KUmHSVqUm1Z2He38Hj4h6SIzKzWzofLt9frebl8X9llJ80IIy+IZ7GNtZwp1wb9lRXvjTfZl0dnZ35T0jKRCSb8MIczJcbO6muMkXSrpnXiYHkk3SrrYzMbKD7kslvT13DSvSzpI0uP+t0JFkn4bQnjazN6Q9LCZXSFpifyEE2jHl41TtfN+9B/sYxlmNlXSiZJ6m9kySTdLukWt71PT5GezL5RULx/pJO+0sc1ukFQq6dnod/TVEMLfSDpB0vfNbJuk7ZL+JoSQ9CS7/UIb2+vE1n4PQwhzzOxhSe/Ju8V8I99G5pBa32YhhHu16/kdEvuY1Ham6HJ/yxjiDgAAAEiJ7hwAAABASoRoAAAAICVCNAAAAJASIRoAAABIiRANAAAApESIBoB9kJn9s5nNiS61/JaZHW1m15hZRa7bBgD5gCHuAGAfY2aTJN0m6cQQwhYz6y2pRNJfJE0MIazJaQMBIA9QiQaAfU8/SWtCCFskKQrNX5LUX9J0M5suSWZ2mpm9YmazzOz/mVllNH+xmf2Hmb1jZq+b2aHR/PPN7F0ze9vMXszNRwOAfQOVaADYx0Rh+GVJFZKek/RQCOFPZrZYUSU6qk4/JumMEMImM7tOUmkI4fvRcveEEH5gZn8t/f/27li1iiCKw/j3R8EiCtcHEItgBCshhYqVnWDqSKqkMeQhQl7A0hewC76BIMJtLmphoXbBxiI2IgixEZRjkSmWCykGEi8bv18zw+zMcqY7HM6yrFfVWpJPwMOqOkwyqaofC7mgJI2AlWhJGpmq+gmsAtvAN+BFkq25bXeBW8Cs/Tp3E7g+eL4/GO+1+Qx4nuQJcOFsopek8+HiogOQJPWrqj/AFJi2CvLm3JYAr6pq46RXzM+raifJHeAR8D7JalV9P93IJel8sBItSSOT5GaSG4Ol28AX4Ai40tbeAvcH/c5LSVYGZx4Pxjdtz3JVvauqPY4r3NfO8BqSNGpWoiVpfC4Dz5JMgN/AZ45bOzaAl0m+VtWD1uKxn+RSO7cLHLT51SQfgV/tHMDTlpwHeA18+Ce3kaQR8sNCSfrPDD9AXHQskjRWtnNIkiRJnaxES5IkSZ2sREuSJEmdTKIlSZKkTibRkiRJUieTaEmSJKmTSbQkSZLUySRakiRJ6vQXfm2UzzB1/T8AAAAASUVORK5CYII=\n","text/plain":["<Figure size 864x576 with 1 Axes>"]},"metadata":{"tags":[],"needs_background":"light"}}]},{"cell_type":"markdown","metadata":{"id":"E4IrkY3Adh3G"},"source":["## **3. Variational Auto Encoder**"]},{"cell_type":"markdown","metadata":{"id":"oy34192Vd1Al"},"source":["### 3.1 定义模型和损失函数"]},{"cell_type":"code","metadata":{"id":"FvEj4FiKUU2g","executionInfo":{"status":"ok","timestamp":1605470953012,"user_tz":-480,"elapsed":1233,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["class VAE(nn.Module):\n","\n","  def __init__(self, nb_movies, device=\"cuda:0\"):\n","    super(VAE, self).__init__()\n","    self.nb_movies = nb_movies\n","    self.encoder = nn.Sequential(\n","        nn.Linear(self.nb_movies, 512),\n","        nn.Sigmoid(),\n","        nn.Dropout(0.9), # 需要一个较大的dropout\n","        nn.Linear(512, 80),\n","        nn.Sigmoid()\n","        )\n","    self.fc1 = nn.Linear(80, 32)\n","    self.fc2 = nn.Linear(80, 32)\n","    self.decoder = nn.Sequential(\n","        nn.Linear(32, 80),\n","        nn.Sigmoid(),\n","        nn.Linear(80, 512),\n","        nn.Sigmoid(),\n","        nn.Linear(512, self.nb_movies)\n","        )\n","    \n","  # reparameterize\n","  def reparameterize(self, mu, logvar):\n","    eps = Variable(torch.randn(mu.size(0), mu.size(1))).to(device)\n","    z = mu + eps * torch.exp(logvar/2) \n","    return z\n","\n","  def forward(self, x):\n","    out1, out2 = self.encoder(x), self.encoder(x)\n","    mu = self.fc1(out1)\n","    logvar = self.fc2(out2)\n","    z = self.reparameterize(mu, logvar)\n","    return self.decoder(z), mu, logvar\n","\n","def loss_func(recon_x, x, mu, logvar):\n","  \"\"\"\n","  VAE的损失包括两部分：\n","  一部分是预测结果和真实结果的平均绝对误差；\n","  另一部分是KL-divergence（KL散度），用来衡量潜在变量的分布和单位高斯分布的差异。\n","  \"\"\"\n","  MSE = torch.mean(torch.norm((x - recon_x), p=2, dim=1, keepdim=False)**2/torch.sum(recon_x!=0,axis=1))\n","  KLD = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())\n","  return MSE + KLD"],"execution_count":25,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"PUffuiIkd6DN"},"source":["### 3.2 定义训练，验证和测试函数"]},{"cell_type":"code","metadata":{"id":"bPHi0HL2AVMK","executionInfo":{"status":"ok","timestamp":1605471486931,"user_tz":-480,"elapsed":529746,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"42dcce6b-e802-41e5-905c-cea4bf13bf38","colab":{"base_uri":"https://localhost:8080/"}},"source":["def train(train_loader, dev_loader=None, is_validate=True, device=\"cuda:0\"):\n","  vae.train()\n","  total_loss = 0\n","  for _, data in enumerate(train_loader, 0):\n","    data = Variable(data[0]).to(device)\n","    target = data.clone()\n","    optimizer.zero_grad()\n","    recon_x, mu, logvar = vae.forward(data)\n","    recon_x[target == 0] = 0\n","    loss = loss_func(recon_x, data, mu, logvar)\n","    loss.backward()\n","    total_loss += loss.item()\n","    optimizer.step()\n","    epoch_train_loss = total_loss / len(train_loader)\n","\n","  if (is_validate == True):\n","    vae.eval()\n","    total_loss = 0\n","    with torch.no_grad():\n","      for _, data in enumerate(dev_loader, 0):\n","        data = Variable(data[0]).to(device)\n","        target = data.clone()\n","        recon_x, mu, logvar = vae.forward(data)\n","        recon_x[target == 0] = 0\n","        loss = loss_func(recon_x, data, mu, logvar)\n","        total_loss += loss.item()\n","        epoch_dev_loss = total_loss / len(dev_loader)\n","    print('====> Epoch: {} Training Average loss: {:.4f}, Validating Average loss: {:.4f}'.format(epoch, epoch_train_loss, epoch_dev_loss))\n","    return epoch_train_loss, epoch_dev_loss\n","\n","  else:\n","    print('====> Epoch: {} Training Average loss: {:.4f}'.format(epoch, epoch_train_loss))\n","    return epoch_train_loss\n","\n","def test(test_loader, device=\"cuda:0\"):\n","  vae.eval()\n","  total_loss = 0\n","  with torch.no_grad():\n","    for _, data in enumerate(test_loader, 0):\n","      data = Variable(data[0]).to(device)\n","      target = data.clone()\n","      recon_x, mu, logvar = vae.forward(data)\n","      recon_x[target == 0] = 0\n","      loss = loss_func(recon_x, data, mu, logvar)\n","      total_loss += loss.item()\n","  print('Average test loss: {:.4f}'.format(total_loss / len(test_loader)))\n","\n","if __name__ == \"__main__\":\n","  device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n","  vae = VAE(nb_movies = nb_movies).to(device)\n","  optimizer =  optim.Adam(vae.parameters(), lr=1e-4, betas=(0.9, 0.999), eps=1e-08, weight_decay=0)\n","  EPOCH = 200\n","  epoches_train_loss = []\n","  epoches_dev_loss = []\n","\n","  print(\"\\n\", 20 * \"=\", \"Training Variational Auto Encoder on device: {}\".format(device), 20 * \"=\")\n","  for epoch in range(1, EPOCH + 1):\n","    epoch_train_loss, epoch_dev_loss = train(train_loader, dev_loader = dev_loader)\n","    epoches_train_loss.append(epoch_train_loss)\n","    epoches_dev_loss.append(epoch_dev_loss)\n","  print(\"\\n\", 20 * \"=\", \"Testing Variational Auto Encoder on device: {}\".format(device), 20 * \"=\")\n","  test(test_loader)"],"execution_count":26,"outputs":[{"output_type":"stream","text":["\n"," ==================== Training Variational Auto Encoder on device: cuda:0 ====================\n","====> Epoch: 1 Training Average loss: 7.4468, Validating Average loss: 1.4327\n","====> Epoch: 2 Training Average loss: 1.9506, Validating Average loss: 1.2280\n","====> Epoch: 3 Training Average loss: 1.4592, Validating Average loss: 1.0943\n","====> Epoch: 4 Training Average loss: 1.2420, Validating Average loss: 1.0396\n","====> Epoch: 5 Training Average loss: 1.1448, Validating Average loss: 1.0240\n","====> Epoch: 6 Training Average loss: 1.0957, Validating Average loss: 1.0269\n","====> Epoch: 7 Training Average loss: 1.0674, Validating Average loss: 1.0248\n","====> Epoch: 8 Training Average loss: 1.0468, Validating Average loss: 1.0277\n","====> Epoch: 9 Training Average loss: 1.0347, Validating Average loss: 1.0238\n","====> Epoch: 10 Training Average loss: 1.0244, Validating Average loss: 1.0253\n","====> Epoch: 11 Training Average loss: 1.0172, Validating Average loss: 1.0253\n","====> Epoch: 12 Training Average loss: 1.0119, Validating Average loss: 1.0226\n","====> Epoch: 13 Training Average loss: 1.0087, Validating Average loss: 1.0191\n","====> Epoch: 14 Training Average loss: 1.0040, Validating Average loss: 1.0173\n","====> Epoch: 15 Training Average loss: 1.0028, Validating Average loss: 1.0190\n","====> Epoch: 16 Training Average loss: 1.0002, Validating Average loss: 1.0164\n","====> Epoch: 17 Training Average loss: 0.9982, Validating Average loss: 1.0133\n","====> Epoch: 18 Training Average loss: 0.9969, Validating Average loss: 1.0129\n","====> Epoch: 19 Training Average loss: 0.9953, Validating Average loss: 1.0135\n","====> Epoch: 20 Training Average loss: 0.9956, Validating Average loss: 1.0137\n","====> Epoch: 21 Training Average loss: 0.9943, Validating Average loss: 1.0121\n","====> Epoch: 22 Training Average loss: 0.9939, Validating Average loss: 1.0153\n","====> Epoch: 23 Training Average loss: 0.9934, Validating Average loss: 1.0120\n","====> Epoch: 24 Training Average loss: 0.9929, Validating Average loss: 1.0115\n","====> Epoch: 25 Training Average loss: 0.9931, Validating Average loss: 1.0099\n","====> Epoch: 26 Training Average loss: 0.9919, Validating Average loss: 1.0087\n","====> Epoch: 27 Training Average loss: 0.9912, Validating Average loss: 1.0122\n","====> Epoch: 28 Training Average loss: 0.9908, Validating Average loss: 1.0076\n","====> Epoch: 29 Training Average loss: 0.9905, Validating Average loss: 1.0092\n","====> Epoch: 30 Training Average loss: 0.9908, Validating Average loss: 1.0069\n","====> Epoch: 31 Training Average loss: 0.9900, Validating Average loss: 1.0079\n","====> Epoch: 32 Training Average loss: 0.9898, Validating Average loss: 1.0101\n","====> Epoch: 33 Training Average loss: 0.9906, Validating Average loss: 1.0094\n","====> Epoch: 34 Training Average loss: 0.9896, Validating Average loss: 1.0101\n","====> Epoch: 35 Training Average loss: 0.9894, Validating Average loss: 1.0078\n","====> Epoch: 36 Training Average loss: 0.9893, Validating Average loss: 1.0076\n","====> Epoch: 37 Training Average loss: 0.9892, Validating Average loss: 1.0071\n","====> Epoch: 38 Training Average loss: 0.9888, Validating Average loss: 1.0088\n","====> Epoch: 39 Training Average loss: 0.9890, Validating Average loss: 1.0057\n","====> Epoch: 40 Training Average loss: 0.9882, Validating Average loss: 1.0082\n","====> Epoch: 41 Training Average loss: 0.9884, Validating Average loss: 1.0074\n","====> Epoch: 42 Training Average loss: 0.9885, Validating Average loss: 1.0068\n","====> Epoch: 43 Training Average loss: 0.9876, Validating Average loss: 1.0037\n","====> Epoch: 44 Training Average loss: 0.9884, Validating Average loss: 1.0073\n","====> Epoch: 45 Training Average loss: 0.9871, Validating Average loss: 1.0059\n","====> Epoch: 46 Training Average loss: 0.9879, Validating Average loss: 1.0056\n","====> Epoch: 47 Training Average loss: 0.9873, Validating Average loss: 1.0058\n","====> Epoch: 48 Training Average loss: 0.9873, Validating Average loss: 1.0040\n","====> Epoch: 49 Training Average loss: 0.9873, Validating Average loss: 1.0040\n","====> Epoch: 50 Training Average loss: 0.9865, Validating Average loss: 1.0072\n","====> Epoch: 51 Training Average loss: 0.9866, Validating Average loss: 1.0055\n","====> Epoch: 52 Training Average loss: 0.9872, Validating Average loss: 1.0077\n","====> Epoch: 53 Training Average loss: 0.9867, Validating Average loss: 1.0047\n","====> Epoch: 54 Training Average loss: 0.9857, Validating Average loss: 1.0067\n","====> Epoch: 55 Training Average loss: 0.9863, Validating Average loss: 1.0051\n","====> Epoch: 56 Training Average loss: 0.9861, Validating Average loss: 1.0035\n","====> Epoch: 57 Training Average loss: 0.9855, Validating Average loss: 1.0043\n","====> Epoch: 58 Training Average loss: 0.9856, Validating Average loss: 1.0043\n","====> Epoch: 59 Training Average loss: 0.9861, Validating Average loss: 1.0057\n","====> Epoch: 60 Training Average loss: 0.9856, Validating Average loss: 1.0032\n","====> Epoch: 61 Training Average loss: 0.9859, Validating Average loss: 1.0059\n","====> Epoch: 62 Training Average loss: 0.9854, Validating Average loss: 1.0047\n","====> Epoch: 63 Training Average loss: 0.9847, Validating Average loss: 1.0054\n","====> Epoch: 64 Training Average loss: 0.9851, Validating Average loss: 1.0045\n","====> Epoch: 65 Training Average loss: 0.9855, Validating Average loss: 1.0035\n","====> Epoch: 66 Training Average loss: 0.9848, Validating Average loss: 1.0038\n","====> Epoch: 67 Training Average loss: 0.9848, Validating Average loss: 1.0031\n","====> Epoch: 68 Training Average loss: 0.9852, Validating Average loss: 1.0018\n","====> Epoch: 69 Training Average loss: 0.9845, Validating Average loss: 1.0044\n","====> Epoch: 70 Training Average loss: 0.9844, Validating Average loss: 1.0055\n","====> Epoch: 71 Training Average loss: 0.9837, Validating Average loss: 1.0013\n","====> Epoch: 72 Training Average loss: 0.9848, Validating Average loss: 1.0043\n","====> Epoch: 73 Training Average loss: 0.9842, Validating Average loss: 1.0048\n","====> Epoch: 74 Training Average loss: 0.9839, Validating Average loss: 1.0017\n","====> Epoch: 75 Training Average loss: 0.9836, Validating Average loss: 1.0029\n","====> Epoch: 76 Training Average loss: 0.9842, Validating Average loss: 1.0046\n","====> Epoch: 77 Training Average loss: 0.9844, Validating Average loss: 1.0039\n","====> Epoch: 78 Training Average loss: 0.9838, Validating Average loss: 1.0024\n","====> Epoch: 79 Training Average loss: 0.9840, Validating Average loss: 1.0039\n","====> Epoch: 80 Training Average loss: 0.9833, Validating Average loss: 1.0020\n","====> Epoch: 81 Training Average loss: 0.9837, Validating Average loss: 1.0042\n","====> Epoch: 82 Training Average loss: 0.9833, Validating Average loss: 1.0044\n","====> Epoch: 83 Training Average loss: 0.9833, Validating Average loss: 1.0031\n","====> Epoch: 84 Training Average loss: 0.9834, Validating Average loss: 1.0070\n","====> Epoch: 85 Training Average loss: 0.9834, Validating Average loss: 1.0032\n","====> Epoch: 86 Training Average loss: 0.9828, Validating Average loss: 1.0044\n","====> Epoch: 87 Training Average loss: 0.9832, Validating Average loss: 1.0047\n","====> Epoch: 88 Training Average loss: 0.9832, Validating Average loss: 1.0029\n","====> Epoch: 89 Training Average loss: 0.9819, Validating Average loss: 1.0034\n","====> Epoch: 90 Training Average loss: 0.9827, Validating Average loss: 1.0048\n","====> Epoch: 91 Training Average loss: 0.9827, Validating Average loss: 1.0040\n","====> Epoch: 92 Training Average loss: 0.9828, Validating Average loss: 1.0021\n","====> Epoch: 93 Training Average loss: 0.9821, Validating Average loss: 1.0037\n","====> Epoch: 94 Training Average loss: 0.9821, Validating Average loss: 1.0023\n","====> Epoch: 95 Training Average loss: 0.9824, Validating Average loss: 1.0036\n","====> Epoch: 96 Training Average loss: 0.9828, Validating Average loss: 1.0025\n","====> Epoch: 97 Training Average loss: 0.9819, Validating Average loss: 1.0016\n","====> Epoch: 98 Training Average loss: 0.9822, Validating Average loss: 1.0032\n","====> Epoch: 99 Training Average loss: 0.9821, Validating Average loss: 1.0026\n","====> Epoch: 100 Training Average loss: 0.9820, Validating Average loss: 1.0018\n","====> Epoch: 101 Training Average loss: 0.9819, Validating Average loss: 1.0028\n","====> Epoch: 102 Training Average loss: 0.9817, Validating Average loss: 1.0025\n","====> Epoch: 103 Training Average loss: 0.9817, Validating Average loss: 1.0023\n","====> Epoch: 104 Training Average loss: 0.9811, Validating Average loss: 1.0027\n","====> Epoch: 105 Training Average loss: 0.9825, Validating Average loss: 0.9997\n","====> Epoch: 106 Training Average loss: 0.9815, Validating Average loss: 1.0015\n","====> Epoch: 107 Training Average loss: 0.9816, Validating Average loss: 1.0013\n","====> Epoch: 108 Training Average loss: 0.9813, Validating Average loss: 1.0025\n","====> Epoch: 109 Training Average loss: 0.9809, Validating Average loss: 1.0025\n","====> Epoch: 110 Training Average loss: 0.9812, Validating Average loss: 1.0014\n","====> Epoch: 111 Training Average loss: 0.9817, Validating Average loss: 1.0033\n","====> Epoch: 112 Training Average loss: 0.9812, Validating Average loss: 1.0022\n","====> Epoch: 113 Training Average loss: 0.9809, Validating Average loss: 1.0018\n","====> Epoch: 114 Training Average loss: 0.9814, Validating Average loss: 1.0014\n","====> Epoch: 115 Training Average loss: 0.9817, Validating Average loss: 1.0012\n","====> Epoch: 116 Training Average loss: 0.9806, Validating Average loss: 1.0038\n","====> Epoch: 117 Training Average loss: 0.9813, Validating Average loss: 1.0045\n","====> Epoch: 118 Training Average loss: 0.9813, Validating Average loss: 1.0014\n","====> Epoch: 119 Training Average loss: 0.9807, Validating Average loss: 1.0018\n","====> Epoch: 120 Training Average loss: 0.9809, Validating Average loss: 1.0017\n","====> Epoch: 121 Training Average loss: 0.9808, Validating Average loss: 1.0018\n","====> Epoch: 122 Training Average loss: 0.9814, Validating Average loss: 1.0014\n","====> Epoch: 123 Training Average loss: 0.9807, Validating Average loss: 1.0023\n","====> Epoch: 124 Training Average loss: 0.9807, Validating Average loss: 1.0005\n","====> Epoch: 125 Training Average loss: 0.9806, Validating Average loss: 1.0033\n","====> Epoch: 126 Training Average loss: 0.9803, Validating Average loss: 1.0010\n","====> Epoch: 127 Training Average loss: 0.9806, Validating Average loss: 1.0025\n","====> Epoch: 128 Training Average loss: 0.9807, Validating Average loss: 1.0057\n","====> Epoch: 129 Training Average loss: 0.9802, Validating Average loss: 1.0006\n","====> Epoch: 130 Training Average loss: 0.9804, Validating Average loss: 1.0034\n","====> Epoch: 131 Training Average loss: 0.9800, Validating Average loss: 1.0015\n","====> Epoch: 132 Training Average loss: 0.9800, Validating Average loss: 1.0043\n","====> Epoch: 133 Training Average loss: 0.9804, Validating Average loss: 1.0036\n","====> Epoch: 134 Training Average loss: 0.9799, Validating Average loss: 1.0010\n","====> Epoch: 135 Training Average loss: 0.9802, Validating Average loss: 0.9998\n","====> Epoch: 136 Training Average loss: 0.9798, Validating Average loss: 1.0021\n","====> Epoch: 137 Training Average loss: 0.9803, Validating Average loss: 1.0026\n","====> Epoch: 138 Training Average loss: 0.9797, Validating Average loss: 1.0006\n","====> Epoch: 139 Training Average loss: 0.9795, Validating Average loss: 1.0011\n","====> Epoch: 140 Training Average loss: 0.9797, Validating Average loss: 0.9986\n","====> Epoch: 141 Training Average loss: 0.9796, Validating Average loss: 1.0040\n","====> Epoch: 142 Training Average loss: 0.9802, Validating Average loss: 1.0001\n","====> Epoch: 143 Training Average loss: 0.9793, Validating Average loss: 1.0009\n","====> Epoch: 144 Training Average loss: 0.9795, Validating Average loss: 1.0014\n","====> Epoch: 145 Training Average loss: 0.9794, Validating Average loss: 1.0001\n","====> Epoch: 146 Training Average loss: 0.9786, Validating Average loss: 0.9996\n","====> Epoch: 147 Training Average loss: 0.9802, Validating Average loss: 1.0011\n","====> Epoch: 148 Training Average loss: 0.9792, Validating Average loss: 1.0016\n","====> Epoch: 149 Training Average loss: 0.9793, Validating Average loss: 1.0014\n","====> Epoch: 150 Training Average loss: 0.9793, Validating Average loss: 0.9998\n","====> Epoch: 151 Training Average loss: 0.9790, Validating Average loss: 0.9990\n","====> Epoch: 152 Training Average loss: 0.9783, Validating Average loss: 1.0001\n","====> Epoch: 153 Training Average loss: 0.9791, Validating Average loss: 0.9996\n","====> Epoch: 154 Training Average loss: 0.9794, Validating Average loss: 1.0007\n","====> Epoch: 155 Training Average loss: 0.9790, Validating Average loss: 1.0004\n","====> Epoch: 156 Training Average loss: 0.9790, Validating Average loss: 1.0009\n","====> Epoch: 157 Training Average loss: 0.9786, Validating Average loss: 1.0011\n","====> Epoch: 158 Training Average loss: 0.9786, Validating Average loss: 1.0017\n","====> Epoch: 159 Training Average loss: 0.9788, Validating Average loss: 1.0026\n","====> Epoch: 160 Training Average loss: 0.9784, Validating Average loss: 1.0017\n","====> Epoch: 161 Training Average loss: 0.9793, Validating Average loss: 1.0014\n","====> Epoch: 162 Training Average loss: 0.9782, Validating Average loss: 1.0018\n","====> Epoch: 163 Training Average loss: 0.9784, Validating Average loss: 1.0005\n","====> Epoch: 164 Training Average loss: 0.9784, Validating Average loss: 1.0015\n","====> Epoch: 165 Training Average loss: 0.9784, Validating Average loss: 1.0018\n","====> Epoch: 166 Training Average loss: 0.9786, Validating Average loss: 1.0007\n","====> Epoch: 167 Training Average loss: 0.9784, Validating Average loss: 1.0006\n","====> Epoch: 168 Training Average loss: 0.9782, Validating Average loss: 0.9988\n","====> Epoch: 169 Training Average loss: 0.9787, Validating Average loss: 1.0002\n","====> Epoch: 170 Training Average loss: 0.9785, Validating Average loss: 1.0015\n","====> Epoch: 171 Training Average loss: 0.9780, Validating Average loss: 0.9992\n","====> Epoch: 172 Training Average loss: 0.9783, Validating Average loss: 1.0017\n","====> Epoch: 173 Training Average loss: 0.9785, Validating Average loss: 1.0008\n","====> Epoch: 174 Training Average loss: 0.9781, Validating Average loss: 0.9997\n","====> Epoch: 175 Training Average loss: 0.9780, Validating Average loss: 0.9998\n","====> Epoch: 176 Training Average loss: 0.9781, Validating Average loss: 0.9999\n","====> Epoch: 177 Training Average loss: 0.9784, Validating Average loss: 1.0008\n","====> Epoch: 178 Training Average loss: 0.9778, Validating Average loss: 1.0006\n","====> Epoch: 179 Training Average loss: 0.9785, Validating Average loss: 0.9993\n","====> Epoch: 180 Training Average loss: 0.9778, Validating Average loss: 0.9995\n","====> Epoch: 181 Training Average loss: 0.9779, Validating Average loss: 0.9994\n","====> Epoch: 182 Training Average loss: 0.9780, Validating Average loss: 0.9994\n","====> Epoch: 183 Training Average loss: 0.9779, Validating Average loss: 1.0004\n","====> Epoch: 184 Training Average loss: 0.9780, Validating Average loss: 0.9999\n","====> Epoch: 185 Training Average loss: 0.9775, Validating Average loss: 1.0005\n","====> Epoch: 186 Training Average loss: 0.9771, Validating Average loss: 1.0006\n","====> Epoch: 187 Training Average loss: 0.9775, Validating Average loss: 0.9999\n","====> Epoch: 188 Training Average loss: 0.9776, Validating Average loss: 0.9979\n","====> Epoch: 189 Training Average loss: 0.9777, Validating Average loss: 1.0007\n","====> Epoch: 190 Training Average loss: 0.9775, Validating Average loss: 0.9994\n","====> Epoch: 191 Training Average loss: 0.9778, Validating Average loss: 0.9989\n","====> Epoch: 192 Training Average loss: 0.9783, Validating Average loss: 1.0005\n","====> Epoch: 193 Training Average loss: 0.9782, Validating Average loss: 1.0009\n","====> Epoch: 194 Training Average loss: 0.9772, Validating Average loss: 0.9991\n","====> Epoch: 195 Training Average loss: 0.9776, Validating Average loss: 0.9989\n","====> Epoch: 196 Training Average loss: 0.9776, Validating Average loss: 1.0020\n","====> Epoch: 197 Training Average loss: 0.9772, Validating Average loss: 0.9983\n","====> Epoch: 198 Training Average loss: 0.9774, Validating Average loss: 1.0010\n","====> Epoch: 199 Training Average loss: 0.9780, Validating Average loss: 1.0002\n","====> Epoch: 200 Training Average loss: 0.9774, Validating Average loss: 1.0003\n","\n"," ==================== Testing Variational Auto Encoder on device: cuda:0 ====================\n","Average test loss: 0.9956\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"whP5w_Lc_qWw"},"source":["### 3.3 画图展示训练和验证过程中的loss变化情况，发现虽然VAE的training loss比AE大，但是validating loss和test loss小于AE。考虑是因为VAE的training loss包含了两部分，因而training loss偏大；但VAE模型可能捕捉到了数据中更复杂的模式，使得在验证集和测试集上有更好的表现。 "]},{"cell_type":"code","metadata":{"id":"Stk1vrbEfkM1","executionInfo":{"status":"ok","timestamp":1605471495077,"user_tz":-480,"elapsed":1414,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"1b626262-7a5b-4521-8630-0bdc5e384b54","colab":{"base_uri":"https://localhost:8080/","height":515}},"source":["import matplotlib.pyplot as plt\n","plt.figure(figsize = (12,8))\n","plt.plot(np.arange(len(epoches_train_loss)), epoches_train_loss, color='r', label='training loss')\n","plt.plot(np.arange(len(epoches_dev_loss)), epoches_dev_loss, color='b', label='validating loss')\n","plt.xlabel('Steps')\n","plt.ylabel('MSE-loss')\n","plt.legend()"],"execution_count":27,"outputs":[{"output_type":"execute_result","data":{"text/plain":["<matplotlib.legend.Legend at 0x7f59a5e3f400>"]},"metadata":{"tags":[]},"execution_count":27},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAscAAAHgCAYAAABJt8A9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3de5xddX3v/9dn75nMZCaTCxCVABKsCjEJJJByOSl35cdFoVDlIukRqtDSWuXRUwp6LKg9bW3LocpRseAFqwhSFPUUBNGCSI8gIXKHgmDQEIQESTKZyUzm8v39sdae7AxzTbJnT1iv5+OxH/u+1nevvfae917z3mtHSglJkiRJUKr3ACRJkqTJwnAsSZIk5QzHkiRJUs5wLEmSJOUMx5IkSVLOcCxJkiTlGuo9gGq77bZbmjt3br2HIUmSpNewBx54YG1KafZQ102qcDx37lyWL19e72FIkiTpNSwinhvuOmsVkiRJUs5wLEmSJOUMx5IkSVJuUnWOJUmSJruenh5WrVpFV1dXvYeiUTQ3N7PnnnvS2Ng45vsYjiVJksZh1apVtLW1MXfuXCKi3sPRMFJKvPzyy6xatYp99tlnzPezViFJkjQOXV1d7LrrrgbjSS4i2HXXXce9hd9wLEmSNE4G453DtjxPhmNJkqSdyLp16/j85z+/Tfc98cQTWbdu3Yi3ufTSS/nhD3+4TdMfbO7cuaxdu3aHTGuiGI4lSZJ2IiOF497e3hHve+uttzJz5swRb/PJT36St7/97ds8vp2d4ViSJGkncskll/DMM8+waNEiLrroIu666y4OP/xwTj75ZN72trcB8Pu///scdNBBzJ8/n6uvvnrgvpUtuStXrmTevHmcd955zJ8/n+OOO45NmzYBcM4553DTTTcN3P6yyy7jwAMPZOHChTz55JMArFmzhne84x3Mnz+fD3zgA+y9996jbiG+4oorWLBgAQsWLODTn/40AB0dHZx00kkccMABLFiwgG9+85sDj/Ftb3sb+++/P3/5l3+5YxfgKNxbhSRJ0ra68EJ48MEdO81FiyAPj0P51Kc+xaOPPsqD+XzvuusuVqxYwaOPPjqwV4Yvf/nL7LLLLmzatInf/d3f5Q/+4A/Yddddt5rO008/zfXXX88111zD6aefzre+9S2WLVv2qvntttturFixgs9//vNcfvnlfPGLX+QTn/gExxxzDB/5yEe47bbb+NKXvjTiQ3rggQf4yle+wn333UdKiUMOOYQjjzySZ599ljlz5nDLLbcAsH79el5++WVuvvlmnnzySSJi1BrIjuaWY0mSpJ3cwQcfvNXuyq688koOOOAADj30UH7961/z9NNPv+o+++yzD4sWLQLgoIMOYuXKlUNO+7TTTnvVbe655x7OPPNMAI4//nhmzZo14vjuueceTj31VFpbW5k2bRqnnXYaP/nJT1i4cCF33HEHF198MT/5yU+YMWMGM2bMoLm5mfe///18+9vfpqWlZbyLY7u45ViSJGlbjbCFdyK1trYOnL7rrrv44Q9/yE9/+lNaWlo46qijhtydWVNT08Dpcrk8UKsY7nblcnnUTvN4vfWtb2XFihXceuutfOxjH+PYY4/l0ksv5Wc/+xk/+tGPuOmmm/jsZz/Lf/zHf+zQ+Y7ELceSJEk7kba2Ntrb24e9fv369cyaNYuWlhaefPJJ7r333h0+hqVLl3LjjTcC8IMf/IBXXnllxNsffvjhfOc736Gzs5OOjg5uvvlmDj/8cFavXk1LSwvLli3joosuYsWKFWzcuJH169dz4okn8s///M889NBDO3z8I3HLsSRJ0k5k1113ZenSpSxYsIATTjiBk046aavrjz/+eL7whS8wb9489t13Xw499NAdPobLLruMs846i6997WscdthhvOENb6CtrW3Y2x944IGcc845HHzwwQB84AMfYPHixdx+++1cdNFFlEolGhsbueqqq2hvb+eUU06hq6uLlBJXXHHFDh//SCKlNKEzHMmSJUvS8uXL6z0MSZKkYT3xxBPMmzev3sOoq+7ubsrlMg0NDfz0pz/lggsuGPiC4GQz1PMVEQ+klJYMdXu3HG/cCCnBCJ92JEmStMWvfvUrTj/9dPr7+5kyZQrXXHNNvYe0wxiOTz4Zenvh7rvrPRJJkqSdwlve8hZ+/vOf13sYNeEX8kol6O+v9ygkSZI0CRiOSyXo66v3KCRJkjQJGI7LZbccS5IkCTAcW6uQJEnSAMOxtQpJkvQaN23aNABWr17Nu9/97iFvc9RRRzHaLnU//elP09nZOXD+xBNPZN26dds9vpUrV7JgwYLtns6OYDi2ViFJkgpizpw53HTTTdt8/8Hh+NZbb2XmzJk7YmiThuHYWoUkSdqJXHLJJXzuc58bOP/xj3+cyy+/nI0bN3Lsscdy4IEHsnDhQr773e++6r7VW2g3bdrEmWeeybx58zj11FPZtGnTwO0uuOAClixZwvz587nssssAuPLKK1m9ejVHH300Rx99NABz585l7dq1rFy5knnz5nHeeecxf/58jjvuuIHp3X///ey///4sWrSIiy66aNQtxF1dXZx77rksXLiQxYsXc+eddwLw2GOPcfDBB7No0SL2339/nn76aTo6OjjppJM44IADWLBgAd/85je3Y8lm3M+xtQpJkrSNLrwQdvQPwy1aBJ/+9PDXn3HGGVx44YX82Z/9GQA33ngjt99+O83Nzdx8881Mnz6dtWvXcuihh3LyyScTEUNO56qrrqKlpYUnnniChx9+mAMPPHDgur/9279ll112oa+vj2OPPZaHH36YD33oQ1xxxRXceeed7Lbbbq+a3tNPP83111/PNddcw+mnn863vvUtli1bxrnnnss111zDYYcdxiWXXDLq4//c5z5HRPDII4/w5JNPctxxx/HUU0/xhS98gQ9/+MOcffbZbN68mb6+Pm699VbmzJnDLbfcAsD69etHnf5o3HJsrUKSJO1EFi9ezEsvvcTq1at56KGHmDVrFnvttRcpJT760Y+y//778/a3v53nn3+eF198cdjp3H333SxbtgyA/fffn/3333/guhtvvJEDDzyQxYsX89hjj/H444+POq599tmHRYsWAXDQQQexcuVK1q1bR3t7O4cddhgA733ve0edzj333DMwrv3224+9996bp556isMOO4y/+7u/4x/+4R947rnnmDp1KgsXLuSOO+7g4osv5ic/+QkzZswYdfqjccuxtQpJkrSNRtrCW0vvec97uOmmm/jNb37DGWecAcB1113HmjVreOCBB2hsbGTu3Ll0dXWNe9q//OUvufzyy7n//vuZNWsW55xzzpim09TUNHC6XC5vVdPYEd773vdyyCGHcMstt3DiiSfyL//yLxxzzDGsWLGCW2+9lY997GMce+yxXHrppds1H7ccW6uQJEk7mTPOOIMbbriBm266ife85z1AVil43eteR2NjI3feeSfPPffciNM44ogj+MY3vgHAo48+ysMPPwzAhg0baG1tZcaMGbz44ot8//vfH7hPW1sb7e3tYx7nzJkzaWtr47777gPghhtuGPU+hx9+ONdddx0ATz31FL/61a/Yd999efbZZ3nTm97Ehz70IU455RQefvhhVq9eTUtLC8uWLeOiiy5ixYoVYx7bcNxybK1CkiTtZObPn097ezt77LEHu+++OwBnn30273rXu1i4cCFLlixhv/32G3EaF1xwAeeeey7z5s1j3rx5HHTQQQAccMABLF68mP3224+99tqLpUuXDtzn/PPP5/jjj2fOnDkDX5QbzZe+9CXOO+88SqUSRx555KjVhz/90z/lggsuYOHChTQ0NHDttdfS1NTEjTfeyNe+9jUaGxt5wxvewEc/+lHuv/9+LrroIkqlEo2NjVx11VVjGtNIIqW03RPZUZYsWZJG27/eDnfOOXDXXbBy5cTOV5Ik7ZSeeOIJ5s2bV+9h7DQ2btw4sJ/lT33qU7zwwgt85jOfmbD5D/V8RcQDKaUlQ93eLcfWKiRJkmrmlltu4e///u/p7e1l77335tprr633kEZkOLZWIUmSVDNnnHHGwJcGdwZ+Ic+9VUiSJClnOLZWIUmSxmkyfWdLw9uW58lwbK1CkiSNQ3NzMy+//LIBeZJLKfHyyy/T3Nw8rvvZObZWIUmSxmHPPfdk1apVrFmzpt5D0Siam5vZc889x3Ufw7G1CkmSNA6NjY3ss88+9R6GasRahbUKSZIk5QzH1iokSZKUMxxbq5AkSVKuZuE4IvaNiAerDhsi4sJazW+bWauQJElSrmZfyEsp/RewCCAiysDzwM21mt82s1YhSZKk3ETVKo4FnkkpPTdB8xs7axWSJEnKTVQ4PhO4foLmNT7lcnbsjrwlSZIKr+bhOCKmACcD/zbM9edHxPKIWF6XnWmX8kVgtUKSJKnwJmLL8QnAipTSi0NdmVK6OqW0JKW0ZPbs2RMwnEEq4dhqhSRJUuFNRDg+i8laqYAttQq3HEuSJBVeTcNxRLQC7wC+Xcv5bBdrFZIkScrVbFduACmlDmDXWs5ju1mrkCRJUs5fyLNWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzh2FqFJEmScoZjaxWSJEnKGY6tVUiSJClnOLZWIUmSpJzhOCI7NhxLkiQVnuEYsmqFtQpJkqTCMxxDVq1wy7EkSVLh1TQcR8TMiLgpIp6MiCci4rBazm+blUqGY0mSJNFQ4+l/BrgtpfTuiJgCtNR4ftvGWoUkSZKoYTiOiBnAEcA5ACmlzcDmWs1vu1irkCRJErWtVewDrAG+EhE/j4gvRkRrDee37axVSJIkidqG4wbgQOCqlNJioAO4ZPCNIuL8iFgeEcvXrFlTw+GMwFqFJEmSqG04XgWsSindl5+/iSwsbyWldHVKaUlKacns2bNrOJwRWKuQJEkSNQzHKaXfAL+OiH3zi44FHq/V/LaLtQpJkiRR+71V/DlwXb6nimeBc2s8v21jrUKSJEnUOBynlB4EltRyHjuEtQpJkiThL+RlrFVIkiQJw3HGWoUkSZIwHGesVUiSJAnDccZahSRJkjAcZ6xVSJIkCcNxxlqFJEmSMBxnrFVIkiQJw3HGWoUkSZIwHGesVUiSJAnDccZahSRJkjAcZ6xVSJIkCcNxxlqFJEmSMBxnrFVIkiQJw3HGWoUkSZIwHGfccixJkiQMxxk7x5IkScJwnLFWIUmSJAzHGWsVkiRJwnCcsVYhSZIkDMcZaxWSJEnCcJyxViFJkiQMxxlrFZIkScJwnLFWIUmSJAzHGWsVkiRJwnCcsVYhSZIkDMcZaxWSJEnCcJyxViFJkiQMxxlrFZIkScJwnLFWIUmSJAzHGWsVkiRJwnCcsVYhSZIkDMcZaxWSJEnCcJyxViFJkiQMxxlrFZIkScJwnLFWIUmSJAzHGWsVkiRJwnCcsVYhSZIkDMcZaxWSJEnCcJyxViFJkiQMxxlrFZIkScJwnLFWIUmSJAzHGWsVkiRJwnCcsVYhSZIkDMeZUr4YDMiSJEmFZjgGw7EkSZIAw3GmXM6ODceSJEmFZjiGLVuO3WOFJElSoRmOwVqFJEmSAMNxxlqFJEmSMBxnrFVIkiQJw3HGWoUkSZIwHGesVUiSJAloqOXEI2Il0A70Ab0ppSW1nN82s1YhSZIkahyOc0enlNZOwHy2nbUKSZIkYa0iY61CkiRJ1D4cJ+AHEfFARJxf43ltO2sVkiRJova1it9LKT0fEa8D7oiIJ1NKd1ffIA/N5wO88Y1vrPFwhmGtQpIkSdR4y3FK6fn8+CXgZuDgIW5zdUppSUppyezZs2s5nOFZq5AkSRI1DMcR0RoRbZXTwHHAo7Wa33axViFJkiRqW6t4PXBzRFTm842U0m01nN+2s1YhSZIkahiOU0rPAgfUavo7lLUKSZIk4a7cMtYqJEmShOE4Y61CkiRJGI4z1iokSZKE4ThjrUKSJEkYjjPWKiRJkoThOGOtQpIkSRiOM9YqJEmShOE4Y61CkiRJGI4z1iokSZKE4ThjrUKSJEkYjjPWKiRJkoThOGOtQpIkSRiOM245liRJEobjjJ1jSZIkYTjOWKuQJEkShuOMtQpJkiRhOM5Yq5AkSRKG44y1CkmSJGE4zlirkCRJEobjjLUKSZIkYTjOWKuQJEkShuOMtQpJkiRhOM5Yq5AkSRKG44y1CkmSJDHGcBwRSyOiNT+9LCKuiIi9azu0CWStQpIkSYx9y/FVQGdEHAD8D+AZ4F9rNqqJZq1CkiRJjD0c96aUEnAK8NmU0ueAttoNa4JZq5AkSRLQMMbbtUfER4BlwBERUQIaazesCWatQpIkSYx9y/EZQDfw/pTSb4A9gX+q2agmmrUKSZIkMY4tx8BnUkp9EfFWYD/g+toNa4JZq5AkSRJj33J8N9AUEXsAPwD+ELi2VoOacNYqJEmSxNjDcaSUOoHTgM+nlN4DLKjdsCaYtQpJkiQxjnAcEYcBZwO3jPO+k5+1CkmSJDH2gHsh8BHg5pTSYxHxJuDO2g1rglmrkCRJEmP8Ql5K6cfAjyNiWkRMSyk9C3yotkObQNYqJEmSxNh/PnphRPwceAx4PCIeiIj5tR3aBLJWIUmSJMZeq/gX4C9SSnunlN5I9hPS19RuWBPMWoUkSZIYezhuTSkNdIxTSncBrTUZUT1EZMfWKiRJkgptrD8C8mxE/DXwtfz8MuDZ2gypTspltxxLkiQV3Fi3HP8RMBv4dn6YnV/22lEqGY4lSZIKbqx7q3iF19LeKYZSKlmrkCRJKrgRw3FE/F8gDXd9SunkHT6ierFWIUmSVHijbTm+fEJGMRlYq5AkSSq8EcNx/uMfW4mIA1NKK2o3pDqxViFJklR4Y/1CXrUv7vBRTAbWKiRJkgpvW8Jx7PBRTAbWKiRJkgpvW8LxJ3b4KCYDaxWSJEmFN2I4johlVaeXAqSUvpOf/2BthzbBrFVIkiQV3mhbjv+i6vT/GXSdPwIiSZKk15TRwnEMc3qo8zs3axWSJEmFN1o4TsOcHur8zs1ahSRJUuGN9iMg+0XEw2RbiX8nP01+/k01HdlEs1YhSZJUeKOF43kTMorJwFqFJElS4Y32C3nPVZ+PiF2BI4BfpZQeGMsMIqIMLAeeTym9c1sHWnPWKiRJkgpvtF25/XtELMhP7w48SraXiq9FxIVjnMeHgSe2a5QTwVqFJElS4Y32hbx9UkqP5qfPBe5IKb0LOIQx7MotIvYETmJn+MlpaxWSJEmFN1o47qk6fSxwK0BKqR0Yy2bWTwN/NdJtI+L8iFgeEcvXrFkzhknWiLUKSZKkwhstHP86Iv48Ik4FDgRuA4iIqUDjSHeMiHcCL43WTU4pXZ1SWpJSWjJ79uxxDH0Hs1YhSZJUeKOF4/cD84FzgDNSSuvyyw8FvjLKfZcCJ0fESuAG4JiI+Pq2D7XGrFVIkiQV3mh7q3gJ+JMhLr8TuHOU+34E+AhARBwF/GVKadk2j7TWrFVIkiQV3ojhOCK+N9L1KaWTd+xw6shahSRJUuGN9iMghwG/Bq4H7iP7ZbxxSyndBdy1LfedMNYqJEmSCm+0cPwG4B3AWcB7gVuA61NKj9V6YBPOWoUkSVLhjfiFvJRSX0rptpTS+8i+hPcL4K6I+OCEjG4iWauQJEkqvNG2HBMRTWQ/5HEWMBe4Eri5tsOqA2sVkiRJhTfaF/L+FVhA9uMfn6j6tbzXnnIZNm+u9ygkSZJUR6NtOV4GdAAfBj4UMfB9vABSSml6Dcc2saxVSJIkFd5o+zke7UdCXjusVUiSJBVeccLvaNxbhSRJUuEZjiusVUiSJBWe4bjCWoUkSVLhGY4rrFVIkiQVnuG4wlqFJElS4RmOK6xVSJIkFZ7huMJahSRJUuEZjiusVUiSJBWe4bjCWoUkSVLhGY4rrFVIkiQVnuG4wlqFJElS4RmOK6xVSJIkFZ7huMJahSRJUuEZjiusVUiSJBWe4bjCWoUkSVLhGY4rrFVIkiQVnuG4wlqFJElS4RmOK6xVSJIkFZ7huMJahSRJUuEZjiusVUiSJBWe4bjCWoUkSVLhGY4rrFVIkiQVnuG4wlqFJElS4RmOKyrhOKV6j0SSJEl1YjiuKJezY8OxJElSYRmOK0r5orBaIUmSVFiG44pKOHaPFZIkSYVlOK6o1CrccixJklRYhuMKaxWSJEmFZziusFYhSZJUeIbjCmsVkiRJhWc4rrBWIUmSVHiG4wprFZIkSYVnOK6wViFJklR4huMKaxWSJEmFZziusFYhSZJUeIbjCmsVkiRJhWc4rrBWIUmSVHiG4wprFZIkSYVnOK6wViFJklR4huMKaxWSJEmFZziusFYhSZJUeIbjCmsVkiRJhWc4rrBWIUmSVHiG4wprFZIkSYVnOK6wViFJklR4NQvHEdEcET+LiIci4rGI+ESt5rVDWKuQJEkqvIYaTrsbOCaltDEiGoF7IuL7KaV7azjPbWetQpIkqfBqFo5TSgnYmJ9tzA+pVvPbbtYqJEmSCq+mneOIKEfEg8BLwB0ppfuGuM35EbE8IpavWbOmlsMZmbUKSZKkwqtpOE4p9aWUFgF7AgdHxIIhbnN1SmlJSmnJ7NmzazmckVmrkCRJKrwJ2VtFSmkdcCdw/ETMb5tYq5AkSSq8Wu6tYnZEzMxPTwXeATxZq/ltN2sVkiRJhVfLvVXsDnw1IspkIfzGlNK/13B+28dahSRJUuHVcm8VDwOLazX9Hc5ahSRJUuH5C3kV1iokSZIKz3BcYa1CkiSp8AzHFdYqJEmSCs9wXGGtQpIkqfAMxxXWKiRJkgrPcFxhrUKSJKnwDMcV1iokSZIKz3BcYa1CkiSp8AzHFdYqJEmSCs9wXGGtQpIkqfAMxxXWKiRJkgrPcFxhrUKSJKnwDMcV1iokSZIKz3BcYa1CkiSp8AzHFdYqJEmSCs9wXGGtQpIkqfAMxxXWKiRJkgrPcFxhrUKSJKnwDMcV1iokSZIKz3BcYa1CkiSp8AzHFdYqJEmSCs9wXGGtQpIkqfAMxxXWKiRJkgrPcFzhlmNJkqTCMxxXRGQHw7EkSVJhGY6rlUrWKiRJkgrMcFytXHbLsSRJUoEZjquVSoZjSZKkAjMcV7NWIUmSVGiG42rWKiRJkgrNcFzNWoUkSVKhGY6rWauQJEkqNMNxNWsVkiRJhWY4rmatQpIkqdAMx9WsVUiSJBWa4biatQpJkqRCMxxXs1YhSZJUaIbjatYqJEmSCs1wXM1ahSRJUqEZjqtZq5AkSSo0w3E1axWSJEmFZjiuZq1CkiSp0AzH1axVSJIkFZrhuJq1CkmSpEIzHFezViFJklRohuNq1iokSZIKzXBczVqFJElSoRmOq1mrkCRJKjTDcTVrFZIkSYVmOK5mrUKSJKnQDMfVrFVIkiQVmuG4mrUKSZKkQqtZOI6IvSLizoh4PCIei4gP12peO4y1CkmSpEJrqOG0e4H/kVJaERFtwAMRcUdK6fEaznP7WKuQJEkqtJptOU4pvZBSWpGfbgeeAPao1fx2CGsVkiRJhTYhneOImAssBu6biPltM2sVkiRJhVbzcBwR04BvARemlDYMcf35EbE8IpavWbOm1sMZmbUKSZKkQqtpOI6IRrJgfF1K6dtD3SaldHVKaUlKacns2bNrOZzRWauQJEkqtFrurSKALwFPpJSuqNV8dqiGBti8ud6jkCRJUp3UcsvxUuAPgWMi4sH8cGIN57f9Xvc6ePHFeo9CkiRJdVKzXbmllO4BolbTr4k99oCXX4bubmhqqvdoJEmSNMH8hbxqc+Zkxy+8UN9xSJIkqS4Mx9Uq4Xj16vqOQ5IkSXVhOK5mOJYkSSo0w3E1w7EkSVKhGY6r7borNDYajiVJkgrKcFwtItt6bDiWJEkqJMPxYIZjSZKkwjIcD2Y4liRJKizD8WCGY0mSpMIyHA82Zw6sXw8dHfUeiSRJkiaY4Xgwd+cmSZJUWIbjwQzHkiRJhWU4HmyPPbJjw7EkSVLhGI4Hc8uxJElSYRmOB5s+HVpaDMeSJEkFZDgezF/JkyRJKizD8VAMx5IkSYVkOB6K4ViSJKmQDMdDqYTjlOo9EkmSJE0gw/FQ5syBzk7YsKHeI5EkSdIEMhwPxd25SZIkFZLheCiGY0mSpEIyHA/FcCxJklRIhuOh7L57dvz88/UdhyRJkiaU4Xgo06Zlv5TnlmNJkqRCMRwPx30dS5IkFY7heDh77GE4liRJKhjD8XD22ANWrqz3KCRJkjSBCh+Or7oKrrxyiCsOOgheeAF+/esJH5MkSZLqo/Dh+PbbhwnHS5dmx//5nxM6HkmSJNVP4cPxkUfCM88Msde2Aw6A1lbDsSRJUoEUPhwfcUR2fPfdg65oaIBDDjEcS5IkFUjhw/GiRdDWBj/+8RBXLl0KDz0E7e0TPi5JkiRNvMKH43IZfu/3hthyDFk47u+H++6b8HFJkiRp4hU+HEPWO37iCXjppUFXHHooRFitkCRJKgjDMSP0jmfMgIULDceSJEkFYTgGliyBlpYRqhX33gt9fRM+LkmSJE0swzHQ2Aj/7b+N8KW89nZ45JEJH5ckSZImluE4d8QRWf797W8HXeGPgUiSJBWG4Th35JGQEtxzz6Ar9t4b5swxHEuSJBWA4Th38MHQ1DREtSIi23r84x9Db+1FA4wAABNcSURBVG9dxiZJkqSJYTjONTdnP4j3ox8NceWZZ8Lq1XDddRM+LkmSJE0cw3GV007LfhDvVd+9O/VUWLwYPvEJ6Ompy9gkSZJUe4bjKu99LzQ0wFe/OuiKCPhf/wt++Uv4ylfqMjZJkiTVnuG4yuzZ8M53wte/PkS9+IQT4LDD4G/+Brq66jI+SZIk1ZbheJBzzoEXX4Tbbx90RWXr8apVcPXV9RiaJEmSasxwPMgJJ8Buu8G11w5x5THHwNFHZ1uPn356oocmSZKkGjMcDzJlCpx9Nnzve/Dyy0Pc4LOfzbYiH3kkPPnkhI9PkiRJtWM4HsI558DmzXDDDUNc+ba3wZ13Qn8/HHUUPPbYBI9OkiRJtWI4HsKiRXDAAfD5z0Nn5xA3mD8f7roLSqXsd6e/8AV/IESSJOk1wHA8jL/5G3jiCXjPe4bZtfF++8Hdd8OCBXDBBVmivu227DeoJUmStFMyHA/jXe+Cq66CW2+FP/qjrEXxKm9+c7YF+Vvfgk2bsm/zLVwIn/scbNgw0UOWJEnSdqpZOI6IL0fESxHxaK3mUWt//MfZFuSvfx3OOy+rF79qw3BE9tN6jz8OX/4ytLTABz8Ir399Vrm4+GL4zney/cNJkiRpUotUoxpARBwBbAT+NaW0YCz3WbJkSVq+fHlNxrOtUoK/+iu4/PLs/JvfDEuXZhuK16/Pfg9k6tQsE0+ZAu3tsP75drpeeIVZm15g1/aVzErZbi9623ahd/Yb6G3bhb5pM+hrbaNl16lMn93M9BlBWxtMn54dKqfb2rKt1p2d2bzKZWhtzeZXOa6cLpfH//h6e7PaSHNzlvMlSZJe6yLigZTSkiGvq1U4zmc8F/j3nTkcV6xene3e7bvfhUce2RJem5uz0NrRke3hoq0NZszILl+3Dtau6ee3a/oo9fbQ0NdFQ88mGvq6KdNHiX46aWED02mnjbSdG/KnTMlCcmNjFqgHHyKycTU3Z7dfty4L85DdZ9as7DE1NWXnp0zJDpXTleNSKftg8MorWXtk1qxsQ/ns2dmHia4u6O7OpjNtWjYmyIJ49aGn59WXVS5vbMzuO21aNs+ILQcY/fRotyuXs+dp5sxsfBs2wG9/my2TiOxnxBsbtz6G7PGllD22qVOzQ+VDSUrZerBuXXbo78+ub27OTnd0ZIeIbL4zZmTT7uzMDr29W56f6kNjI/T1ZddXjqtP9/Vl825tzZZXS0s2nw0bYOPGbOyV57JUtYpVfxgql7NDQ8PWx0Nd1tCQzXfTpmzclePOzuy6170uWxeam7P1ZP36bDx9fdlyKJW2PP5p04Z+bNXn+/uzsTc1Zcc9PVuvYy0t2XLu7t6yjFPaMv5yOZtnuZwtj7Vrs0O5DLvumu3XvKlp6HGUSlue5ylTtn49VR7P4NdY9YflzZuzsW7e/OrH0N2dHZfL2XNcvb41NmbXbdqU3T+lrdft0Q4Vlbf3ynFjY7bcZ8zIxlJ57ob8XsUQ60n1dDdtypZ1V9eW5TvUulL9HFTG19mZrZ8bNmTLprLcp07dsm40NW15vY3lUP36rH7eyuV8o0W+MaOyMaG5OZt3d3d2+bp12Xvaxo3Z+jtnTrYud3dn42xvz567ymt606YtjyGlLY+1uXnLRovKYerU7PF1dGTT7+7ess6Uy1tu19CwZd0ql7PXR1tb9tiefz47bNgAu+ySrbczZmy5/eDXzFDvYZXXbk/P1ofKuj749o2N2fM11DIe6vRQl/X3b/08jfV89fo33Hv8jry8t3fL+lBZ9pW/Xe3t2WHz5q3fV6rX78r50iQsqzY2blnv+/u3rLfd3cM/54PXkc2bt6xXlTxQOZRK2fTWrcvW7+nTs/Vz5szstVX5W1R5bUZkx5XDLrtkOWKiGY4nm44OeOYZ+MUvsl/ce/550qrn6Xi5i/ZXetmwrp/29f1s2FhiQ0eZMr1MZRPNdNFHmU5a6KD11ccxjY6G6fSWmyiVS5QaglI5KAWUSon+KNOdmuiiiUSJmQ0bmdnYwZRyL+v72nilr40Nfa1sTo30pEY2pwZ6UkN2vr9h4HxvKjOjcRO7NHXQNqWLVza38mLXDNZ0tVGOfpobeplS7qO7r4GOniY29kzJAmepf6tDY7mfhuinodRHQ/RtdV1vKrNxcxPtm5vY3F/O3jjJ3tUSseXNNz89cN0opyv36+0v0dP36k3trU1ZSujtC3r6SvSnbXuna2rsoxSJrp4yKWVjaGzop7Wpl/4UbOhsfNV9yqV++von4Tvra1C5nPI/wv67RJLq6ZOfhL/+64mf70jhuGGiBzNYRJwPnA/wxje+sc6jmSCtrbD//tkhF8C0/LB79W37+7OPXJVNcNWHSteisnlp03roenHryyof9yqbuaqPKx/Th9oENvgw0m3K/TC1H6YMuk30Q0M/lKou66m6TfXmrIaG7OPk4HnV8MNbF02sYyYbmcYM1jOTdTR2b71Lvn6CXhrozV8qJbJvZm5mCp20sImp9Fdt8W+lg5mso7mnG4CU37ZEP429vdC7ZbrttNFDI6100EwX0Q+9lOmmiS6a2cRUumimh0Ya6KVMXz6SLacrxwAdtNJOG520MI2NTGcD09hIXz7NzUzZ6gNGRSLop0QvDfRRftXx4Mt6aaCBXlroZCqbtjruoZE1zOYlXscmpjKTdcxkHa105FPqo5cG1jNjYNk3DEx168dYOQSJzUyhmya6aWIKm2mmiya66aZp4HmYwmZa6aCVDoJEP6WB8VdOT2Mju7GWGX3r6afEOmaylt3YzJQhl3E/JTYxlU1MpYdGSvSPeOinRBfNdNJCN00000UzXTTSM/AYemikkR6msJlGegaWaw+NWx2qH2eJ/uzD3RgPQfa6qRxTKhOloDtNYX1fK+uZwWamDDxvjfRsuS1AZJt3Un78KhG00ElLKXt8KX+dDKwv0ZifLtGbsqWZUvbhtJ8SrXTQRjttaQPN0U05Zcu7i2bWpRm8wix6aMzGFLH1owuGfeT50Kqet2b6Upnppez10BzdbErNbGQam5iarVGxmeboZmasYxbraIlNrGE2q/vfwEtpNlOjK3stlTrppYHOlE23JbqYHu1Mjw2USAPPYXfa8t7QmabSSQudaSpl+phW6sxe75E9p6XI7rcpNdOZptITUwb+q9hLAxtTKxtTK4lgTvk37FH6DdNLG3mlfwZr0y5s6G+jVIKG6KOcb2goRxqYbm9k61JvaQo9pabstdvXTWPvpuwQPTTmGyT6o5zdjsYt62PKI0Jkf6Oql3H1OjawNXaI60v0D5wvRX82ncjegUr0b/Wcbrk+Oz+w8SNKA+9Vgw/jvjyyD8WDL2+IPppiM1NKvfRRpqOvmfb+bNNxW7mTtnInTbE5W79T/m6WSgPreV/KLu+nNOi1NOj186qX0+DrR/jA/qppjfLhPv/72ZMa6OhrpqOvmYjEjHIH0xs6aSptpjeV6enPNnr15BvAEtAYfTRGb3YoZadLkbbcrn/L7ftTML2hk5kNG2ktd7Ght4WXe2fwSu80ppY2M72hg2nlTZRI9BP0p1L2dydlf3sOKC0A3jXyY5lgbjlW/VT+VzzabQaH8eH+lzrcZWO5Ta2nNdb7jeV/l6NNf0ed3tb/ow73v/7xjmG8lw2+brjrq4+39bqxHtd7GpB92KwcIrK+QlNT9oG0+vU13Afg0cYy0vXV/8OuHA91WfVxterpbsv5Ikyj8hwOdZzSlue10rtoasq6H1OmZNMY/Pz39e3Ycdfy/GQaS63Pj+W2w3yQ3S5DrYc7+j7vfz+8733jn892mtRbjlVgY3nRRmwpdEmSJNVYLXfldj3wU2DfiFgVEe+v1bwkSZKkHaFmW45TSmfVatqSJElSLfjVeEmSJClnOJYkSZJyhmNJkiQpZziWJEmScoZjSZIkKWc4liRJknKGY0mSJClnOJYkSZJyhmNJkiQpZziWJEmScoZjSZIkKWc4liRJknKGY0mSJClnOJYkSZJyhmNJkiQpZziWJEmScpFSqvcYBkTEGuC5Osx6N2BtHea7s3J5jZ/LbHxcXuPnMhsfl9f4uczGx+U1fhO5zPZOKc0e6opJFY7rJSKWp5SW1HscOwuX1/i5zMbH5TV+LrPxcXmNn8tsfFxe4zdZlpm1CkmSJClnOJYkSZJyhuPM1fUewE7G5TV+LrPxcXmNn8tsfFxe4+cyGx+X1/hNimVm51iSJEnKueVYkiRJyhU+HEfE8RHxXxHxi4i4pN7jmWwiYq+IuDMiHo+IxyLiw/nlH4+I5yPiwfxwYr3HOllExMqIeCRfLsvzy3aJiDsi4un8eFa9xzlZRMS+VevRgxGxISIudB3bWkR8OSJeiohHqy4bcr2KzJX5+9rDEXFg/UZeH8Msr3+KiCfzZXJzRMzML58bEZuq1rUv1G/k9THM8hr2NRgRH8nXr/+KiP+vPqOur2GW2TerltfKiHgwv9x1bPg8Menexwpdq4iIMvAU8A5gFXA/cFZK6fG6DmwSiYjdgd1TSisiog14APh94HRgY0rp8roOcBKKiJXAkpTS2qrL/hH4bUrpU/mHsFkppYvrNcbJKn9NPg8cApyL69iAiDgC2Aj8a0ppQX7ZkOtVHmL+HDiRbFl+JqV0SL3GXg/DLK/jgP9IKfVGxD8A5MtrLvDvldsV0TDL6+MM8RqMiLcB1wMHA3OAHwJvTSn1Teig62yoZTbo+v8NrE8pfdJ1bMQ8cQ6T7H2s6FuODwZ+kVJ6NqW0GbgBOKXOY5pUUkovpJRW5KfbgSeAPeo7qp3SKcBX89NfJXtD0KsdCzyTUqrHjwFNaimlu4HfDrp4uPXqFLI/2CmldC8wM//DVBhDLa+U0g9SSr352XuBPSd8YJPUMOvXcE4BbkgpdaeUfgn8guzvaaGMtMwiIsg2Il0/oYOaxEbIE5Pufazo4XgP4NdV51dh8BtW/sl3MXBfftEH8391fNmawFYS8IOIeCAizs8ve31K6YX89G+A19dnaJPemWz9x8R1bGTDrVe+t43uj4DvV53fJyJ+HhE/jojD6zWoSWio16Dr1+gOB15MKT1ddZnrWG5Qnph072NFD8cao4iYBnwLuDCltAG4CvgdYBHwAvC/6zi8yeb3UkoHAicAf5b/621AyrpMxe0zDSMipgAnA/+WX+Q6Ng6uV2MXEf8T6AWuyy96AXhjSmkx8BfANyJier3GN4n4Gtx2Z7H1B33XsdwQeWLAZHkfK3o4fh7Yq+r8nvllqhIRjWQr8nUppW8DpJReTCn1pZT6gWso4L/UhpNSej4/fgm4mWzZvFj5d1B+/FL9RjhpnQCsSCm9CK5jYzTceuV72zAi4hzgncDZ+R9i8nrAy/npB4BngLfWbZCTxAivQdevEUREA3Aa8M3KZa5jmaHyBJPwfazo4fh+4C0RsU++1epM4Ht1HtOkkvemvgQ8kVK6oury6t7PqcCjg+9bRBHRmn/RgIhoBY4jWzbfA96X3+x9wHfrM8JJbastLa5jYzLcevU94L/n3/Y+lOxLQS8MNYEiiYjjgb8CTk4pdVZdPjv/MigR8SbgLcCz9Rnl5DHCa/B7wJkR0RQR+5Atr59N9PgmsbcDT6aUVlUucB0bPk8wCd/HGiZiJpNV/o3lDwK3A2Xgyymlx+o8rMlmKfCHwCOVXdIAHwXOiohFZP/+WAn8cX2GN+m8Hrg5ew+gAfhGSum2iLgfuDEi3g88R/ZFDeXyDxLvYOv16B9dx7aIiOuBo4DdImIVcBnwKYZer24l+4b3L4BOsj1/FMowy+sjQBNwR/4avTel9CfAEcAnI6IH6Af+JKU01i+nvSYMs7yOGuo1mFJ6LCJuBB4nq6f8WdH2VAFDL7OU0pd49XcnwHUMhs8Tk+59rNC7cpMkSZKqFb1WIUmSJA0wHEuSJEk5w7EkSZKUMxxLkiRJOcOxJEmSlDMcS9IkEhH/MyIey3+y98GIOCQiLoyIlnqPTZKKwF25SdIkERGHAVcAR6WUuiNiN2AK8P+AJSmltXUdoCQVgFuOJWny2B1Ym1LqBsjD8LuBOcCdEXEnQEQcFxE/jYgVEfFvETEtv3xlRPxjRDwSET+LiDfnl78nIh6NiIci4u76PDRJ2jm45ViSJok85N4DtAA/BL6ZUvpxRKwk33Kcb03+NnBCSqkjIi4GmlJKn8xvd01K6W8j4r8Dp6eU3hkRjwDHp5Sej4iZKaV1dXmAkrQTcMuxJE0SKaWNwEHA+cAa4JsRcc6gmx0KvA34z/wnWN8H7F11/fVVx4flp/8TuDYizgPKtRm9JL02NNR7AJKkLVJKfcBdwF35Ft/3DbpJAHeklM4abhKDT6eU/iQiDgFOAh6IiINSSi/v2JFL0muDW44laZKIiH0j4i1VFy0CngPagbb8snuBpVV94taIeGvVfc6oOv5pfpvfSSndl1K6lGyL9F41fBiStFNzy7EkTR7TgP8TETOBXuAXZBWLs4DbImJ1SunovGpxfUQ05ff7GPBUfnpWRDwMdOf3A/inPHQH8CPgoQl5NJK0E/ILeZL0GlH9xb16j0WSdlbWKiRJkqScW44lSZKknFuOJUmSpJzhWJIkScoZjiVJkqSc4ViSJEnKGY4lSZKknOFYkiRJyv3/+us9NpcB7f8AAAAASUVORK5CYII=\n","text/plain":["<Figure size 864x576 with 1 Axes>"]},"metadata":{"tags":[],"needs_background":"light"}}]},{"cell_type":"markdown","metadata":{"id":"LRDJM3ECArls"},"source":["## **4. 利用模型对用户未评分的电影进行预测，根据预测分值，对用户推荐他没看过的电影**"]},{"cell_type":"markdown","metadata":{"id":"T36nVcojeiEv"},"source":["### 4.1 定义预测函数"]},{"cell_type":"code","metadata":{"id":"fDzzf4JYAyQX","executionInfo":{"status":"ok","timestamp":1605472072694,"user_tz":-480,"elapsed":1356,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}}},"source":["#prediction\n","def Prediction(model, movies, user_id, nb_recommend):\n","    user_input = Variable(test_set[user_id - 1]).unsqueeze(0).to(device)\n","    predict_output, _, _ = model.forward(user_input)\n","    predict_output = predict_output.cpu().detach().numpy()\n","    user_input = user_input.cpu().detach().numpy()\n","    predicted_result = np.vstack([user_input, predict_output])\n","    recommend = np.array(predicted_result)\n","    trian_movie_id = np.array([i for i in range(1, nb_movies+1)])#create a temporary index for movies since we are going to delete some movies that the user had seen, 创建一个类似id的index，排序用\n","    recommend = np.row_stack((recommend, trian_movie_id))#insert that index into the result array, 把index插入结果\n","    recommend = recommend.T#transpose row and col 数组的行列倒置\n","    recommend = recommend.tolist()#tansfer into list for further process转化为list以便处理\n","\n","    movie_not_seen = []#delete the rows comtaining the movies that the user had seen 删除users看过的电影\n","    for i in range(len(recommend)):\n","        if recommend[i][0] == 0.0:\n","            movie_not_seen.append(recommend[i])\n","\n","    movie_not_seen = sorted(movie_not_seen, key=itemgetter(1), reverse=True)#sort the movies by mark 按照预测的分数降序排序\n","\n","    recommend_movie = []#create list for recommended movies with the index we created 推荐的top20\n","    for i in range(0, nb_recommend):\n","        recommend_movie.append(movie_not_seen[i][2])\n","\n","    recommend_index = []#get the real index in the original file of 'movies.dat' by using the temporary index这20部电影在原movies文件里面真正的index\n","    for i in range(len(recommend_movie)):\n","        recommend_index.append(movies[(movies.iloc[:,0]==recommend_movie[i])].index.tolist())\n","\n","    recommend_movie_name = []#get a list of movie names using the real index将对应的index输入并导出movie names\n","    for i in range(len(recommend_index)):\n","        np_movie = movies.iloc[recommend_index[i],1].values#transefer to np.array\n","        list_movie = np_movie.tolist()#transfer to list\n","        recommend_movie_name.append(list_movie)\n","\n","    print('Highly Recommended Moives for You:\\n')\n","    for i in range(len(recommend_movie_name)):\n","        print(str(recommend_movie_name[i]))\n","    \n","    return recommend_movie_name"],"execution_count":35,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"snJRCgx3emKd"},"source":["### 4.2 对指定用户，从他没看过的电影中推荐他最可能喜欢的几部电影"]},{"cell_type":"code","metadata":{"id":"LMC_L9axA4VQ","executionInfo":{"status":"ok","timestamp":1605472083401,"user_tz":-480,"elapsed":979,"user":{"displayName":"Yuxin JIANG","photoUrl":"","userId":"08237551511886881536"}},"outputId":"ce7b5bfc-95d8-46a1-9e3c-e19a6c374a8e","colab":{"base_uri":"https://localhost:8080/"}},"source":["# 指定用户的ID\n","user_id = 367\n","# 指定推荐的电影数目\n","nb_recommend = 20\n","# 读取原电影数据以便在推荐时查询\n","movies = pd.read_csv('ml-1m/movies.dat', sep = '::', header = None, engine = 'python', encoding = 'latin-1')\n","# 使用VAE模型进行预测推荐\n","movie_for_you = Prediction(model = vae, movies = movies, user_id = user_id, nb_recommend = nb_recommend)"],"execution_count":36,"outputs":[{"output_type":"stream","text":["Highly Recommended Moives for You:\n","\n","['Ulysses (Ulisse) (1954)']\n","['Song of Freedom (1936)']\n","['Paralyzing Fear: The Story of Polio in America, A (1998)']\n","['Gate of Heavenly Peace, The (1995)']\n","['Mamma Roma (1962)']\n","['Skipped Parts (2000)']\n","['Window to Paris (1994)']\n","['Lamerica (1994)']\n","['24 7: Twenty Four Seven (1997)']\n","['Lured (1947)']\n","['Smashing Time (1967)']\n","['Seven Chances (1925)']\n","['I Am Cuba (Soy Cuba/Ya Kuba) (1964)']\n","['Follow the Bitch (1998)']\n","['Baby, The (1973)']\n","['Firelight (1997)']\n","['Apple, The (Sib) (1998)']\n","['Eighth Day, The (Le Huitième jour ) (1996)']\n","['Time of the Gypsies (Dom za vesanje) (1989)']\n","['Message to Love: The Isle of Wight Festival (1996)']\n"],"name":"stdout"}]}]}