{"nbformat":4,"nbformat_minor":0,"metadata":{"kernelspec":{"display_name":"Python 3.8.5 64-bit ('myenv': conda)","language":"python","name":"python_defaultSpec_1598967474157"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.8.5-final"},"colab":{"name":"seq2seq-Train.ipynb","provenance":[],"collapsed_sections":["YPkpBrj7bgFd","Wp-kdiUTbgFe"],"toc_visible":true},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"id":"ZRRxzlqKboUx","colab_type":"code","outputId":"7038003e-8170-482e-cec5-8d2e2bec7f9e","executionInfo":{"status":"ok","timestamp":1591350342864,"user_tz":-480,"elapsed":2664,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"source":["import tensorflow as tf\n","tf.__version__"],"execution_count":1,"outputs":[{"output_type":"execute_result","data":{"text/plain":"'2.3.0'"},"metadata":{},"execution_count":1}]},{"cell_type":"code","metadata":{"id":"EOxuGQtG3J-W","colab_type":"code","outputId":"3213ef20-e7e5-4adc-b1ae-d6da87d33ba7","executionInfo":{"status":"ok","timestamp":1591350344898,"user_tz":-480,"elapsed":4514,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":306},"tags":[]},"source":["!nvidia-smi"],"execution_count":2,"outputs":[{"output_type":"stream","name":"stdout","text":"Tue Sep 01 21:38:04 2020       \n+-----------------------------------------------------------------------------+\n| NVIDIA-SMI 452.06       Driver Version: 452.06       CUDA Version: 11.0     |\n|-------------------------------+----------------------+----------------------+\n| GPU  Name            TCC/WDDM | Bus-Id        Disp.A | Volatile Uncorr. ECC |\n| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |\n|===============================+======================+======================|\n|   0  GeForce RTX 2060   WDDM  | 00000000:01:00.0  On |                  N/A |\n| 36%   41C    P8    15W / 170W |   1353MiB /  6144MiB |     10%      Default |\n+-------------------------------+----------------------+----------------------+\n                                                                               \n+-----------------------------------------------------------------------------+\n| Processes:                                                                  |\n|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |\n|        ID   ID                                                   Usage      |\n|=============================================================================|\n|    0   N/A  N/A      1204    C+G   Insufficient Permissions        N/A      |\n|    0   N/A  N/A      1376    C+G   ...cw5n1h2txyewy\\LockApp.exe    N/A      |\n|    0   N/A  N/A      2984    C+G   ...y\\ShellExperienceHost.exe    N/A      |\n|    0   N/A  N/A      5040    C+G   ...cent\\WeChat\\WeChatApp.exe    N/A      |\n|    0   N/A  N/A      6580    C+G   C:\\Windows\\explorer.exe         N/A      |\n|    0   N/A  N/A      7216    C+G   ...w5n1h2txyewy\\SearchUI.exe    N/A      |\n|    0   N/A  N/A      7240    C+G   ...sk\\BaiduNetdiskRender.exe    N/A      |\n|    0   N/A  N/A      8828    C+G   ...ekyb3d8bbwe\\YourPhone.exe    N/A      |\n|    0   N/A  N/A      9168    C+G   ...es.TextInput.InputApp.exe    N/A      |\n|    0   N/A  N/A     10964    C+G   ...perience\\NVIDIA Share.exe    N/A      |\n|    0   N/A  N/A     15708    C+G   ...in\\TBC\\xlbrowsershell.exe    N/A      |\n|    0   N/A  N/A     20144    C+G   ...icrosoft VS Code\\Code.exe    N/A      |\n|    0   N/A  N/A     21336    C+G   ...me\\Application\\chrome.exe    N/A      |\n|    0   N/A  N/A     22680    C+G   ...cent\\WeChat\\wechatweb.exe    N/A      |\n|    0   N/A  N/A     26952    C+G   ...f.win7\\steamwebhelper.exe    N/A      |\n|    0   N/A  N/A     27428    C+G   D:\\Captura\\captura.exe          N/A      |\n+-----------------------------------------------------------------------------+\n"}]},{"cell_type":"code","metadata":{"id":"jnTVMG743zuk","colab_type":"code","outputId":"4cfec163-ea91-4455-a0e3-bb9930c91bdf","executionInfo":{"status":"ok","timestamp":1591350367060,"user_tz":-480,"elapsed":20286,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":122},"tags":[]},"source":["!import google.colab\n","from google.colab import drive\n","drive.mount('/content/drive')"],"execution_count":7,"outputs":[{"output_type":"stream","name":"stderr","text":"'import' �����ڲ����ⲿ���Ҳ���ǿ����еĳ���\n���������ļ���\n"},{"output_type":"error","ename":"ModuleNotFoundError","evalue":"No module named 'google.colab'","traceback":["\u001b[1;31m---------------------------------------------------------------------------\u001b[0m","\u001b[1;31mModuleNotFoundError\u001b[0m                       Traceback (most recent call last)","\u001b[1;32m<ipython-input-7-ca83b1f08f28>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[0mget_ipython\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msystem\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'import google.colab'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[1;32mfrom\u001b[0m \u001b[0mgoogle\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcolab\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mdrive\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      3\u001b[0m \u001b[0mdrive\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmount\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'/content/drive'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n","\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'google.colab'"]}]},{"cell_type":"markdown","metadata":{"id":"liBgwP3_bgEN","colab_type":"text"},"source":["# Encoder And Decoder Model"]},{"cell_type":"markdown","metadata":{"id":"GZqjMB11bgEN","colab_type":"text"},"source":["# 0. GPU测试"]},{"cell_type":"code","metadata":{"id":"yFNzsq_VbgEO","colab_type":"code","colab":{},"tags":[]},"source":["%load_ext autoreload\n","%autoreload 2"],"execution_count":3,"outputs":[]},{"cell_type":"code","metadata":{"id":"6a-etFKdd2ke","colab_type":"code","colab":{}},"source":["# ! unzip drive/'My Drive'/kaikeba/project01/lecture01_code.zip"],"execution_count":9,"outputs":[]},{"cell_type":"code","metadata":{"id":"eMdU5w9abgES","colab_type":"code","outputId":"53043dae-6efb-495d-8a07-abb334c841eb","executionInfo":{"status":"ok","timestamp":1591350381199,"user_tz":-480,"elapsed":13826,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":170},"tags":[]},"source":["import warnings\n","warnings.filterwarnings(\"ignore\")\n","import sys\n","sys.path.append(r'C:\\Users\\Administrator.DESKTOP-BN41LK7\\Desktop\\lecture01_1_2\\code')\n","import pandas as pd\n","import numpy as np\n","import matplotlib.pyplot as plt\n","import matplotlib.ticker as ticker\n","from utils.data_loader import build_dataset,load_dataset,preprocess_sentence,load_test_dataset\n","from utils.wv_loader import load_embedding_matrix,Vocab\n","from utils.config import *\n","from gensim.models.word2vec import LineSentence, Word2Vec\n","from utils.gpu_utils import config_gpu\n","config_gpu()\n","import tensorflow as tf\n","from utils.plot_utils import plot_attention\n","from tqdm import tqdm\n","import time\n","from seq2seq_tf2.seq2seq_batcher import train_batch_generator"],"execution_count":4,"outputs":[{"output_type":"stream","name":"stdout","text":"C:\\Users\\Administrator.DESKTOP-BN41LK7\\Desktop\\lecture01_1_2\\code\nBuilding prefix dict from the default dictionary ...\n2020-09-01 21:39:07,638 : DEBUG : Building prefix dict from the default dictionary ...\nLoading model from cache C:\\Users\\ADMINI~1.DES\\AppData\\Local\\Temp\\jieba.cache\n2020-09-01 21:39:07,639 : DEBUG : Loading model from cache C:\\Users\\ADMINI~1.DES\\AppData\\Local\\Temp\\jieba.cache\nLoading model cost 0.530 seconds.\n2020-09-01 21:39:08,169 : DEBUG : Loading model cost 0.530 seconds.\nPrefix dict has been built successfully.\n2020-09-01 21:39:08,170 : DEBUG : Prefix dict has been built successfully.\n"}]},{"cell_type":"code","execution_count":14,"metadata":{"colab_type":"text","id":"Y3k4CQKzbgEV"},"outputs":[],"source":["# 0. 预处理数据"]},{"cell_type":"code","metadata":{"id":"zuLR2LHQfXCv","colab_type":"code","colab":{},"tags":["outputPrepend"]},"source":["# %%time\n","build_dataset(train_data_path,test_data_path)"],"execution_count":12,"outputs":[{"output_type":"stream","name":"stdout","text":"     大众      朗逸   \n19998  Q19999       金杯      雷龙   \n19999  Q20000     东风小康    东风风光   \n\n                                                Question  \\\n0                                             帕萨特 烧 机油 ？   \n1                                             修 一下 钱 换 修   \n2             帕萨特 领域 喇叭 坏 店里 说 方向盘 里线 坏 换 一根 两三百 不 感觉 太贵   \n3                                          发动机 漏气 会 征兆 ？   \n4      请问 那天 右后 胎扎 订 ， 补 胎后 跑 高速 80 有点 抖 ， 110 时速 以上 ...   \n...                                                  ...   \n19995  路虎 极光 2.0 t 发动机 ， 进气 链轮 损坏 更换 进气 链轮 后 发动 一会儿 自...   \n19996  别克 英朗XT 别撞 后 ， 发现 左 雾灯 下雨 后 起雾 ， 更换 新 原厂 雾灯 需要...   \n19997         师傅 ， 想 问 下车 一周 开 两次 周一 开 过来 周五 开回去 对车 影响 ？   \n19998  你好 大师 ， 车前 分泵 ， 制动 钳 导管 之间 晃动 ， 颠簸 路段 当当响 ， 应该 处理   \n19999  大师 好 ， 东风风光330 开车 下坡 时 没有 踩 刹车 四档 不 小心 挂到 一档 响...   \n\n                                                Dialogue  \n0      技师说 你好 ， 请问 车 跑 公里 ， 保修期 内 ， 当地 4 店 里面 进行 检查 维...  \n1      技师说 你好 师傅 ！ 抛光 处理 一下 好 ！ 50 元 左右 好 ， 希望 能够 帮到 ...  \n2                        技师说 你好 ， 气囊 油丝坏 ， 价格 不 贵 。 更换 。  \n3      技师说 你好 ！ 发动机 没力 ， 伴有 “ 啪啪 ” 漏气 声音 。 二 发动机 没力 ，...  \n4      技师说 你好 师傅 ！ 可能 前轮 平衡 快 脱落 不 平衡 造成 ！ 建议 前轮 做 一下...  \n...                                                  ...  \n19995  技师说 主要 检查 正时 专用工具 车主说 ， 专用工具 ， 车主说 两大 疑问 技师说 车...  \n19996  技师说 你好 ， 原厂 件 厂家 合作 汽配 厂 生产 。 都 品牌 。 想要 完全 只能 ...  \n19997  技师说 你好 。 车 不要 经常 停放 。 超过 一周 需要 电瓶 负极 线 拆掉 避免 电...  \n19998  技师说 你好 ， 最 简单 办法 中间 垫点 薄 铁皮 。 祝您 用车 愉快 ！ 车主说 导...  \n19999  技师说 你好 ， 现在 开着 没有 异常 没事 ， 这种 情况 尽量 避免 ， 容易 打坏 ...  \n\n[20000 rows x 5 columns]\n------------------------------------\ntrain data size 82873,test data size 20000,merged_df data size 102873\n2020-07-29 22:38:49,302 : INFO : collecting all words and their counts\n2020-07-29 22:38:49,306 : INFO : PROGRESS: at sentence #0, processed 0 words, keeping 0 word types\nstart build w2v model\n2020-07-29 22:38:49,554 : INFO : PROGRESS: at sentence #10000, processed 1276705 words, keeping 34209 word types\n2020-07-29 22:38:49,809 : INFO : PROGRESS: at sentence #20000, processed 2572180 words, keeping 49557 word types\n2020-07-29 22:38:50,062 : INFO : PROGRESS: at sentence #30000, processed 3842841 words, keeping 60845 word types\n2020-07-29 22:38:50,303 : INFO : PROGRESS: at sentence #40000, processed 5078847 words, keeping 70288 word types\n2020-07-29 22:38:50,565 : INFO : PROGRESS: at sentence #50000, processed 6389788 words, keeping 78649 word types\n2020-07-29 22:38:50,838 : INFO : PROGRESS: at sentence #60000, processed 7783786 words, keeping 87077 word types\n2020-07-29 22:38:51,123 : INFO : PROGRESS: at sentence #70000, processed 9207801 words, keeping 95322 word types\n2020-07-29 22:38:51,380 : INFO : PROGRESS: at sentence #80000, processed 10475253 words, keeping 102247 word types\n2020-07-29 22:38:51,616 : INFO : PROGRESS: at sentence #90000, processed 11633277 words, keeping 109005 word types\n2020-07-29 22:38:51,851 : INFO : PROGRESS: at sentence #100000, processed 12792682 words, keeping 114974 word types\n2020-07-29 22:38:51,922 : INFO : collected 116674 word types from a corpus of 13134376 raw words and 102873 sentences\n2020-07-29 22:38:51,922 : INFO : Loading a fresh vocabulary\n2020-07-29 22:38:52,021 : INFO : effective_min_count=5 retains 31851 unique words (27% of original 116674, drops 84823)\n2020-07-29 22:38:52,022 : INFO : effective_min_count=5 leaves 13003784 word corpus (99% of original 13134376, drops 130592)\n2020-07-29 22:38:52,087 : INFO : deleting the raw counts dictionary of 116674 items\n2020-07-29 22:38:52,090 : INFO : sample=0.001 downsamples 39 most-common words\n2020-07-29 22:38:52,091 : INFO : downsampling leaves estimated 9644326 word corpus (74.2% of prior 13003784)\n2020-07-29 22:38:52,154 : INFO : estimated required memory for 31851 words and 300 dimensions: 92367900 bytes\n2020-07-29 22:38:52,155 : INFO : resetting layer weights\n2020-07-29 22:38:56,374 : INFO : training model with 8 workers on 31851 vocabulary and 300 features, using sg=1 hs=0 sample=0.001 negative=5 window=5\n2020-07-29 22:38:57,384 : INFO : EPOCH 1 - PROGRESS: at 6.12% examples, 589557 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:38:58,386 : INFO : EPOCH 1 - PROGRESS: at 12.74% examples, 607849 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:38:59,402 : INFO : EPOCH 1 - PROGRESS: at 19.16% examples, 611473 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:00,416 : INFO : EPOCH 1 - PROGRESS: at 25.06% examples, 600490 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:01,420 : INFO : EPOCH 1 - PROGRESS: at 31.34% examples, 599858 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:02,455 : INFO : EPOCH 1 - PROGRESS: at 37.03% examples, 582624 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:03,476 : INFO : EPOCH 1 - PROGRESS: at 42.17% examples, 568492 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:04,482 : INFO : EPOCH 1 - PROGRESS: at 47.15% examples, 559601 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:05,494 : INFO : EPOCH 1 - PROGRESS: at 51.83% examples, 550075 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:06,498 : INFO : EPOCH 1 - PROGRESS: at 56.37% examples, 542989 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:07,518 : INFO : EPOCH 1 - PROGRESS: at 60.96% examples, 537597 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:08,541 : INFO : EPOCH 1 - PROGRESS: at 65.64% examples, 533182 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:09,571 : INFO : EPOCH 1 - PROGRESS: at 70.39% examples, 529474 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:10,577 : INFO : EPOCH 1 - PROGRESS: at 75.50% examples, 526857 words/s, in_qsize 12, out_qsize 0\n2020-07-29 22:39:11,588 : INFO : EPOCH 1 - PROGRESS: at 80.60% examples, 522923 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:12,607 : INFO : EPOCH 1 - PROGRESS: at 86.49% examples, 520745 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:13,646 : INFO : EPOCH 1 - PROGRESS: at 91.73% examples, 518338 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:14,647 : INFO : EPOCH 1 - PROGRESS: at 97.90% examples, 517570 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:14,962 : INFO : worker thread finished; awaiting finish of 7 more threads\n2020-07-29 22:39:14,970 : INFO : worker thread finished; awaiting finish of 6 more threads\n2020-07-29 22:39:14,981 : INFO : worker thread finished; awaiting finish of 5 more threads\n2020-07-29 22:39:14,982 : INFO : worker thread finished; awaiting finish of 4 more threads\n2020-07-29 22:39:14,994 : INFO : worker thread finished; awaiting finish of 3 more threads\n2020-07-29 22:39:14,996 : INFO : worker thread finished; awaiting finish of 2 more threads\n2020-07-29 22:39:15,007 : INFO : worker thread finished; awaiting finish of 1 more threads\n2020-07-29 22:39:15,022 : INFO : worker thread finished; awaiting finish of 0 more threads\n2020-07-29 22:39:15,023 : INFO : EPOCH - 1 : training on 13134376 raw words (9643455 effective words) took 18.6s, 517252 effective words/s\n2020-07-29 22:39:16,035 : INFO : EPOCH 2 - PROGRESS: at 4.73% examples, 451018 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:17,050 : INFO : EPOCH 2 - PROGRESS: at 9.80% examples, 463174 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:18,052 : INFO : EPOCH 2 - PROGRESS: at 14.81% examples, 470156 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:19,069 : INFO : EPOCH 2 - PROGRESS: at 19.73% examples, 471425 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:20,070 : INFO : EPOCH 2 - PROGRESS: at 24.71% examples, 474671 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:21,078 : INFO : EPOCH 2 - PROGRESS: at 29.84% examples, 475700 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:22,079 : INFO : EPOCH 2 - PROGRESS: at 34.96% examples, 474942 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:23,082 : INFO : EPOCH 2 - PROGRESS: at 40.20% examples, 477228 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:24,088 : INFO : EPOCH 2 - PROGRESS: at 45.19% examples, 478019 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:25,101 : INFO : EPOCH 2 - PROGRESS: at 49.95% examples, 478937 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:26,109 : INFO : EPOCH 2 - PROGRESS: at 54.66% examples, 478651 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:27,118 : INFO : EPOCH 2 - PROGRESS: at 59.20% examples, 479582 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:28,124 : INFO : EPOCH 2 - PROGRESS: at 63.74% examples, 479507 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:29,149 : INFO : EPOCH 2 - PROGRESS: at 68.16% examples, 479193 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:30,163 : INFO : EPOCH 2 - PROGRESS: at 73.17% examples, 479776 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:31,165 : INFO : EPOCH 2 - PROGRESS: at 78.46% examples, 480204 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:32,189 : INFO : EPOCH 2 - PROGRESS: at 84.09% examples, 480091 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:33,242 : INFO : EPOCH 2 - PROGRESS: at 89.46% examples, 480158 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:34,252 : INFO : EPOCH 2 - PROGRESS: at 95.48% examples, 480919 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:34,965 : INFO : worker thread finished; awaiting finish of 7 more threads\n2020-07-29 22:39:34,971 : INFO : worker thread finished; awaiting finish of 6 more threads\n2020-07-29 22:39:34,975 : INFO : worker thread finished; awaiting finish of 5 more threads\n2020-07-29 22:39:34,980 : INFO : worker thread finished; awaiting finish of 4 more threads\n2020-07-29 22:39:34,987 : INFO : worker thread finished; awaiting finish of 3 more threads\n2020-07-29 22:39:34,989 : INFO : worker thread finished; awaiting finish of 2 more threads\n2020-07-29 22:39:34,996 : INFO : worker thread finished; awaiting finish of 1 more threads\n2020-07-29 22:39:35,020 : INFO : worker thread finished; awaiting finish of 0 more threads\n2020-07-29 22:39:35,021 : INFO : EPOCH - 2 : training on 13134376 raw words (9644132 effective words) took 20.0s, 482313 effective words/s\n2020-07-29 22:39:36,024 : INFO : EPOCH 3 - PROGRESS: at 4.79% examples, 462437 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:37,041 : INFO : EPOCH 3 - PROGRESS: at 10.03% examples, 475518 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:38,045 : INFO : EPOCH 3 - PROGRESS: at 15.09% examples, 480509 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:39,051 : INFO : EPOCH 3 - PROGRESS: at 20.14% examples, 484155 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:40,056 : INFO : EPOCH 3 - PROGRESS: at 25.20% examples, 484525 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:41,057 : INFO : EPOCH 3 - PROGRESS: at 30.41% examples, 485730 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:42,064 : INFO : EPOCH 3 - PROGRESS: at 35.56% examples, 484108 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:43,099 : INFO : EPOCH 3 - PROGRESS: at 40.95% examples, 484255 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:44,113 : INFO : EPOCH 3 - PROGRESS: at 45.91% examples, 484680 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:45,179 : INFO : EPOCH 3 - PROGRESS: at 50.67% examples, 482411 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:46,215 : INFO : EPOCH 3 - PROGRESS: at 55.45% examples, 481933 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:47,224 : INFO : EPOCH 3 - PROGRESS: at 59.74% examples, 480158 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:48,241 : INFO : EPOCH 3 - PROGRESS: at 64.23% examples, 479016 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:49,253 : INFO : EPOCH 3 - PROGRESS: at 68.69% examples, 479206 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:50,269 : INFO : EPOCH 3 - PROGRESS: at 73.48% examples, 478238 words/s, in_qsize 12, out_qsize 0\n2020-07-29 22:39:51,301 : INFO : EPOCH 3 - PROGRESS: at 78.64% examples, 477495 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:52,330 : INFO : EPOCH 3 - PROGRESS: at 84.19% examples, 476557 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:53,344 : INFO : EPOCH 3 - PROGRESS: at 89.39% examples, 477042 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:54,357 : INFO : EPOCH 3 - PROGRESS: at 94.96% examples, 476408 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:55,164 : INFO : worker thread finished; awaiting finish of 7 more threads\n2020-07-29 22:39:55,171 : INFO : worker thread finished; awaiting finish of 6 more threads\n2020-07-29 22:39:55,175 : INFO : worker thread finished; awaiting finish of 5 more threads\n2020-07-29 22:39:55,193 : INFO : worker thread finished; awaiting finish of 4 more threads\n2020-07-29 22:39:55,198 : INFO : worker thread finished; awaiting finish of 3 more threads\n2020-07-29 22:39:55,210 : INFO : worker thread finished; awaiting finish of 2 more threads\n2020-07-29 22:39:55,216 : INFO : worker thread finished; awaiting finish of 1 more threads\n2020-07-29 22:39:55,226 : INFO : worker thread finished; awaiting finish of 0 more threads\n2020-07-29 22:39:55,227 : INFO : EPOCH - 3 : training on 13134376 raw words (9644056 effective words) took 20.2s, 477340 effective words/s\n2020-07-29 22:39:56,235 : INFO : EPOCH 4 - PROGRESS: at 4.45% examples, 424635 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:57,293 : INFO : EPOCH 4 - PROGRESS: at 9.36% examples, 433625 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:58,319 : INFO : EPOCH 4 - PROGRESS: at 14.29% examples, 444260 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:39:59,324 : INFO : EPOCH 4 - PROGRESS: at 18.95% examples, 445928 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:00,328 : INFO : EPOCH 4 - PROGRESS: at 23.49% examples, 445405 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:01,339 : INFO : EPOCH 4 - PROGRESS: at 28.26% examples, 446470 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:02,343 : INFO : EPOCH 4 - PROGRESS: at 33.04% examples, 446441 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:03,371 : INFO : EPOCH 4 - PROGRESS: at 38.07% examples, 447316 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:04,387 : INFO : EPOCH 4 - PROGRESS: at 42.77% examples, 446920 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:05,398 : INFO : EPOCH 4 - PROGRESS: at 47.23% examples, 446680 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:06,414 : INFO : EPOCH 4 - PROGRESS: at 51.68% examples, 447029 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:07,417 : INFO : EPOCH 4 - PROGRESS: at 56.03% examples, 447842 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:08,430 : INFO : EPOCH 4 - PROGRESS: at 60.22% examples, 447568 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:09,442 : INFO : EPOCH 4 - PROGRESS: at 64.52% examples, 447538 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:10,476 : INFO : EPOCH 4 - PROGRESS: at 68.68% examples, 447227 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:11,480 : INFO : EPOCH 4 - PROGRESS: at 73.33% examples, 447726 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:12,495 : INFO : EPOCH 4 - PROGRESS: at 78.31% examples, 448021 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:13,500 : INFO : EPOCH 4 - PROGRESS: at 83.64% examples, 449021 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:14,532 : INFO : EPOCH 4 - PROGRESS: at 88.73% examples, 449098 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:15,565 : INFO : EPOCH 4 - PROGRESS: at 93.92% examples, 448641 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:16,571 : INFO : EPOCH 4 - PROGRESS: at 99.36% examples, 449159 words/s, in_qsize 9, out_qsize 0\n2020-07-29 22:40:16,604 : INFO : worker thread finished; awaiting finish of 7 more threads\n2020-07-29 22:40:16,607 : INFO : worker thread finished; awaiting finish of 6 more threads\n2020-07-29 22:40:16,630 : INFO : worker thread finished; awaiting finish of 5 more threads\n2020-07-29 22:40:16,634 : INFO : worker thread finished; awaiting finish of 4 more threads\n2020-07-29 22:40:16,636 : INFO : worker thread finished; awaiting finish of 3 more threads\n2020-07-29 22:40:16,651 : INFO : worker thread finished; awaiting finish of 2 more threads\n2020-07-29 22:40:16,659 : INFO : worker thread finished; awaiting finish of 1 more threads\n2020-07-29 22:40:16,662 : INFO : worker thread finished; awaiting finish of 0 more threads\n2020-07-29 22:40:16,662 : INFO : EPOCH - 4 : training on 13134376 raw words (9643875 effective words) took 21.4s, 449961 effective words/s\n2020-07-29 22:40:17,669 : INFO : EPOCH 5 - PROGRESS: at 4.52% examples, 432198 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:18,673 : INFO : EPOCH 5 - PROGRESS: at 9.35% examples, 445196 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:19,694 : INFO : EPOCH 5 - PROGRESS: at 14.15% examples, 448032 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:20,702 : INFO : EPOCH 5 - PROGRESS: at 19.02% examples, 454016 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:21,732 : INFO : EPOCH 5 - PROGRESS: at 23.76% examples, 453876 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:22,733 : INFO : EPOCH 5 - PROGRESS: at 28.75% examples, 456592 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:23,741 : INFO : EPOCH 5 - PROGRESS: at 33.69% examples, 456997 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:24,744 : INFO : EPOCH 5 - PROGRESS: at 38.60% examples, 457149 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:25,744 : INFO : EPOCH 5 - PROGRESS: at 43.49% examples, 458067 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:26,756 : INFO : EPOCH 5 - PROGRESS: at 48.01% examples, 458058 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:27,783 : INFO : EPOCH 5 - PROGRESS: at 52.55% examples, 457589 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:28,799 : INFO : EPOCH 5 - PROGRESS: at 56.88% examples, 457616 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:29,817 : INFO : EPOCH 5 - PROGRESS: at 61.18% examples, 457677 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:30,823 : INFO : EPOCH 5 - PROGRESS: at 65.64% examples, 458088 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:31,841 : INFO : EPOCH 5 - PROGRESS: at 69.99% examples, 457968 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:32,862 : INFO : EPOCH 5 - PROGRESS: at 74.85% examples, 458295 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:33,880 : INFO : EPOCH 5 - PROGRESS: at 79.87% examples, 458269 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:34,900 : INFO : EPOCH 5 - PROGRESS: at 85.42% examples, 458320 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:35,932 : INFO : EPOCH 5 - PROGRESS: at 90.22% examples, 458177 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:36,999 : INFO : EPOCH 5 - PROGRESS: at 95.93% examples, 456565 words/s, in_qsize 15, out_qsize 0\n2020-07-29 22:40:37,682 : INFO : worker thread finished; awaiting finish of 7 more threads\n2020-07-29 22:40:37,698 : INFO : worker thread finished; awaiting finish of 6 more threads\n2020-07-29 22:40:37,722 : INFO : worker thread finished; awaiting finish of 5 more threads\n2020-07-29 22:40:37,735 : INFO : worker thread finished; awaiting finish of 4 more threads\n2020-07-29 22:40:37,744 : INFO : worker thread finished; awaiting finish of 3 more threads\n2020-07-29 22:40:37,751 : INFO : worker thread finished; awaiting finish of 2 more threads\n2020-07-29 22:40:37,761 : INFO : worker thread finished; awaiting finish of 1 more threads\n2020-07-29 22:40:37,777 : INFO : worker thread finished; awaiting finish of 0 more threads\n2020-07-29 22:40:37,778 : INFO : EPOCH - 5 : training on 13134376 raw words (9645504 effective words) took 21.1s, 456830 effective words/s\n2020-07-29 22:40:37,778 : INFO : training on a 65671880 raw words (48221022 effective words) took 101.4s, 475530 effective words/s\n2020-07-29 22:40:45,386 : INFO : saving Word2Vec object under C:\\Users\\Administrator.DESKTOP-HN1J6IE\\Desktop\\lecture01_1_2\\code\\data\\wv\\word2vec.model, separately None\n2020-07-29 22:40:45,387 : INFO : not storing attribute vectors_norm\n2020-07-29 22:40:45,387 : INFO : not storing attribute cum_table\n2020-07-29 22:40:45,909 : INFO : saved C:\\Users\\Administrator.DESKTOP-HN1J6IE\\Desktop\\lecture01_1_2\\code\\data\\wv\\word2vec.model\nfinish retrain w2v model\nfinal w2v_model has vocabulary of  31851\n"},{"output_type":"execute_result","data":{"text/plain":"(array([[    2,   419,   907, ...,     0,     0,     0],\n        [    2,   817,     1, ...,     0,     0,     0],\n        [    2,  1400,    90, ...,     0,     0,     0],\n        ...,\n        [    2,   228,   897, ...,     0,     0,     0],\n        [    2, 12701,  3153, ...,     0,     0,     0],\n        [    2,    14,  3284, ...,     0,     0,     0]]),\n array([[  2, 329, 395, ...,   0,   0,   0],\n        [  2, 329, 395, ...,   0,   0,   0],\n        [  2,  82,  11, ...,   0,   0,   0],\n        ...,\n        [  2,  34,  25, ...,   0,   0,   0],\n        [  2,  34,  25, ...,   0,   0,   0],\n        [  2,  34,  25, ...,   0,   0,   0]]),\n array([[   2, 1080,  281, ...,    0,    0,    0],\n        [   2,  225,   16, ...,    0,    0,    0],\n        [   2, 1080, 6187, ...,    0,    0,    0],\n        ...,\n        [   2,   70,    4, ...,    0,    0,    0],\n        [   2,   14,  107, ...,    0,    0,    0],\n        [   2,  107,   12, ...,    0,    0,    0]]))"},"metadata":{},"execution_count":12}]},{"cell_type":"markdown","metadata":{"id":"qMf9tuTubgEX","colab_type":"text"},"source":["# 1. 加载数据"]},{"cell_type":"markdown","metadata":{"id":"ZoJhksySbgEY","colab_type":"text"},"source":["## 1.1 加载数据集"]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","metadata":{"id":"nvA2eoHnbgEY","colab_type":"code","colab":{}},"source":["params = {}\n","params[\"vocab_size\"] = 30000\n","params[\"embed_size\"] = 300\n","params[\"enc_units\"] = 512\n","params[\"attn_units\"] = 512\n","params[\"dec_units\"] = 512\n","params[\"batch_size\"] = 64\n","params[\"epochs\"] = 5\n","params[\"max_enc_len\"] = 200\n","params[\"max_dec_len\"] = 40"],"execution_count":18,"outputs":[]},{"cell_type":"code","metadata":{"id":"JADHS0X6bgEb","colab_type":"code","outputId":"c76c4200-4af1-403c-9df2-94731bfad475","executionInfo":{"status":"ok","timestamp":1591350390458,"user_tz":-480,"elapsed":1571,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"source":["dataset, steps_per_epoch = train_batch_generator(batch_size=params[\"batch_size\"] ,\n","                                                 max_enc_len=params[\"max_enc_len\"],\n","                                                 max_dec_len=params[\"max_dec_len\"])\n","test_X = load_test_dataset(params[\"max_enc_len\"])\n","steps_per_epoch"],"execution_count":19,"outputs":[{"output_type":"execute_result","data":{"text/plain":"1294"},"metadata":{},"execution_count":19}]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","metadata":{"id":"rPxO1oWKQt3V","colab_type":"code","outputId":"ae8f248b-651a-4a48-9c02-b0e5ac35afe6","executionInfo":{"status":"ok","timestamp":1591350390459,"user_tz":-480,"elapsed":1396,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"source":["test_X.shape"],"execution_count":12,"outputs":[{"output_type":"execute_result","data":{"text/plain":"(20000, 200)"},"metadata":{},"execution_count":12}]},{"cell_type":"markdown","metadata":{"id":"TQsNOX8MbgEd","colab_type":"text"},"source":["## 1.2. 加载vocab"]},{"cell_type":"code","metadata":{"id":"RdIrGTVgbgEd","colab_type":"code","outputId":"9fde473a-e304-4b5a-b3fe-413bf214dc41","executionInfo":{"status":"ok","timestamp":1591350392525,"user_tz":-480,"elapsed":659,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34},"tags":[]},"source":["vocab = Vocab(vocab_max_size=params[\"vocab_size\"])"],"execution_count":13,"outputs":[{"output_type":"stream","name":"stdout","text":"max_size of vocab was specified as 30000; we now have 29997 words. Stopping reading.\n"}]},{"cell_type":"markdown","metadata":{"id":"GvP588eqbgEe","colab_type":"text"},"source":["## 1.3 加载预训练权重"]},{"cell_type":"code","metadata":{"id":"a3apok7JbgEf","colab_type":"code","outputId":"5f901f2d-ebb0-4532-cc71-e49b11cda484","executionInfo":{"status":"ok","timestamp":1591350393419,"user_tz":-480,"elapsed":947,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"source":["embedding_matrix=load_embedding_matrix(max_vocab_size=params[\"vocab_size\"])\n","embedding_matrix.shape"],"execution_count":14,"outputs":[{"output_type":"execute_result","data":{"text/plain":"(30000, 300)"},"metadata":{},"execution_count":14}]},{"cell_type":"markdown","metadata":{"id":"Ur7KV6z8bgEg","colab_type":"text"},"source":["# 2. 模型训练"]},{"cell_type":"markdown","metadata":{"id":"r5TI8Cf5bgEj","colab_type":"text"},"source":["## 2.2 构建Encoder"]},{"cell_type":"code","metadata":{"id":"Mwl-ouVybgEj","colab_type":"code","colab":{}},"source":["from seq2seq_tf2.seq2seq_model import Seq2Seq"],"execution_count":15,"outputs":[]},{"cell_type":"code","metadata":{"id":"rVy9ej3obgEl","colab_type":"code","colab":{}},"source":["model=Seq2Seq(params,vocab)"],"execution_count":16,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ghj91uiqbgEm","colab_type":"text"},"source":["# 2 读取训练好的模型"]},{"cell_type":"code","metadata":{"id":"Snxlig7fbgEn","colab_type":"code","colab":{}},"source":["from utils.config import checkpoint_dir,checkpoint_prefix"],"execution_count":17,"outputs":[]},{"cell_type":"code","metadata":{"id":"33RKO7esbgEo","colab_type":"code","colab":{}},"source":["ckpt = tf.train.Checkpoint(Seq2Seq=model)\n","ckpt_manager = tf.train.CheckpointManager(ckpt, checkpoint_dir, max_to_keep=5)"],"execution_count":18,"outputs":[]},{"cell_type":"code","metadata":{"id":"ZbGPGREqbgEq","colab_type":"code","outputId":"ba01d83c-e00c-49af-d115-8a3d5a210ad7","executionInfo":{"status":"ok","timestamp":1591350400130,"user_tz":-480,"elapsed":991,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34},"tags":[]},"source":["ckpt.restore(ckpt_manager.latest_checkpoint)\n","if ckpt_manager.latest_checkpoint:\n","    print(\"Restored from {}\".format(ckpt_manager.latest_checkpoint))\n","else:\n","    print(\"Initializing from scratch.\")"],"execution_count":19,"outputs":[{"output_type":"stream","name":"stdout","text":"Initializing from scratch.\n"}]},{"cell_type":"markdown","metadata":{"id":"618jZOvebgEs","colab_type":"text"},"source":["# 3. 训练"]},{"cell_type":"code","metadata":{"id":"-GFVllXZbgEs","colab_type":"code","colab":{}},"source":["optimizer = tf.keras.optimizers.Adam(name='Adam',learning_rate=0.001)\n","loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True, reduction='none')\n","\n","\n","pad_index=vocab.PAD_TOKEN_INDEX\n","\n","def loss_function(real, pred):\n","    mask = tf.math.logical_not(tf.math.equal(real, pad_index))\n","    loss_ = loss_object(real, pred)\n","    mask = tf.cast(mask, dtype=loss_.dtype)\n","    loss_ *= mask\n","    return tf.reduce_mean(loss_)"],"execution_count":20,"outputs":[]},{"cell_type":"code","metadata":{"id":"IV3KuuCobgEv","colab_type":"code","colab":{}},"source":["@tf.function\n","def train_step(dec_input, targ , enc_hidden):\n","    with tf.GradientTape() as tape:\n","      \n","        # 1. 构建encoder\n","        enc_output, enc_hidden = model.encoder(inp,enc_hidden)\n","        # 2. 复制\n","        dec_hidden = enc_hidden\n","        \n","        # 逐个预测序列 (dec_hidden, enc_output, dec_target)\n","        predictions, _ = model.teacher_decoder(dec_hidden, enc_output, targ)\n","        \n","        batch_loss = loss_function(targ, predictions)\n","\n","        variables = model.trainable_variables\n","    \n","        gradients = tape.gradient(batch_loss, variables)\n","\n","        optimizer.apply_gradients(zip(gradients, variables))\n","\n","        return batch_loss"],"execution_count":21,"outputs":[]},{"cell_type":"code","metadata":{"scrolled":true,"id":"NzRkRi7lbgEw","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":1000},"outputId":"bf475884-9923-42cd-bc48-ff0880a325fa","tags":[]},"source":["epochs = 2 # params[\"epochs\"]\n","# 如果检查点存在，则恢复最新的检查点。\n","if ckpt_manager.latest_checkpoint:\n","    ckpt.restore(ckpt_manager.latest_checkpoint)\n","    print ('Latest checkpoint restored!!')\n","    \n","for epoch in range(epochs):\n","    start = time.time()\n","    enc_hidden = model.encoder.initialize_hidden_state()\n","    total_loss = 0\n","\n","    for (batch, (inp, targ)) in enumerate(dataset.take(steps_per_epoch)):\n","        batch_loss = train_step(inp, targ,enc_hidden)\n","        total_loss += batch_loss\n","\n","        if batch % 20 == 0:\n","            print('Epoch {} Batch {} Loss {:.4f}'.format(epoch + 1,\n","                                                         batch,\n","                                                         batch_loss.numpy()))\n","    # saving (checkpoint) the model every 2 epochs\n","    if (epoch + 1) % 1 == 0:\n","        ckpt_save_path = ckpt_manager.save()\n","        print ('Saving checkpoint for epoch {} at {}'.format(epoch+1,\n","                                                             ckpt_save_path))\n","\n","    print('Epoch {} Loss {:.4f}'.format(epoch + 1,\n","                                      total_loss / steps_per_epoch))\n","    print('Time taken for 1 epoch {} sec\\n'.format(time.time() - start))"],"execution_count":22,"outputs":[{"output_type":"stream","name":"stdout","text":"Epoch 1 Batch 0 Loss 4.8529\nEpoch 1 Batch 20 Loss 3.5289\nEpoch 1 Batch 40 Loss 2.5683\nEpoch 1 Batch 60 Loss 2.4900\nEpoch 1 Batch 80 Loss 2.5647\nEpoch 1 Batch 100 Loss 2.6186\nEpoch 1 Batch 120 Loss 2.7623\nEpoch 1 Batch 140 Loss 2.7401\nEpoch 1 Batch 160 Loss 2.6987\nEpoch 1 Batch 180 Loss 2.4562\nEpoch 1 Batch 200 Loss 2.7491\nEpoch 1 Batch 220 Loss 2.3363\nEpoch 1 Batch 240 Loss 2.4749\nEpoch 1 Batch 260 Loss 2.5217\nEpoch 1 Batch 280 Loss 2.6531\nEpoch 1 Batch 300 Loss 2.4026\nEpoch 1 Batch 320 Loss 2.8451\nEpoch 1 Batch 340 Loss 2.6870\nEpoch 1 Batch 360 Loss 2.3534\nEpoch 1 Batch 380 Loss 2.6536\nEpoch 1 Batch 400 Loss 2.6413\nEpoch 1 Batch 420 Loss 2.6601\nEpoch 1 Batch 440 Loss 2.4223\nEpoch 1 Batch 460 Loss 2.7835\nEpoch 1 Batch 480 Loss 3.0876\nEpoch 1 Batch 500 Loss 2.5008\nEpoch 1 Batch 520 Loss 2.5648\nEpoch 1 Batch 540 Loss 2.8014\nEpoch 1 Batch 560 Loss 2.6883\nEpoch 1 Batch 580 Loss 2.7182\nEpoch 1 Batch 600 Loss 2.6465\nEpoch 1 Batch 620 Loss 2.4776\nEpoch 1 Batch 640 Loss 2.4409\nEpoch 1 Batch 660 Loss 2.5704\nEpoch 1 Batch 680 Loss 2.8148\nEpoch 1 Batch 700 Loss 2.7632\nEpoch 1 Batch 720 Loss 2.5498\nEpoch 1 Batch 740 Loss 2.4096\nEpoch 1 Batch 760 Loss 2.6400\nEpoch 1 Batch 780 Loss 2.4084\nEpoch 1 Batch 800 Loss 2.1621\nEpoch 1 Batch 820 Loss 2.3049\nEpoch 1 Batch 840 Loss 2.5747\nEpoch 1 Batch 860 Loss 2.6945\nEpoch 1 Batch 880 Loss 2.2286\nEpoch 1 Batch 900 Loss 2.3236\nEpoch 1 Batch 920 Loss 2.7991\nEpoch 1 Batch 940 Loss 2.6982\nEpoch 1 Batch 960 Loss 2.6228\nEpoch 1 Batch 980 Loss 2.5817\nEpoch 1 Batch 1000 Loss 2.8120\nEpoch 1 Batch 1020 Loss 2.4333\nEpoch 1 Batch 1040 Loss 2.3816\nEpoch 1 Batch 1060 Loss 2.3805\nEpoch 1 Batch 1080 Loss 2.3691\nEpoch 1 Batch 1100 Loss 2.5102\nEpoch 1 Batch 1120 Loss 2.1480\nEpoch 1 Batch 1140 Loss 2.6632\nEpoch 1 Batch 1160 Loss 2.4887\nEpoch 1 Batch 1180 Loss 2.4384\nEpoch 1 Batch 1200 Loss 2.3286\nEpoch 1 Batch 1220 Loss 2.5253\nEpoch 1 Batch 1240 Loss 2.2299\nEpoch 1 Batch 1260 Loss 2.2266\nEpoch 1 Batch 1280 Loss 2.4650\nSaving checkpoint for epoch 1 at C:\\Users\\Administrator.DESKTOP-HN1J6IE\\Desktop\\lecture01_1_2\\code\\data\\checkpoints\\training_checkpoints_pgn_cov_backed\\ckpt-1\nEpoch 1 Loss 2.5264\nTime taken for 1 epoch 12640.320547580719 sec\n\nEpoch 2 Batch 0 Loss 2.1030\nEpoch 2 Batch 20 Loss 2.1922\nEpoch 2 Batch 40 Loss 2.3972\nEpoch 2 Batch 60 Loss 2.8649\nEpoch 2 Batch 80 Loss 2.4161\nEpoch 2 Batch 100 Loss 2.5383\nEpoch 2 Batch 120 Loss 2.5105\nEpoch 2 Batch 140 Loss 2.5549\nEpoch 2 Batch 160 Loss 2.0540\nEpoch 2 Batch 180 Loss 2.2700\nEpoch 2 Batch 200 Loss 2.6514\nEpoch 2 Batch 220 Loss 2.2094\nEpoch 2 Batch 240 Loss 2.2798\nEpoch 2 Batch 260 Loss 2.6191\nEpoch 2 Batch 280 Loss 2.4418\nEpoch 2 Batch 300 Loss 2.3431\nEpoch 2 Batch 320 Loss 2.3785\nEpoch 2 Batch 340 Loss 2.3184\nEpoch 2 Batch 360 Loss 2.2478\nEpoch 2 Batch 380 Loss 2.3838\nEpoch 2 Batch 400 Loss 2.2780\nEpoch 2 Batch 420 Loss 2.6280\nEpoch 2 Batch 440 Loss 2.4019\nEpoch 2 Batch 460 Loss 2.4858\nEpoch 2 Batch 480 Loss 2.1885\nEpoch 2 Batch 500 Loss 2.5210\nEpoch 2 Batch 520 Loss 2.2818\nEpoch 2 Batch 540 Loss 2.4629\nEpoch 2 Batch 560 Loss 2.4287\nEpoch 2 Batch 580 Loss 2.2268\nEpoch 2 Batch 600 Loss 2.2821\nEpoch 2 Batch 620 Loss 1.9959\nEpoch 2 Batch 640 Loss 2.4855\nEpoch 2 Batch 660 Loss 2.3648\nEpoch 2 Batch 680 Loss 2.2326\nEpoch 2 Batch 700 Loss 2.2520\nEpoch 2 Batch 720 Loss 2.3414\nEpoch 2 Batch 740 Loss 2.2883\nEpoch 2 Batch 760 Loss 2.5284\nEpoch 2 Batch 780 Loss 2.4365\nEpoch 2 Batch 800 Loss 2.3014\nEpoch 2 Batch 820 Loss 2.5329\nEpoch 2 Batch 840 Loss 2.3448\nEpoch 2 Batch 860 Loss 2.4687\nEpoch 2 Batch 880 Loss 2.0850\nEpoch 2 Batch 900 Loss 2.5248\nEpoch 2 Batch 920 Loss 2.0914\nEpoch 2 Batch 940 Loss 2.3419\nEpoch 2 Batch 960 Loss 2.1931\nEpoch 2 Batch 980 Loss 2.1798\nEpoch 2 Batch 1000 Loss 2.0301\nEpoch 2 Batch 1020 Loss 2.2010\nEpoch 2 Batch 1040 Loss 2.4057\nEpoch 2 Batch 1060 Loss 2.2113\nEpoch 2 Batch 1080 Loss 2.4540\nEpoch 2 Batch 1100 Loss 2.5042\nEpoch 2 Batch 1120 Loss 2.3183\nEpoch 2 Batch 1140 Loss 2.0838\nEpoch 2 Batch 1160 Loss 2.3215\nEpoch 2 Batch 1180 Loss 2.3326\nEpoch 2 Batch 1200 Loss 2.3530\nEpoch 2 Batch 1220 Loss 2.2966\nEpoch 2 Batch 1240 Loss 2.3266\nEpoch 2 Batch 1260 Loss 2.3733\nEpoch 2 Batch 1280 Loss 2.5770\nSaving checkpoint for epoch 2 at C:\\Users\\Administrator.DESKTOP-HN1J6IE\\Desktop\\lecture01_1_2\\code\\data\\checkpoints\\training_checkpoints_pgn_cov_backed\\ckpt-2\nEpoch 2 Loss 2.3378\nTime taken for 1 epoch 8763.764795064926 sec\n\n"}]},{"cell_type":"markdown","metadata":{"id":"6_f-9cVwbgEy","colab_type":"text"},"source":["Time taken for 1 epoch 524.4936063289642 sec"]},{"cell_type":"markdown","metadata":{"id":"TXugRKLwbgEy","colab_type":"text"},"source":["# 载入模型"]},{"cell_type":"code","metadata":{"id":"1ABzkQTmbgEz","colab_type":"code","outputId":"bb038ac5-5108-45a6-bd26-0514f73ca70d","executionInfo":{"status":"ok","timestamp":1591348931943,"user_tz":-480,"elapsed":1189,"user":{"displayName":"罗杰","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14Gg-a_YBdWbEZ2-QLI4_OGvTo7Epwx70DhGLvFws=s64","userId":"01455695120655509307"}},"colab":{"base_uri":"https://localhost:8080/","height":34},"tags":[]},"source":["ckpt.restore(ckpt_manager.latest_checkpoint)\n","if ckpt_manager.latest_checkpoint:\n","    print(\"Restored from {}\".format(ckpt_manager.latest_checkpoint))\n","else:\n","    print(\"Initializing from scratch.\")"],"execution_count":23,"outputs":[{"output_type":"stream","name":"stdout","text":"Restored from C:\\Users\\Administrator.DESKTOP-HN1J6IE\\Desktop\\lecture01_1_2\\code\\data\\checkpoints\\training_checkpoints_pgn_cov_backed\\ckpt-2\n"}]},{"cell_type":"markdown","metadata":{"id":"xavHMNJxbgE0","colab_type":"text"},"source":["# 预测"]},{"cell_type":"code","metadata":{"id":"fpCTVj8rbgE1","colab_type":"code","colab":{}},"source":["def evaluate(model,inputs,params):\n","    print('inputs:{}'.format(inputs))\n","    attention_plot = np.zeros((params['max_enc_len'],params['max_dec_len'],))\n","    \n","    inputs = tf.convert_to_tensor(inputs)\n","\n","    pred_result = ''\n","    hidden = [tf.zeros((1, params['enc_units']))]\n","    enc_output, enc_hidden = model.encoder(inputs, hidden)\n","\n","    dec_hidden = enc_hidden\n","    \n","    dec_input = tf.expand_dims([vocab.START_DECODING_INDEX], 0)\n","\n","    for t in range(params['max_dec_len']):\n","        predictions, dec_hidden, attention_weights = model.decoder(dec_input,\n","                                                                  dec_hidden,\n","                                                                  enc_output)\n","        # storing the attention weights to plot later on\n","        attention_weights = tf.reshape(attention_weights, (-1, ))\n","        attention_plot[t] = attention_weights.numpy()\n","        predicted_id = tf.argmax(predictions[0]).numpy()\n","        pred_result += vocab.id2word[predicted_id] + ' '\n","        if predicted_id == vocab.STOP_DECODING_INDEX:\n","            return pred_result, attention_plot\n","\n","        # the predicted ID is fed back into the model\n","        dec_input = tf.expand_dims([predicted_id], 0)\n","\n","    return pred_result, attention_plot"],"execution_count":24,"outputs":[]},{"cell_type":"code","metadata":{"id":"n0P_5pGzbgE2","colab_type":"code","colab":{}},"source":["def translate(sentence):\n","    \n","    print('Input: %s' % (sentence))\n","    sentence_index = preprocess_sentence(sentence,params['max_dec_len'],vocab.word2id)\n","    pred_result, attention_plot = evaluate(model,sentence_index,params)\n","\n","    print('sentence_index: %s' % (sentence_index))\n","    print('Predicted translation: {}'.format(pred_result))\n","    read_result=[vocab.id2word[index] for index in sentence_index.tolist()[0]]\n","    attention_plot = attention_plot[:len(pred_result.split(' ')), :len(read_result)]\n","    plot_attention(attention_plot, read_result, pred_result.split(' '))"],"execution_count":25,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"Hlvkm-_SbgE4","colab_type":"text"},"source":["## Restore the latest checkpoint and test"]},{"cell_type":"code","metadata":{"id":"nI-5fimVbgE4","colab_type":"code","colab":{}},"source":["sentence='哈佛H6换了正时皮带后报失火故障码是什么问题,技师说：你好！这种情况下我们就需要检验一下正时是否正确，可以通过测量发动机缸压来判断正时|车主说：正时确认对齐了|技师说：这款发动机是不是有平衡轴的？平衡轴也是需要对记号的,检查正时，测量缸压'"],"execution_count":26,"outputs":[]},{"cell_type":"code","metadata":{"id":"ns_UV6jfI41D","colab_type":"code","colab":{},"tags":[]},"source":["translate(sentence)"],"execution_count":27,"outputs":[{"output_type":"stream","name":"stdout","text":"Input: 哈佛H6换了正时皮带后报失火故障码是什么问题,技师说：你好！这种情况下我们就需要检验一下正时是否正确，可以通过测量发动机缸压来判断正时|车主说：正时确认对齐了|技师说：这款发动机是不是有平衡轴的？平衡轴也是需要对记号的,检查正时，测量缸压\n"},{"output_type":"error","ename":"AttributeError","evalue":"'dict' object has no attribute 'word2id'","traceback":["\u001b[1;31m---------------------------------------------------------------------------\u001b[0m","\u001b[1;31mAttributeError\u001b[0m                            Traceback (most recent call last)","\u001b[1;32m<ipython-input-27-657693e5c943>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mtranslate\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m","\u001b[1;32m<ipython-input-25-b97850e46d66>\u001b[0m in \u001b[0;36mtranslate\u001b[1;34m(sentence)\u001b[0m\n\u001b[0;32m      2\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      3\u001b[0m     \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Input: %s'\u001b[0m \u001b[1;33m%\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 4\u001b[1;33m     \u001b[0msentence_index\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpreprocess_sentence\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mparams\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'max_dec_len'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mword2id\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      5\u001b[0m     \u001b[0mpred_result\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mattention_plot\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mevaluate\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0msentence_index\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mparams\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      6\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n","\u001b[1;32m~\\Desktop\\lecture01_1_2\\code\\utils\\data_loader.py\u001b[0m in \u001b[0;36mpreprocess_sentence\u001b[1;34m(sentence, max_len, vocab)\u001b[0m\n\u001b[0;32m    178\u001b[0m     \u001b[0msentence\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpad_proc\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmax_len\u001b[0m \u001b[1;33m-\u001b[0m \u001b[1;36m2\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mvocab\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    179\u001b[0m     \u001b[1;31m# 3. 转换index\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 180\u001b[1;33m     \u001b[0msentence\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtransform_data\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mvocab\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    181\u001b[0m     \u001b[1;32mreturn\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0marray\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0msentence\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    182\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n","\u001b[1;32m~\\Desktop\\lecture01_1_2\\code\\utils\\data_loader.py\u001b[0m in \u001b[0;36mtransform_data\u001b[1;34m(sentence, vocab)\u001b[0m\n\u001b[0;32m    237\u001b[0m     \u001b[0mwords\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0msentence\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m' '\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    238\u001b[0m     \u001b[1;31m# 按照vocab的index进行转换         # 遇到未知词就填充unk的索引\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 239\u001b[1;33m     \u001b[0mids\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mword2id\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mword\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mword\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mword2id\u001b[0m \u001b[1;32melse\u001b[0m \u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mUNKNOWN_TOKEN_INDEX\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mword\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mwords\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    240\u001b[0m     \u001b[1;32mreturn\u001b[0m \u001b[0mids\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    241\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n","\u001b[1;32m~\\Desktop\\lecture01_1_2\\code\\utils\\data_loader.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[1;34m(.0)\u001b[0m\n\u001b[0;32m    237\u001b[0m     \u001b[0mwords\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0msentence\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m' '\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    238\u001b[0m     \u001b[1;31m# 按照vocab的index进行转换         # 遇到未知词就填充unk的索引\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 239\u001b[1;33m     \u001b[0mids\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mword2id\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mword\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mword\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mword2id\u001b[0m \u001b[1;32melse\u001b[0m \u001b[0mvocab\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mUNKNOWN_TOKEN_INDEX\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mword\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mwords\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    240\u001b[0m     \u001b[1;32mreturn\u001b[0m \u001b[0mids\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    241\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n","\u001b[1;31mAttributeError\u001b[0m: 'dict' object has no attribute 'word2id'"]}]},{"cell_type":"markdown","metadata":{"id":"yVbze51JbgE8","colab_type":"text"},"source":["# 批量预测"]},{"cell_type":"code","metadata":{"id":"J2ttQ095bgE8","colab_type":"code","colab":{}},"source":["def batch_predict(inps):\n","    # 判断输入长度\n","    batch_size=len(inps)\n","    # 开辟结果存储list\n","    preidicts=[''] * batch_size\n","    \n","    inps = tf.convert_to_tensor(inps)\n","    # 0. 初始化隐藏层输入\n","    hidden = [tf.zeros((batch_size, units))]\n","    # 1. 构建encoder\n","    enc_output, enc_hidden = model.encoder(inps, hidden)\n","    # 2. 复制\n","    dec_hidden = enc_hidden\n","    # 3. <START> * BATCH_SIZE \n","    dec_input = tf.expand_dims([vocab['<START>']] * batch_size, 1)\n","    \n","    context_vector, _ = model.attention(dec_hidden, enc_output)\n","    # Teacher forcing - feeding the target as the next input\n","    for t in range(max_length_targ):\n","        # 计算上下文\n","        context_vector, attention_weights = model.attention(dec_hidden, enc_output)\n","        # 单步预测\n","        predictions, dec_hidden = model.decoder(dec_input,\n","                                         dec_hidden,\n","                                         enc_output,\n","                                         context_vector)\n","        \n","        # id转换 贪婪搜索\n","        predicted_ids = tf.argmax(predictions,axis=1).numpy()\n","        \n","        \n","        for index,predicted_id in enumerate(predicted_ids):\n","            preidicts[index]+= reverse_vocab[predicted_id] + ' '\n","        \n","        # using teacher forcing\n","        dec_input = tf.expand_dims(predicted_ids, 1)\n","\n","    results=[]\n","    for preidict in preidicts:\n","        # 去掉句子前后空格\n","        preidict=preidict.strip()\n","        # 句子小于max len就结束了 截断\n","        if '<STOP>' in preidict:\n","            # 截断stop\n","            preidict=preidict[:preidict.index('<STOP>')]\n","        # 保存结果\n","        results.append(preidict)\n","    return results"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"QuvBgtt1bgE-","colab_type":"code","colab":{}},"source":["from tqdm import tqdm\n","import math"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"34lp2Sf5bgE_","colab_type":"code","colab":{}},"source":["def model_predict(data_X,batch_size):\n","    # 存储结果\n","    results=[]\n","    # 样本数量\n","    sample_size=len(data_X)\n","    # batch 操作轮数 math.ceil向上取整 小数 +1\n","    # 因为最后一个batch可能不足一个batch size 大小 ,但是依然需要计算  \n","    steps_epoch = math.ceil(sample_size/batch_size)\n","    # [0,steps_epoch)\n","    for i in tqdm(range(steps_epoch)):\n","        batch_data = data_X[i*batch_size:(i+1)*batch_size]\n","        results+=batch_predict(batch_data)\n","    return results"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"scrolled":true,"id":"yPyMABi9bgFA","colab_type":"code","outputId":"4d4398ce-6250-4d3c-b1d3-767d5898d851","colab":{}},"source":["%%time\n","results=model_predict(test_X,batch_size=64)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"DSCXF1ZEbgFC","colab_type":"code","outputId":"24b4c41b-2916-41ed-f18d-70bba10a748a","colab":{}},"source":["results[1005]"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"2d-Y242ebgFE","colab_type":"code","outputId":"5fe4c73c-2bdc-456b-b072-a2536eed7ba5","colab":{}},"source":["# 读入提交数据\n","test_df=pd.read_csv(test_data_path)\n","test_df.head()"],"execution_count":null,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"qcSOW6UkbgFJ","colab_type":"text"},"source":["# 判断是否有空值"]},{"cell_type":"code","metadata":{"id":"001T4MxNbgFJ","colab_type":"code","colab":{}},"source":["for idx,result in enumerate(results):\n","    if result=='':print(idx)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"UjCM2fJUbgFK","colab_type":"code","colab":{}},"source":["# 赋值结果\n","test_df['Prediction']=results\n","#　提取ID和预测结果两列\n","test_df=test_df[['QID','Prediction']]"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"Zt_yzRZ-bgFM","colab_type":"code","outputId":"50a247cb-f1f6-46b8-cb34-b9d57eab8102","colab":{}},"source":["test_df.head()"],"execution_count":null,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"YoxcWANMbgFN","colab_type":"text"},"source":["# 结果处理"]},{"cell_type":"code","metadata":{"id":"ZsLK_7gJbgFN","colab_type":"code","colab":{}},"source":["# 判断是否有空值\n","# for predic in test_df['Prediction']:\n","#     if type(predic) != str:\n","#         print(predic)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"-UmA27HObgFP","colab_type":"code","colab":{}},"source":["test_df['Prediction']=test_df['Prediction'].apply(submit_proc)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"a0AmjkSubgFR","colab_type":"code","outputId":"3a819965-8af6-4572-bdae-81a034a62871","colab":{}},"source":["test_df.head()"],"execution_count":null,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"11XYrzXgbgFS","colab_type":"text"},"source":["# 保存结果"]},{"cell_type":"code","metadata":{"id":"_1H6v56PbgFS","colab_type":"code","colab":{}},"source":["from utils.file_utils import get_result_filename"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"dyAfUjx0bgFU","colab_type":"code","colab":{}},"source":["# 获取结果存储路径\n","result_save_path = get_result_filename(params[\"batch_size\"],params[\"epochs\"] , params[\"max_enc_len\"], params[\"embedding_dim\"],commit='_4_1_submit_proc_add_masks_loss_seq2seq_code')"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"XUKvUIdTbgFW","colab_type":"code","colab":{}},"source":["# 保存结果.\n","test_df.to_csv(result_save_path,index=None,sep=',')"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"FYp7NW4ebgFY","colab_type":"code","outputId":"e2bc303a-418d-4f11-dad0-f7ea4339333e","colab":{}},"source":["result_save_path"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"pKT4qdpUbgFb","colab_type":"code","outputId":"1c8a1533-45bb-4be0-c017-113bd66aafab","colab":{}},"source":["# 读取结果\n","test_df=pd.read_csv(result_save_path)\n","# 查看格式\n","test_df.head(10)"],"execution_count":45,"outputs":[{"output_type":"error","ename":"NameError","evalue":"name 'result_save_path' is not defined","traceback":["\u001b[1;31m---------------------------------------------------------------------------\u001b[0m","\u001b[1;31mNameError\u001b[0m                                 Traceback (most recent call last)","\u001b[1;32m<ipython-input-45-6e2ed84e4266>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[1;31m# 读取结果\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[0mtest_df\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mpd\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread_csv\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mresult_save_path\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      3\u001b[0m \u001b[1;31m# 查看格式\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      4\u001b[0m \u001b[0mtest_df\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mhead\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m10\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n","\u001b[1;31mNameError\u001b[0m: name 'result_save_path' is not defined"]}]},{"cell_type":"markdown","metadata":{"id":"3aYZt1wFbgFd","colab_type":"text"},"source":["提交须知\n","请在提交之前仔细阅读“提交须知”。\n","\n","1. 自动评审\n","系统根据选手提交的结果自动评分，提供每天5次的评测与排名机会，实时更新排行榜并按照评测分数从高到低排序。若一天内多次提交结果，新结果版本将覆盖原版本。\n","\n","2. 评分标准\n","评测用到的核心算法为ROUGE(Recall-Oriented Understudy for Gisting Evaluation)，详见Wikipedia；具体用到的指标为ROUGE_L，即：Longest Common Subsequence (LCS) based statistics，关于LCS问题，详见Wikipedia。\n","\n","所有参与评审的模型必须使用飞桨PaddlePaddle。所有参赛个人可无限使用基于AI Studio平台提供的训练资源。\n","\n","3. 特别注意\n","选手需确认输出结果的总行数为20001（含表头），且QID ≤ Q20000，否则成绩无效。"]},{"cell_type":"markdown","metadata":{"id":"YPkpBrj7bgFd","colab_type":"text"},"source":["# ROUGE"]},{"cell_type":"markdown","metadata":{"id":"5Oo6SO7TbgFe","colab_type":"text"},"source":["# 角度1：QA 问题"]},{"cell_type":"markdown","metadata":{"id":"Wp-kdiUTbgFe","colab_type":"text"},"source":["# 角度2： 摘要问题"]},{"cell_type":"markdown","metadata":{"id":"ugzgTbs1bgFe","colab_type":"text"},"source":["# 角度3： 阅读理解问题\n","\n","> 如果看成是阅读理解问题， 那么就是从Conversation中找出能回答Problem的答案， 由于目前的阅读理解数据集的答案长度通常比较短（一般是几个单词），所以state of the art的作法是根据Problem，从Context中选择一段作为答案，模型只要输出答案的开始和结束位置即可。 但是这个任务的report有点长，常常出现几十个甚至上百个词， 而且report中的词好像并不完全是来自于Conversation。 Report中67.7%的词来自于Conversation."]},{"cell_type":"markdown","metadata":{"id":"FJePzYgTbgFf","colab_type":"text"},"source":["## Next steps\n","\n","* [Download a different dataset](http://www.manythings.org/anki/) to experiment with translations, for example, English to German, or English to French.\n","* Experiment with training on a larger dataset, or using more epochs\n","* [Neural Machine Translation (seq2seq) Tutorial](https://github.com/tensorflow/nmt)\n"]},{"cell_type":"code","metadata":{"id":"CIXz-j8SfR1I","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"sk1t4SCXfSXd","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"X-xUQiJTfVX-","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"9GeV8mIXNwWt","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"_o0MMR2VVS4x","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"BXzogZg-VVUn","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"lAzQvFzXViHA","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":[]}]}