{
 "cells": [
  {
   "cell_type": "code",
   "id": "92c9da5866bdcf7",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:31:48.551748Z",
     "start_time": "2025-07-06T01:31:47.694504Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from absl.testing.parameterized import parameters\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import numpy as np\n",
    "\n",
    "# 示例文本数据，一首诗\n",
    "text = \"\"\"\n",
    "独立寒秋，湘江北去，橘子洲头。\n",
    "看万山红遍，层林尽染；漫江碧透，百舸争流。\n",
    "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
    "怅寥廓，问苍茫大地，谁主沉浮？\n",
    "携来百侣曾游，忆往昔峥嵘岁月稠。\n",
    "恰同学少年，风华正茂；书生意气，挥斥方遒。\n",
    "指点江山，激扬文字，粪土当年万户侯。\n",
    "曾记否，到中流击水，浪遏飞舟？\n",
    "\"\"\"\n",
    "\n",
    "# 创建词汇表\n",
    "words = set(text)\n",
    "vocab_size = len(words)\n",
    "word_to_idx = {word: i for i, word in enumerate(words)}\n",
    "idx_to_word = {i: word for i, word in enumerate(words)}\n",
    "\n",
    "print(idx_to_word)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{0: '类', 1: '到', 2: '北', 3: '流', 4: '气', 5: '廓', 6: '去', 7: '透', 8: '土', 9: '来', 10: '指', 11: '空', 12: '遍', 13: '谁', 14: '年', 15: '遒', 16: '侣', 17: '头', 18: '层', 19: '染', 20: '林', 21: '挥', 22: '翔', 23: '携', 24: '往', 25: '当', 26: '曾', 27: '底', 28: '昔', 29: '同', 30: '洲', 31: '漫', 32: '浮', 33: '苍', 34: '竞', 35: '游', 36: '生', 37: '否', 38: '水', 39: '？', 40: '。', 41: '立', 42: '子', 43: '记', 44: '沉', 45: '，', 46: '鱼', 47: '大', 48: '粪', 49: '忆', 50: '文', 51: '碧', 52: '主', 53: '扬', 54: '嵘', 55: '书', 56: '恰', 57: '湘', 58: '华', 59: '看', 60: '寒', 61: '；', 62: '江', 63: '红', 64: '击', 65: '霜', 66: '长', 67: '地', 68: '侯', 69: '学', 70: '激', 71: '户', 72: '尽', 73: '百', 74: '\\n', 75: '飞', 76: '寥', 77: '独', 78: '怅', 79: '自', 80: '茂', 81: '风', 82: '点', 83: '少', 84: '山', 85: '橘', 86: '浅', 87: '峥', 88: '字', 89: '遏', 90: '正', 91: '月', 92: '浪', 93: '由', 94: '茫', 95: '稠', 96: '意', 97: '问', 98: '秋', 99: '争', 100: '中', 101: '天', 102: '舟', 103: '万', 104: '鹰', 105: '岁', 106: '舸', 107: '斥', 108: '方'}\n"
     ]
    }
   ],
   "execution_count": 1
  },
  {
   "cell_type": "code",
   "id": "91280acf83012c57",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:31:48.638189Z",
     "start_time": "2025-07-06T01:31:48.623800Z"
    }
   },
   "source": [
     "\n",
     "# Hyperparameters\n",
     "SEQ_LENGTH = 5  # input sequence length (characters per training window)\n",
     "BATCH_SIZE = 1\n",
     "HIDDEN_SIZE = 128\n",
     "INPUT_SIZE = 128\n",
     "\n",
     "\n",
     "# Build the training data\n",
     "class TextDataset(Dataset):\n",
     "    \"\"\"Sliding-window character dataset for next-character prediction.\n",
     "\n",
     "    Each item is a pair (input_seq, target_seq) of LongTensors of length\n",
     "    seq_length, where target_seq is input_seq shifted right by one char.\n",
     "    \"\"\"\n",
     "\n",
     "    def __init__(self, text, seq_length):\n",
     "        self.text = text\n",
     "        self.seq_length = seq_length\n",
     "\n",
     "        # Convert the text to a sequence of vocabulary indices.\n",
     "        # NOTE(review): relies on the module-level word_to_idx built in the\n",
     "        # previous cell; the notebook must be run top to bottom.\n",
     "        self.data = [word_to_idx[ch] for ch in text]\n",
     "\n",
     "    def __len__(self):\n",
     "        # A window can start at every position that still leaves room for\n",
     "        # a full shifted target sequence after it.\n",
     "        return len(self.data) - self.seq_length\n",
     "\n",
     "    def __getitem__(self, idx):\n",
     "        # Input: a window X taken from the text\n",
     "        input_seq = self.data[idx:idx + self.seq_length]\n",
     "\n",
     "        # Target: Y, the same window shifted one step ahead\n",
     "        target_seq = self.data[idx + 1:idx + self.seq_length + 1]\n",
     "\n",
     "        # E.g. if the corpus is abcdefg: input_seq=abc, target_seq=bcd\n",
     "\n",
     "        return torch.LongTensor(input_seq), torch.LongTensor(target_seq)\n",
     "\n",
     "\n",
     "dataset = TextDataset(text, SEQ_LENGTH)\n",
     "dataloader = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)\n",
     "\n",
     "# Peek at the first (input, target) pair to sanity-check the windowing\n",
     "for input_seq, target_seq in dataloader:\n",
     "    print(input_seq)\n",
     "    print(target_seq)\n",
     "    break"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[74, 77, 41, 60, 98]])\n",
      "tensor([[77, 41, 60, 98, 45]])\n"
     ]
    }
   ],
   "execution_count": 2
  },
  {
   "cell_type": "code",
   "id": "d9e9fd013d3d01ba",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:31:49.269534Z",
     "start_time": "2025-07-06T01:31:48.646836Z"
    }
   },
   "source": [
    "# 大都督周瑜（我的微信: dadudu6789）\n",
    "class ZhouyuModel(nn.Module):\n",
    "    def __init__(self, vocab_size, input_size, hidden_size):\n",
    "        super().__init__()\n",
    "\n",
    "        self.hidden_size = hidden_size\n",
    "\n",
    "        # 嵌入层，输入词索引，输出词向量\n",
    "        self.embedding = nn.Embedding(vocab_size, input_size)\n",
    "\n",
    "        # GRU层\n",
    "        self.rnn = nn.GRU(input_size, hidden_size, batch_first=True, num_layers=2)\n",
    "\n",
    "        # 输出层\n",
    "        self.out_linear = nn.Linear(hidden_size, vocab_size)\n",
    "\n",
    "    def forward(self, x, hidden=None):\n",
    "        embedded = self.embedding(x)\n",
    "        outputs, hidden = self.rnn(embedded, hidden)\n",
    "        outputs = self.out_linear(outputs)\n",
    "        return outputs, hidden\n",
    "\n",
    "\n",
    "# 初始化模型\n",
    "model = ZhouyuModel(vocab_size, INPUT_SIZE, HIDDEN_SIZE)\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.005)"
   ],
   "outputs": [],
   "execution_count": 3
  },
  {
   "cell_type": "code",
   "id": "2348428ce74982e4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:32:12.876157Z",
     "start_time": "2025-07-06T01:31:49.278170Z"
    }
   },
   "source": [
    "for epoch in range(100):\n",
    "    for i, (inputs, targets) in enumerate(dataloader):\n",
    "        # 前向传播\n",
    "        outputs, _ = model(inputs)\n",
    "\n",
    "        # 计算损失\n",
    "        # 用每个时间步的输出和每个时间步的标签进行比较，并平均损失\n",
    "        loss = criterion(\n",
    "            outputs.view(-1, vocab_size),  # (batch_size*seq_length, vocab_size)\n",
    "            targets.view(-1)  # (batch_size*seq_length)\n",
    "        )\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "\n",
    "        # 梯度裁剪防止爆炸\n",
    "        # nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)\n",
    "\n",
    "        optimizer.step()\n",
    "\n",
    "        if (i + 1) % 20 == 0:\n",
    "            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'\n",
    "                  .format(epoch + 1, 100, i + 1, len(dataloader), loss.item()))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch [1/100], Step [20/143], Loss: 4.6464\n",
      "Epoch [1/100], Step [40/143], Loss: 4.7181\n",
      "Epoch [1/100], Step [60/143], Loss: 4.6583\n",
      "Epoch [1/100], Step [80/143], Loss: 4.6680\n",
      "Epoch [1/100], Step [100/143], Loss: 4.6606\n",
      "Epoch [1/100], Step [120/143], Loss: 4.6911\n",
      "Epoch [1/100], Step [140/143], Loss: 4.7493\n",
      "Epoch [2/100], Step [20/143], Loss: 4.6043\n",
      "Epoch [2/100], Step [40/143], Loss: 4.6781\n",
      "Epoch [2/100], Step [60/143], Loss: 4.6195\n",
      "Epoch [2/100], Step [80/143], Loss: 4.6353\n",
      "Epoch [2/100], Step [100/143], Loss: 4.6447\n",
      "Epoch [2/100], Step [120/143], Loss: 4.6519\n",
      "Epoch [2/100], Step [140/143], Loss: 4.7107\n",
      "Epoch [3/100], Step [20/143], Loss: 4.5612\n",
      "Epoch [3/100], Step [40/143], Loss: 4.6380\n",
      "Epoch [3/100], Step [60/143], Loss: 4.5806\n",
      "Epoch [3/100], Step [80/143], Loss: 4.6022\n",
      "Epoch [3/100], Step [100/143], Loss: 4.6288\n",
      "Epoch [3/100], Step [120/143], Loss: 4.6121\n",
      "Epoch [3/100], Step [140/143], Loss: 4.6718\n",
      "Epoch [4/100], Step [20/143], Loss: 4.5165\n",
      "Epoch [4/100], Step [40/143], Loss: 4.5971\n",
      "Epoch [4/100], Step [60/143], Loss: 4.5409\n",
      "Epoch [4/100], Step [80/143], Loss: 4.5685\n",
      "Epoch [4/100], Step [100/143], Loss: 4.6128\n",
      "Epoch [4/100], Step [120/143], Loss: 4.5708\n",
      "Epoch [4/100], Step [140/143], Loss: 4.6321\n",
      "Epoch [5/100], Step [20/143], Loss: 4.4695\n",
      "Epoch [5/100], Step [40/143], Loss: 4.5546\n",
      "Epoch [5/100], Step [60/143], Loss: 4.5000\n",
      "Epoch [5/100], Step [80/143], Loss: 4.5339\n",
      "Epoch [5/100], Step [100/143], Loss: 4.5968\n",
      "Epoch [5/100], Step [120/143], Loss: 4.5276\n",
      "Epoch [5/100], Step [140/143], Loss: 4.5909\n",
      "Epoch [6/100], Step [20/143], Loss: 4.4193\n",
      "Epoch [6/100], Step [40/143], Loss: 4.5098\n",
      "Epoch [6/100], Step [60/143], Loss: 4.4573\n",
      "Epoch [6/100], Step [80/143], Loss: 4.4980\n",
      "Epoch [6/100], Step [100/143], Loss: 4.5809\n",
      "Epoch [6/100], Step [120/143], Loss: 4.4817\n",
      "Epoch [6/100], Step [140/143], Loss: 4.5476\n",
      "Epoch [7/100], Step [20/143], Loss: 4.3651\n",
      "Epoch [7/100], Step [40/143], Loss: 4.4619\n",
      "Epoch [7/100], Step [60/143], Loss: 4.4121\n",
      "Epoch [7/100], Step [80/143], Loss: 4.4604\n",
      "Epoch [7/100], Step [100/143], Loss: 4.5651\n",
      "Epoch [7/100], Step [120/143], Loss: 4.4325\n",
      "Epoch [7/100], Step [140/143], Loss: 4.5016\n",
      "Epoch [8/100], Step [20/143], Loss: 4.3063\n",
      "Epoch [8/100], Step [40/143], Loss: 4.4100\n",
      "Epoch [8/100], Step [60/143], Loss: 4.3639\n",
      "Epoch [8/100], Step [80/143], Loss: 4.4210\n",
      "Epoch [8/100], Step [100/143], Loss: 4.5497\n",
      "Epoch [8/100], Step [120/143], Loss: 4.3792\n",
      "Epoch [8/100], Step [140/143], Loss: 4.4523\n",
      "Epoch [9/100], Step [20/143], Loss: 4.2423\n",
      "Epoch [9/100], Step [40/143], Loss: 4.3533\n",
      "Epoch [9/100], Step [60/143], Loss: 4.3118\n",
      "Epoch [9/100], Step [80/143], Loss: 4.3792\n",
      "Epoch [9/100], Step [100/143], Loss: 4.5351\n",
      "Epoch [9/100], Step [120/143], Loss: 4.3214\n",
      "Epoch [9/100], Step [140/143], Loss: 4.3993\n",
      "Epoch [10/100], Step [20/143], Loss: 4.1731\n",
      "Epoch [10/100], Step [40/143], Loss: 4.2914\n",
      "Epoch [10/100], Step [60/143], Loss: 4.2554\n",
      "Epoch [10/100], Step [80/143], Loss: 4.3350\n",
      "Epoch [10/100], Step [100/143], Loss: 4.5218\n",
      "Epoch [10/100], Step [120/143], Loss: 4.2592\n",
      "Epoch [10/100], Step [140/143], Loss: 4.3425\n",
      "Epoch [11/100], Step [20/143], Loss: 4.0995\n",
      "Epoch [11/100], Step [40/143], Loss: 4.2246\n",
      "Epoch [11/100], Step [60/143], Loss: 4.1945\n",
      "Epoch [11/100], Step [80/143], Loss: 4.2882\n",
      "Epoch [11/100], Step [100/143], Loss: 4.5103\n",
      "Epoch [11/100], Step [120/143], Loss: 4.1934\n",
      "Epoch [11/100], Step [140/143], Loss: 4.2827\n",
      "Epoch [12/100], Step [20/143], Loss: 4.0241\n",
      "Epoch [12/100], Step [40/143], Loss: 4.1546\n",
      "Epoch [12/100], Step [60/143], Loss: 4.1295\n",
      "Epoch [12/100], Step [80/143], Loss: 4.2391\n",
      "Epoch [12/100], Step [100/143], Loss: 4.5010\n",
      "Epoch [12/100], Step [120/143], Loss: 4.1264\n",
      "Epoch [12/100], Step [140/143], Loss: 4.2213\n",
      "Epoch [13/100], Step [20/143], Loss: 3.9506\n",
      "Epoch [13/100], Step [40/143], Loss: 4.0842\n",
      "Epoch [13/100], Step [60/143], Loss: 4.0617\n",
      "Epoch [13/100], Step [80/143], Loss: 4.1880\n",
      "Epoch [13/100], Step [100/143], Loss: 4.4935\n",
      "Epoch [13/100], Step [120/143], Loss: 4.0610\n",
      "Epoch [13/100], Step [140/143], Loss: 4.1602\n",
      "Epoch [14/100], Step [20/143], Loss: 3.8830\n",
      "Epoch [14/100], Step [40/143], Loss: 4.0171\n",
      "Epoch [14/100], Step [60/143], Loss: 3.9931\n",
      "Epoch [14/100], Step [80/143], Loss: 4.1360\n",
      "Epoch [14/100], Step [100/143], Loss: 4.4858\n",
      "Epoch [14/100], Step [120/143], Loss: 4.0002\n",
      "Epoch [14/100], Step [140/143], Loss: 4.1012\n",
      "Epoch [15/100], Step [20/143], Loss: 3.8235\n",
      "Epoch [15/100], Step [40/143], Loss: 3.9557\n",
      "Epoch [15/100], Step [60/143], Loss: 3.9254\n",
      "Epoch [15/100], Step [80/143], Loss: 4.0838\n",
      "Epoch [15/100], Step [100/143], Loss: 4.4753\n",
      "Epoch [15/100], Step [120/143], Loss: 3.9453\n",
      "Epoch [15/100], Step [140/143], Loss: 4.0454\n",
      "Epoch [16/100], Step [20/143], Loss: 3.7718\n",
      "Epoch [16/100], Step [40/143], Loss: 3.9004\n",
      "Epoch [16/100], Step [60/143], Loss: 3.8593\n",
      "Epoch [16/100], Step [80/143], Loss: 4.0320\n",
      "Epoch [16/100], Step [100/143], Loss: 4.4592\n",
      "Epoch [16/100], Step [120/143], Loss: 3.8958\n",
      "Epoch [16/100], Step [140/143], Loss: 3.9928\n",
      "Epoch [17/100], Step [20/143], Loss: 3.7264\n",
      "Epoch [17/100], Step [40/143], Loss: 3.8502\n",
      "Epoch [17/100], Step [60/143], Loss: 3.7950\n",
      "Epoch [17/100], Step [80/143], Loss: 3.9807\n",
      "Epoch [17/100], Step [100/143], Loss: 4.4359\n",
      "Epoch [17/100], Step [120/143], Loss: 3.8506\n",
      "Epoch [17/100], Step [140/143], Loss: 3.9432\n",
      "Epoch [18/100], Step [20/143], Loss: 3.6859\n",
      "Epoch [18/100], Step [40/143], Loss: 3.8038\n",
      "Epoch [18/100], Step [60/143], Loss: 3.7323\n",
      "Epoch [18/100], Step [80/143], Loss: 3.9295\n",
      "Epoch [18/100], Step [100/143], Loss: 4.4049\n",
      "Epoch [18/100], Step [120/143], Loss: 3.8086\n",
      "Epoch [18/100], Step [140/143], Loss: 3.8964\n",
      "Epoch [19/100], Step [20/143], Loss: 3.6489\n",
      "Epoch [19/100], Step [40/143], Loss: 3.7601\n",
      "Epoch [19/100], Step [60/143], Loss: 3.6710\n",
      "Epoch [19/100], Step [80/143], Loss: 3.8784\n",
      "Epoch [19/100], Step [100/143], Loss: 4.3668\n",
      "Epoch [19/100], Step [120/143], Loss: 3.7689\n",
      "Epoch [19/100], Step [140/143], Loss: 3.8518\n",
      "Epoch [20/100], Step [20/143], Loss: 3.6148\n",
      "Epoch [20/100], Step [40/143], Loss: 3.7184\n",
      "Epoch [20/100], Step [60/143], Loss: 3.6114\n",
      "Epoch [20/100], Step [80/143], Loss: 3.8272\n",
      "Epoch [20/100], Step [100/143], Loss: 4.3224\n",
      "Epoch [20/100], Step [120/143], Loss: 3.7311\n",
      "Epoch [20/100], Step [140/143], Loss: 3.8093\n",
      "Epoch [21/100], Step [20/143], Loss: 3.5828\n",
      "Epoch [21/100], Step [40/143], Loss: 3.6781\n",
      "Epoch [21/100], Step [60/143], Loss: 3.5536\n",
      "Epoch [21/100], Step [80/143], Loss: 3.7758\n",
      "Epoch [21/100], Step [100/143], Loss: 4.2727\n",
      "Epoch [21/100], Step [120/143], Loss: 3.6949\n",
      "Epoch [21/100], Step [140/143], Loss: 3.7685\n",
      "Epoch [22/100], Step [20/143], Loss: 3.5525\n",
      "Epoch [22/100], Step [40/143], Loss: 3.6385\n",
      "Epoch [22/100], Step [60/143], Loss: 3.4977\n",
      "Epoch [22/100], Step [80/143], Loss: 3.7240\n",
      "Epoch [22/100], Step [100/143], Loss: 4.2190\n",
      "Epoch [22/100], Step [120/143], Loss: 3.6601\n",
      "Epoch [22/100], Step [140/143], Loss: 3.7291\n",
      "Epoch [23/100], Step [20/143], Loss: 3.5232\n",
      "Epoch [23/100], Step [40/143], Loss: 3.5994\n",
      "Epoch [23/100], Step [60/143], Loss: 3.4439\n",
      "Epoch [23/100], Step [80/143], Loss: 3.6719\n",
      "Epoch [23/100], Step [100/143], Loss: 4.1620\n",
      "Epoch [23/100], Step [120/143], Loss: 3.6265\n",
      "Epoch [23/100], Step [140/143], Loss: 3.6908\n",
      "Epoch [24/100], Step [20/143], Loss: 3.4946\n",
      "Epoch [24/100], Step [40/143], Loss: 3.5604\n",
      "Epoch [24/100], Step [60/143], Loss: 3.3923\n",
      "Epoch [24/100], Step [80/143], Loss: 3.6194\n",
      "Epoch [24/100], Step [100/143], Loss: 4.1025\n",
      "Epoch [24/100], Step [120/143], Loss: 3.5940\n",
      "Epoch [24/100], Step [140/143], Loss: 3.6532\n",
      "Epoch [25/100], Step [20/143], Loss: 3.4662\n",
      "Epoch [25/100], Step [40/143], Loss: 3.5210\n",
      "Epoch [25/100], Step [60/143], Loss: 3.3428\n",
      "Epoch [25/100], Step [80/143], Loss: 3.5665\n",
      "Epoch [25/100], Step [100/143], Loss: 4.0408\n",
      "Epoch [25/100], Step [120/143], Loss: 3.5621\n",
      "Epoch [25/100], Step [140/143], Loss: 3.6162\n",
      "Epoch [26/100], Step [20/143], Loss: 3.4377\n",
      "Epoch [26/100], Step [40/143], Loss: 3.4811\n",
      "Epoch [26/100], Step [60/143], Loss: 3.2953\n",
      "Epoch [26/100], Step [80/143], Loss: 3.5131\n",
      "Epoch [26/100], Step [100/143], Loss: 3.9772\n",
      "Epoch [26/100], Step [120/143], Loss: 3.5307\n",
      "Epoch [26/100], Step [140/143], Loss: 3.5794\n",
      "Epoch [27/100], Step [20/143], Loss: 3.4089\n",
      "Epoch [27/100], Step [40/143], Loss: 3.4404\n",
      "Epoch [27/100], Step [60/143], Loss: 3.2497\n",
      "Epoch [27/100], Step [80/143], Loss: 3.4593\n",
      "Epoch [27/100], Step [100/143], Loss: 3.9117\n",
      "Epoch [27/100], Step [120/143], Loss: 3.4995\n",
      "Epoch [27/100], Step [140/143], Loss: 3.5428\n",
      "Epoch [28/100], Step [20/143], Loss: 3.3794\n",
      "Epoch [28/100], Step [40/143], Loss: 3.3988\n",
      "Epoch [28/100], Step [60/143], Loss: 3.2058\n",
      "Epoch [28/100], Step [80/143], Loss: 3.4050\n",
      "Epoch [28/100], Step [100/143], Loss: 3.8443\n",
      "Epoch [28/100], Step [120/143], Loss: 3.4681\n",
      "Epoch [28/100], Step [140/143], Loss: 3.5060\n",
      "Epoch [29/100], Step [20/143], Loss: 3.3493\n",
      "Epoch [29/100], Step [40/143], Loss: 3.3562\n",
      "Epoch [29/100], Step [60/143], Loss: 3.1634\n",
      "Epoch [29/100], Step [80/143], Loss: 3.3503\n",
      "Epoch [29/100], Step [100/143], Loss: 3.7751\n",
      "Epoch [29/100], Step [120/143], Loss: 3.4364\n",
      "Epoch [29/100], Step [140/143], Loss: 3.4690\n",
      "Epoch [30/100], Step [20/143], Loss: 3.3183\n",
      "Epoch [30/100], Step [40/143], Loss: 3.3124\n",
      "Epoch [30/100], Step [60/143], Loss: 3.1222\n",
      "Epoch [30/100], Step [80/143], Loss: 3.2953\n",
      "Epoch [30/100], Step [100/143], Loss: 3.7040\n",
      "Epoch [30/100], Step [120/143], Loss: 3.4041\n",
      "Epoch [30/100], Step [140/143], Loss: 3.4316\n",
      "Epoch [31/100], Step [20/143], Loss: 3.2865\n",
      "Epoch [31/100], Step [40/143], Loss: 3.2674\n",
      "Epoch [31/100], Step [60/143], Loss: 3.0820\n",
      "Epoch [31/100], Step [80/143], Loss: 3.2398\n",
      "Epoch [31/100], Step [100/143], Loss: 3.6312\n",
      "Epoch [31/100], Step [120/143], Loss: 3.3711\n",
      "Epoch [31/100], Step [140/143], Loss: 3.3938\n",
      "Epoch [32/100], Step [20/143], Loss: 3.2538\n",
      "Epoch [32/100], Step [40/143], Loss: 3.2212\n",
      "Epoch [32/100], Step [60/143], Loss: 3.0427\n",
      "Epoch [32/100], Step [80/143], Loss: 3.1840\n",
      "Epoch [32/100], Step [100/143], Loss: 3.5569\n",
      "Epoch [32/100], Step [120/143], Loss: 3.3371\n",
      "Epoch [32/100], Step [140/143], Loss: 3.3553\n",
      "Epoch [33/100], Step [20/143], Loss: 3.2202\n",
      "Epoch [33/100], Step [40/143], Loss: 3.1738\n",
      "Epoch [33/100], Step [60/143], Loss: 3.0039\n",
      "Epoch [33/100], Step [80/143], Loss: 3.1279\n",
      "Epoch [33/100], Step [100/143], Loss: 3.4814\n",
      "Epoch [33/100], Step [120/143], Loss: 3.3022\n",
      "Epoch [33/100], Step [140/143], Loss: 3.3162\n",
      "Epoch [34/100], Step [20/143], Loss: 3.1857\n",
      "Epoch [34/100], Step [40/143], Loss: 3.1251\n",
      "Epoch [34/100], Step [60/143], Loss: 2.9656\n",
      "Epoch [34/100], Step [80/143], Loss: 3.0715\n",
      "Epoch [34/100], Step [100/143], Loss: 3.4049\n",
      "Epoch [34/100], Step [120/143], Loss: 3.2661\n",
      "Epoch [34/100], Step [140/143], Loss: 3.2764\n",
      "Epoch [35/100], Step [20/143], Loss: 3.1503\n",
      "Epoch [35/100], Step [40/143], Loss: 3.0752\n",
      "Epoch [35/100], Step [60/143], Loss: 2.9274\n",
      "Epoch [35/100], Step [80/143], Loss: 3.0148\n",
      "Epoch [35/100], Step [100/143], Loss: 3.3278\n",
      "Epoch [35/100], Step [120/143], Loss: 3.2289\n",
      "Epoch [35/100], Step [140/143], Loss: 3.2358\n",
      "Epoch [36/100], Step [20/143], Loss: 3.1141\n",
      "Epoch [36/100], Step [40/143], Loss: 3.0241\n",
      "Epoch [36/100], Step [60/143], Loss: 2.8892\n",
      "Epoch [36/100], Step [80/143], Loss: 2.9578\n",
      "Epoch [36/100], Step [100/143], Loss: 3.2506\n",
      "Epoch [36/100], Step [120/143], Loss: 3.1905\n",
      "Epoch [36/100], Step [140/143], Loss: 3.1943\n",
      "Epoch [37/100], Step [20/143], Loss: 3.0770\n",
      "Epoch [37/100], Step [40/143], Loss: 2.9718\n",
      "Epoch [37/100], Step [60/143], Loss: 2.8510\n",
      "Epoch [37/100], Step [80/143], Loss: 2.9005\n",
      "Epoch [37/100], Step [100/143], Loss: 3.1738\n",
      "Epoch [37/100], Step [120/143], Loss: 3.1509\n",
      "Epoch [37/100], Step [140/143], Loss: 3.1519\n",
      "Epoch [38/100], Step [20/143], Loss: 3.0391\n",
      "Epoch [38/100], Step [40/143], Loss: 2.9183\n",
      "Epoch [38/100], Step [60/143], Loss: 2.8125\n",
      "Epoch [38/100], Step [80/143], Loss: 2.8428\n",
      "Epoch [38/100], Step [100/143], Loss: 3.0976\n",
      "Epoch [38/100], Step [120/143], Loss: 3.1100\n",
      "Epoch [38/100], Step [140/143], Loss: 3.1086\n",
      "Epoch [39/100], Step [20/143], Loss: 3.0004\n",
      "Epoch [39/100], Step [40/143], Loss: 2.8638\n",
      "Epoch [39/100], Step [60/143], Loss: 2.7736\n",
      "Epoch [39/100], Step [80/143], Loss: 2.7849\n",
      "Epoch [39/100], Step [100/143], Loss: 3.0225\n",
      "Epoch [39/100], Step [120/143], Loss: 3.0680\n",
      "Epoch [39/100], Step [140/143], Loss: 3.0644\n",
      "Epoch [40/100], Step [20/143], Loss: 2.9609\n",
      "Epoch [40/100], Step [40/143], Loss: 2.8082\n",
      "Epoch [40/100], Step [60/143], Loss: 2.7343\n",
      "Epoch [40/100], Step [80/143], Loss: 2.7267\n",
      "Epoch [40/100], Step [100/143], Loss: 2.9489\n",
      "Epoch [40/100], Step [120/143], Loss: 3.0247\n",
      "Epoch [40/100], Step [140/143], Loss: 3.0191\n",
      "Epoch [41/100], Step [20/143], Loss: 2.9206\n",
      "Epoch [41/100], Step [40/143], Loss: 2.7516\n",
      "Epoch [41/100], Step [60/143], Loss: 2.6944\n",
      "Epoch [41/100], Step [80/143], Loss: 2.6683\n",
      "Epoch [41/100], Step [100/143], Loss: 2.8769\n",
      "Epoch [41/100], Step [120/143], Loss: 2.9802\n",
      "Epoch [41/100], Step [140/143], Loss: 2.9730\n",
      "Epoch [42/100], Step [20/143], Loss: 2.8795\n",
      "Epoch [42/100], Step [40/143], Loss: 2.6942\n",
      "Epoch [42/100], Step [60/143], Loss: 2.6539\n",
      "Epoch [42/100], Step [80/143], Loss: 2.6096\n",
      "Epoch [42/100], Step [100/143], Loss: 2.8068\n",
      "Epoch [42/100], Step [120/143], Loss: 2.9345\n",
      "Epoch [42/100], Step [140/143], Loss: 2.9258\n",
      "Epoch [43/100], Step [20/143], Loss: 2.8376\n",
      "Epoch [43/100], Step [40/143], Loss: 2.6359\n",
      "Epoch [43/100], Step [60/143], Loss: 2.6127\n",
      "Epoch [43/100], Step [80/143], Loss: 2.5508\n",
      "Epoch [43/100], Step [100/143], Loss: 2.7386\n",
      "Epoch [43/100], Step [120/143], Loss: 2.8877\n",
      "Epoch [43/100], Step [140/143], Loss: 2.8777\n",
      "Epoch [44/100], Step [20/143], Loss: 2.7950\n",
      "Epoch [44/100], Step [40/143], Loss: 2.5769\n",
      "Epoch [44/100], Step [60/143], Loss: 2.5708\n",
      "Epoch [44/100], Step [80/143], Loss: 2.4918\n",
      "Epoch [44/100], Step [100/143], Loss: 2.6724\n",
      "Epoch [44/100], Step [120/143], Loss: 2.8397\n",
      "Epoch [44/100], Step [140/143], Loss: 2.8286\n",
      "Epoch [45/100], Step [20/143], Loss: 2.7517\n",
      "Epoch [45/100], Step [40/143], Loss: 2.5173\n",
      "Epoch [45/100], Step [60/143], Loss: 2.5280\n",
      "Epoch [45/100], Step [80/143], Loss: 2.4327\n",
      "Epoch [45/100], Step [100/143], Loss: 2.6082\n",
      "Epoch [45/100], Step [120/143], Loss: 2.7907\n",
      "Epoch [45/100], Step [140/143], Loss: 2.7786\n",
      "Epoch [46/100], Step [20/143], Loss: 2.7076\n",
      "Epoch [46/100], Step [40/143], Loss: 2.4572\n",
      "Epoch [46/100], Step [60/143], Loss: 2.4845\n",
      "Epoch [46/100], Step [80/143], Loss: 2.3737\n",
      "Epoch [46/100], Step [100/143], Loss: 2.5460\n",
      "Epoch [46/100], Step [120/143], Loss: 2.7406\n",
      "Epoch [46/100], Step [140/143], Loss: 2.7278\n",
      "Epoch [47/100], Step [20/143], Loss: 2.6628\n",
      "Epoch [47/100], Step [40/143], Loss: 2.3967\n",
      "Epoch [47/100], Step [60/143], Loss: 2.4402\n",
      "Epoch [47/100], Step [80/143], Loss: 2.3148\n",
      "Epoch [47/100], Step [100/143], Loss: 2.4857\n",
      "Epoch [47/100], Step [120/143], Loss: 2.6895\n",
      "Epoch [47/100], Step [140/143], Loss: 2.6760\n",
      "Epoch [48/100], Step [20/143], Loss: 2.6174\n",
      "Epoch [48/100], Step [40/143], Loss: 2.3360\n",
      "Epoch [48/100], Step [60/143], Loss: 2.3951\n",
      "Epoch [48/100], Step [80/143], Loss: 2.2560\n",
      "Epoch [48/100], Step [100/143], Loss: 2.4272\n",
      "Epoch [48/100], Step [120/143], Loss: 2.6374\n",
      "Epoch [48/100], Step [140/143], Loss: 2.6235\n",
      "Epoch [49/100], Step [20/143], Loss: 2.5714\n",
      "Epoch [49/100], Step [40/143], Loss: 2.2753\n",
      "Epoch [49/100], Step [60/143], Loss: 2.3492\n",
      "Epoch [49/100], Step [80/143], Loss: 2.1976\n",
      "Epoch [49/100], Step [100/143], Loss: 2.3705\n",
      "Epoch [49/100], Step [120/143], Loss: 2.5844\n",
      "Epoch [49/100], Step [140/143], Loss: 2.5702\n",
      "Epoch [50/100], Step [20/143], Loss: 2.5249\n",
      "Epoch [50/100], Step [40/143], Loss: 2.2147\n",
      "Epoch [50/100], Step [60/143], Loss: 2.3025\n",
      "Epoch [50/100], Step [80/143], Loss: 2.1395\n",
      "Epoch [50/100], Step [100/143], Loss: 2.3155\n",
      "Epoch [50/100], Step [120/143], Loss: 2.5306\n",
      "Epoch [50/100], Step [140/143], Loss: 2.5162\n",
      "Epoch [51/100], Step [20/143], Loss: 2.4779\n",
      "Epoch [51/100], Step [40/143], Loss: 2.1544\n",
      "Epoch [51/100], Step [60/143], Loss: 2.2551\n",
      "Epoch [51/100], Step [80/143], Loss: 2.0818\n",
      "Epoch [51/100], Step [100/143], Loss: 2.2621\n",
      "Epoch [51/100], Step [120/143], Loss: 2.4760\n",
      "Epoch [51/100], Step [140/143], Loss: 2.4616\n",
      "Epoch [52/100], Step [20/143], Loss: 2.4304\n",
      "Epoch [52/100], Step [40/143], Loss: 2.0946\n",
      "Epoch [52/100], Step [60/143], Loss: 2.2070\n",
      "Epoch [52/100], Step [80/143], Loss: 2.0247\n",
      "Epoch [52/100], Step [100/143], Loss: 2.2104\n",
      "Epoch [52/100], Step [120/143], Loss: 2.4208\n",
      "Epoch [52/100], Step [140/143], Loss: 2.4065\n",
      "Epoch [53/100], Step [20/143], Loss: 2.3827\n",
      "Epoch [53/100], Step [40/143], Loss: 2.0356\n",
      "Epoch [53/100], Step [60/143], Loss: 2.1582\n",
      "Epoch [53/100], Step [80/143], Loss: 1.9682\n",
      "Epoch [53/100], Step [100/143], Loss: 2.1603\n",
      "Epoch [53/100], Step [120/143], Loss: 2.3650\n",
      "Epoch [53/100], Step [140/143], Loss: 2.3509\n",
      "Epoch [54/100], Step [20/143], Loss: 2.3347\n",
      "Epoch [54/100], Step [40/143], Loss: 1.9774\n",
      "Epoch [54/100], Step [60/143], Loss: 2.1089\n",
      "Epoch [54/100], Step [80/143], Loss: 1.9124\n",
      "Epoch [54/100], Step [100/143], Loss: 2.1117\n",
      "Epoch [54/100], Step [120/143], Loss: 2.3088\n",
      "Epoch [54/100], Step [140/143], Loss: 2.2951\n",
      "Epoch [55/100], Step [20/143], Loss: 2.2866\n",
      "Epoch [55/100], Step [40/143], Loss: 1.9202\n",
      "Epoch [55/100], Step [60/143], Loss: 2.0592\n",
      "Epoch [55/100], Step [80/143], Loss: 1.8574\n",
      "Epoch [55/100], Step [100/143], Loss: 2.0647\n",
      "Epoch [55/100], Step [120/143], Loss: 2.2522\n",
      "Epoch [55/100], Step [140/143], Loss: 2.2390\n",
      "Epoch [56/100], Step [20/143], Loss: 2.2383\n",
      "Epoch [56/100], Step [40/143], Loss: 1.8642\n",
      "Epoch [56/100], Step [60/143], Loss: 2.0091\n",
      "Epoch [56/100], Step [80/143], Loss: 1.8032\n",
      "Epoch [56/100], Step [100/143], Loss: 2.0190\n",
      "Epoch [56/100], Step [120/143], Loss: 2.1956\n",
      "Epoch [56/100], Step [140/143], Loss: 2.1828\n",
      "Epoch [57/100], Step [20/143], Loss: 2.1902\n",
      "Epoch [57/100], Step [40/143], Loss: 1.8096\n",
      "Epoch [57/100], Step [60/143], Loss: 1.9587\n",
      "Epoch [57/100], Step [80/143], Loss: 1.7498\n",
      "Epoch [57/100], Step [100/143], Loss: 1.9748\n",
      "Epoch [57/100], Step [120/143], Loss: 2.1389\n",
      "Epoch [57/100], Step [140/143], Loss: 2.1266\n",
      "Epoch [58/100], Step [20/143], Loss: 2.1421\n",
      "Epoch [58/100], Step [40/143], Loss: 1.7564\n",
      "Epoch [58/100], Step [60/143], Loss: 1.9082\n",
      "Epoch [58/100], Step [80/143], Loss: 1.6975\n",
      "Epoch [58/100], Step [100/143], Loss: 1.9320\n",
      "Epoch [58/100], Step [120/143], Loss: 2.0824\n",
      "Epoch [58/100], Step [140/143], Loss: 2.0707\n",
      "Epoch [59/100], Step [20/143], Loss: 2.0943\n",
      "Epoch [59/100], Step [40/143], Loss: 1.7046\n",
      "Epoch [59/100], Step [60/143], Loss: 1.8576\n",
      "Epoch [59/100], Step [80/143], Loss: 1.6462\n",
      "Epoch [59/100], Step [100/143], Loss: 1.8904\n",
      "Epoch [59/100], Step [120/143], Loss: 2.0262\n",
      "Epoch [59/100], Step [140/143], Loss: 2.0150\n",
      "Epoch [60/100], Step [20/143], Loss: 2.0468\n",
      "Epoch [60/100], Step [40/143], Loss: 1.6545\n",
      "Epoch [60/100], Step [60/143], Loss: 1.8072\n",
      "Epoch [60/100], Step [80/143], Loss: 1.5960\n",
      "Epoch [60/100], Step [100/143], Loss: 1.8501\n",
      "Epoch [60/100], Step [120/143], Loss: 1.9704\n",
      "Epoch [60/100], Step [140/143], Loss: 1.9598\n",
      "Epoch [61/100], Step [20/143], Loss: 1.9997\n",
      "Epoch [61/100], Step [40/143], Loss: 1.6059\n",
      "Epoch [61/100], Step [60/143], Loss: 1.7569\n",
      "Epoch [61/100], Step [80/143], Loss: 1.5469\n",
      "Epoch [61/100], Step [100/143], Loss: 1.8109\n",
      "Epoch [61/100], Step [120/143], Loss: 1.9153\n",
      "Epoch [61/100], Step [140/143], Loss: 1.9052\n",
      "Epoch [62/100], Step [20/143], Loss: 1.9531\n",
      "Epoch [62/100], Step [40/143], Loss: 1.5589\n",
      "Epoch [62/100], Step [60/143], Loss: 1.7070\n",
      "Epoch [62/100], Step [80/143], Loss: 1.4990\n",
      "Epoch [62/100], Step [100/143], Loss: 1.7728\n",
      "Epoch [62/100], Step [120/143], Loss: 1.8609\n",
      "Epoch [62/100], Step [140/143], Loss: 1.8513\n",
      "Epoch [63/100], Step [20/143], Loss: 1.9071\n",
      "Epoch [63/100], Step [40/143], Loss: 1.5135\n",
      "Epoch [63/100], Step [60/143], Loss: 1.6575\n",
      "Epoch [63/100], Step [80/143], Loss: 1.4524\n",
      "Epoch [63/100], Step [100/143], Loss: 1.7357\n",
      "Epoch [63/100], Step [120/143], Loss: 1.8073\n",
      "Epoch [63/100], Step [140/143], Loss: 1.7984\n",
      "Epoch [64/100], Step [20/143], Loss: 1.8617\n",
      "Epoch [64/100], Step [40/143], Loss: 1.4697\n",
      "Epoch [64/100], Step [60/143], Loss: 1.6085\n",
      "Epoch [64/100], Step [80/143], Loss: 1.4070\n",
      "Epoch [64/100], Step [100/143], Loss: 1.6996\n",
      "Epoch [64/100], Step [120/143], Loss: 1.7548\n",
      "Epoch [64/100], Step [140/143], Loss: 1.7466\n",
      "Epoch [65/100], Step [20/143], Loss: 1.8170\n",
      "Epoch [65/100], Step [40/143], Loss: 1.4273\n",
      "Epoch [65/100], Step [60/143], Loss: 1.5603\n",
      "Epoch [65/100], Step [80/143], Loss: 1.3630\n",
      "Epoch [65/100], Step [100/143], Loss: 1.6644\n",
      "Epoch [65/100], Step [120/143], Loss: 1.7033\n",
      "Epoch [65/100], Step [140/143], Loss: 1.6959\n",
      "Epoch [66/100], Step [20/143], Loss: 1.7732\n",
      "Epoch [66/100], Step [40/143], Loss: 1.3865\n",
      "Epoch [66/100], Step [60/143], Loss: 1.5129\n",
      "Epoch [66/100], Step [80/143], Loss: 1.3203\n",
      "Epoch [66/100], Step [100/143], Loss: 1.6300\n",
      "Epoch [66/100], Step [120/143], Loss: 1.6531\n",
      "Epoch [66/100], Step [140/143], Loss: 1.6467\n",
      "Epoch [67/100], Step [20/143], Loss: 1.7301\n",
      "Epoch [67/100], Step [40/143], Loss: 1.3470\n",
      "Epoch [67/100], Step [60/143], Loss: 1.4664\n",
      "Epoch [67/100], Step [80/143], Loss: 1.2790\n",
      "Epoch [67/100], Step [100/143], Loss: 1.5964\n",
      "Epoch [67/100], Step [120/143], Loss: 1.6040\n",
      "Epoch [67/100], Step [140/143], Loss: 1.5988\n",
      "Epoch [68/100], Step [20/143], Loss: 1.6880\n",
      "Epoch [68/100], Step [40/143], Loss: 1.3089\n",
      "Epoch [68/100], Step [60/143], Loss: 1.4208\n",
      "Epoch [68/100], Step [80/143], Loss: 1.2390\n",
      "Epoch [68/100], Step [100/143], Loss: 1.5636\n",
      "Epoch [68/100], Step [120/143], Loss: 1.5563\n",
      "Epoch [68/100], Step [140/143], Loss: 1.5526\n",
      "Epoch [69/100], Step [20/143], Loss: 1.6468\n",
      "Epoch [69/100], Step [40/143], Loss: 1.2721\n",
      "Epoch [69/100], Step [60/143], Loss: 1.3764\n",
      "Epoch [69/100], Step [80/143], Loss: 1.2004\n",
      "Epoch [69/100], Step [100/143], Loss: 1.5314\n",
      "Epoch [69/100], Step [120/143], Loss: 1.5098\n",
      "Epoch [69/100], Step [140/143], Loss: 1.5079\n",
      "Epoch [70/100], Step [20/143], Loss: 1.6066\n",
      "Epoch [70/100], Step [40/143], Loss: 1.2365\n",
      "Epoch [70/100], Step [60/143], Loss: 1.3330\n",
      "Epoch [70/100], Step [80/143], Loss: 1.1632\n",
      "Epoch [70/100], Step [100/143], Loss: 1.4999\n",
      "Epoch [70/100], Step [120/143], Loss: 1.4648\n",
      "Epoch [70/100], Step [140/143], Loss: 1.4650\n",
      "Epoch [71/100], Step [20/143], Loss: 1.5674\n",
      "Epoch [71/100], Step [40/143], Loss: 1.2021\n",
      "Epoch [71/100], Step [60/143], Loss: 1.2909\n",
      "Epoch [71/100], Step [80/143], Loss: 1.1274\n",
      "Epoch [71/100], Step [100/143], Loss: 1.4691\n",
      "Epoch [71/100], Step [120/143], Loss: 1.4212\n",
      "Epoch [71/100], Step [140/143], Loss: 1.4238\n",
      "Epoch [72/100], Step [20/143], Loss: 1.5292\n",
      "Epoch [72/100], Step [40/143], Loss: 1.1687\n",
      "Epoch [72/100], Step [60/143], Loss: 1.2500\n",
      "Epoch [72/100], Step [80/143], Loss: 1.0928\n",
      "Epoch [72/100], Step [100/143], Loss: 1.4388\n",
      "Epoch [72/100], Step [120/143], Loss: 1.3789\n",
      "Epoch [72/100], Step [140/143], Loss: 1.3843\n",
      "Epoch [73/100], Step [20/143], Loss: 1.4921\n",
      "Epoch [73/100], Step [40/143], Loss: 1.1364\n",
      "Epoch [73/100], Step [60/143], Loss: 1.2104\n",
      "Epoch [73/100], Step [80/143], Loss: 1.0596\n",
      "Epoch [73/100], Step [100/143], Loss: 1.4091\n",
      "Epoch [73/100], Step [120/143], Loss: 1.3380\n",
      "Epoch [73/100], Step [140/143], Loss: 1.3465\n",
      "Epoch [74/100], Step [20/143], Loss: 1.4560\n",
      "Epoch [74/100], Step [40/143], Loss: 1.1051\n",
      "Epoch [74/100], Step [60/143], Loss: 1.1720\n",
      "Epoch [74/100], Step [80/143], Loss: 1.0277\n",
      "Epoch [74/100], Step [100/143], Loss: 1.3800\n",
      "Epoch [74/100], Step [120/143], Loss: 1.2986\n",
      "Epoch [74/100], Step [140/143], Loss: 1.3104\n",
      "Epoch [75/100], Step [20/143], Loss: 1.4209\n",
      "Epoch [75/100], Step [40/143], Loss: 1.0747\n",
      "Epoch [75/100], Step [60/143], Loss: 1.1349\n",
      "Epoch [75/100], Step [80/143], Loss: 0.9970\n",
      "Epoch [75/100], Step [100/143], Loss: 1.3513\n",
      "Epoch [75/100], Step [120/143], Loss: 1.2604\n",
      "Epoch [75/100], Step [140/143], Loss: 1.2760\n",
      "Epoch [76/100], Step [20/143], Loss: 1.3869\n",
      "Epoch [76/100], Step [40/143], Loss: 1.0452\n",
      "Epoch [76/100], Step [60/143], Loss: 1.0991\n",
      "Epoch [76/100], Step [80/143], Loss: 0.9675\n",
      "Epoch [76/100], Step [100/143], Loss: 1.3232\n",
      "Epoch [76/100], Step [120/143], Loss: 1.2236\n",
      "Epoch [76/100], Step [140/143], Loss: 1.2431\n",
      "Epoch [77/100], Step [20/143], Loss: 1.3539\n",
      "Epoch [77/100], Step [40/143], Loss: 1.0166\n",
      "Epoch [77/100], Step [60/143], Loss: 1.0645\n",
      "Epoch [77/100], Step [80/143], Loss: 0.9392\n",
      "Epoch [77/100], Step [100/143], Loss: 1.2956\n",
      "Epoch [77/100], Step [120/143], Loss: 1.1882\n",
      "Epoch [77/100], Step [140/143], Loss: 1.2118\n",
      "Epoch [78/100], Step [20/143], Loss: 1.3218\n",
      "Epoch [78/100], Step [40/143], Loss: 0.9887\n",
      "Epoch [78/100], Step [60/143], Loss: 1.0312\n",
      "Epoch [78/100], Step [80/143], Loss: 0.9120\n",
      "Epoch [78/100], Step [100/143], Loss: 1.2685\n",
      "Epoch [78/100], Step [120/143], Loss: 1.1539\n",
      "Epoch [78/100], Step [140/143], Loss: 1.1818\n",
      "Epoch [79/100], Step [20/143], Loss: 1.2908\n",
      "Epoch [79/100], Step [40/143], Loss: 0.9616\n",
      "Epoch [79/100], Step [60/143], Loss: 0.9991\n",
      "Epoch [79/100], Step [80/143], Loss: 0.8859\n",
      "Epoch [79/100], Step [100/143], Loss: 1.2418\n",
      "Epoch [79/100], Step [120/143], Loss: 1.1209\n",
      "Epoch [79/100], Step [140/143], Loss: 1.1533\n",
      "Epoch [80/100], Step [20/143], Loss: 1.2607\n",
      "Epoch [80/100], Step [40/143], Loss: 0.9353\n",
      "Epoch [80/100], Step [60/143], Loss: 0.9682\n",
      "Epoch [80/100], Step [80/143], Loss: 0.8608\n",
      "Epoch [80/100], Step [100/143], Loss: 1.2157\n",
      "Epoch [80/100], Step [120/143], Loss: 1.0891\n",
      "Epoch [80/100], Step [140/143], Loss: 1.1260\n",
      "Epoch [81/100], Step [20/143], Loss: 1.2315\n",
      "Epoch [81/100], Step [40/143], Loss: 0.9096\n",
      "Epoch [81/100], Step [60/143], Loss: 0.9384\n",
      "Epoch [81/100], Step [80/143], Loss: 0.8366\n",
      "Epoch [81/100], Step [100/143], Loss: 1.1900\n",
      "Epoch [81/100], Step [120/143], Loss: 1.0584\n",
      "Epoch [81/100], Step [140/143], Loss: 1.0999\n",
      "Epoch [82/100], Step [20/143], Loss: 1.2032\n",
      "Epoch [82/100], Step [40/143], Loss: 0.8846\n",
      "Epoch [82/100], Step [60/143], Loss: 0.9097\n",
      "Epoch [82/100], Step [80/143], Loss: 0.8134\n",
      "Epoch [82/100], Step [100/143], Loss: 1.1647\n",
      "Epoch [82/100], Step [120/143], Loss: 1.0288\n",
      "Epoch [82/100], Step [140/143], Loss: 1.0748\n",
      "Epoch [83/100], Step [20/143], Loss: 1.1758\n",
      "Epoch [83/100], Step [40/143], Loss: 0.8603\n",
      "Epoch [83/100], Step [60/143], Loss: 0.8820\n",
      "Epoch [83/100], Step [80/143], Loss: 0.7910\n",
      "Epoch [83/100], Step [100/143], Loss: 1.1400\n",
      "Epoch [83/100], Step [120/143], Loss: 1.0002\n",
      "Epoch [83/100], Step [140/143], Loss: 1.0509\n",
      "Epoch [84/100], Step [20/143], Loss: 1.1491\n",
      "Epoch [84/100], Step [40/143], Loss: 0.8366\n",
      "Epoch [84/100], Step [60/143], Loss: 0.8554\n",
      "Epoch [84/100], Step [80/143], Loss: 0.7695\n",
      "Epoch [84/100], Step [100/143], Loss: 1.1156\n",
      "Epoch [84/100], Step [120/143], Loss: 0.9726\n",
      "Epoch [84/100], Step [140/143], Loss: 1.0278\n",
      "Epoch [85/100], Step [20/143], Loss: 1.1233\n",
      "Epoch [85/100], Step [40/143], Loss: 0.8135\n",
      "Epoch [85/100], Step [60/143], Loss: 0.8297\n",
      "Epoch [85/100], Step [80/143], Loss: 0.7488\n",
      "Epoch [85/100], Step [100/143], Loss: 1.0917\n",
      "Epoch [85/100], Step [120/143], Loss: 0.9460\n",
      "Epoch [85/100], Step [140/143], Loss: 1.0057\n",
      "Epoch [86/100], Step [20/143], Loss: 1.0983\n",
      "Epoch [86/100], Step [40/143], Loss: 0.7910\n",
      "Epoch [86/100], Step [60/143], Loss: 0.8050\n",
      "Epoch [86/100], Step [80/143], Loss: 0.7288\n",
      "Epoch [86/100], Step [100/143], Loss: 1.0683\n",
      "Epoch [86/100], Step [120/143], Loss: 0.9203\n",
      "Epoch [86/100], Step [140/143], Loss: 0.9843\n",
      "Epoch [87/100], Step [20/143], Loss: 1.0740\n",
      "Epoch [87/100], Step [40/143], Loss: 0.7691\n",
      "Epoch [87/100], Step [60/143], Loss: 0.7811\n",
      "Epoch [87/100], Step [80/143], Loss: 0.7095\n",
      "Epoch [87/100], Step [100/143], Loss: 1.0453\n",
      "Epoch [87/100], Step [120/143], Loss: 0.8955\n",
      "Epoch [87/100], Step [140/143], Loss: 0.9637\n",
      "Epoch [88/100], Step [20/143], Loss: 1.0503\n",
      "Epoch [88/100], Step [40/143], Loss: 0.7477\n",
      "Epoch [88/100], Step [60/143], Loss: 0.7581\n",
      "Epoch [88/100], Step [80/143], Loss: 0.6909\n",
      "Epoch [88/100], Step [100/143], Loss: 1.0227\n",
      "Epoch [88/100], Step [120/143], Loss: 0.8714\n",
      "Epoch [88/100], Step [140/143], Loss: 0.9439\n",
      "Epoch [89/100], Step [20/143], Loss: 1.0274\n",
      "Epoch [89/100], Step [40/143], Loss: 0.7269\n",
      "Epoch [89/100], Step [60/143], Loss: 0.7360\n",
      "Epoch [89/100], Step [80/143], Loss: 0.6728\n",
      "Epoch [89/100], Step [100/143], Loss: 1.0006\n",
      "Epoch [89/100], Step [120/143], Loss: 0.8482\n",
      "Epoch [89/100], Step [140/143], Loss: 0.9246\n",
      "Epoch [90/100], Step [20/143], Loss: 1.0051\n",
      "Epoch [90/100], Step [40/143], Loss: 0.7066\n",
      "Epoch [90/100], Step [60/143], Loss: 0.7146\n",
      "Epoch [90/100], Step [80/143], Loss: 0.6554\n",
      "Epoch [90/100], Step [100/143], Loss: 0.9789\n",
      "Epoch [90/100], Step [120/143], Loss: 0.8257\n",
      "Epoch [90/100], Step [140/143], Loss: 0.9060\n",
      "Epoch [91/100], Step [20/143], Loss: 0.9835\n",
      "Epoch [91/100], Step [40/143], Loss: 0.6869\n",
      "Epoch [91/100], Step [60/143], Loss: 0.6940\n",
      "Epoch [91/100], Step [80/143], Loss: 0.6386\n",
      "Epoch [91/100], Step [100/143], Loss: 0.9576\n",
      "Epoch [91/100], Step [120/143], Loss: 0.8040\n",
      "Epoch [91/100], Step [140/143], Loss: 0.8880\n",
      "Epoch [92/100], Step [20/143], Loss: 0.9624\n",
      "Epoch [92/100], Step [40/143], Loss: 0.6676\n",
      "Epoch [92/100], Step [60/143], Loss: 0.6742\n",
      "Epoch [92/100], Step [80/143], Loss: 0.6223\n",
      "Epoch [92/100], Step [100/143], Loss: 0.9368\n",
      "Epoch [92/100], Step [120/143], Loss: 0.7830\n",
      "Epoch [92/100], Step [140/143], Loss: 0.8705\n",
      "Epoch [93/100], Step [20/143], Loss: 0.9420\n",
      "Epoch [93/100], Step [40/143], Loss: 0.6489\n",
      "Epoch [93/100], Step [60/143], Loss: 0.6550\n",
      "Epoch [93/100], Step [80/143], Loss: 0.6065\n",
      "Epoch [93/100], Step [100/143], Loss: 0.9163\n",
      "Epoch [93/100], Step [120/143], Loss: 0.7626\n",
      "Epoch [93/100], Step [140/143], Loss: 0.8535\n",
      "Epoch [94/100], Step [20/143], Loss: 0.9220\n",
      "Epoch [94/100], Step [40/143], Loss: 0.6307\n",
      "Epoch [94/100], Step [60/143], Loss: 0.6365\n",
      "Epoch [94/100], Step [80/143], Loss: 0.5911\n",
      "Epoch [94/100], Step [100/143], Loss: 0.8963\n",
      "Epoch [94/100], Step [120/143], Loss: 0.7428\n",
      "Epoch [94/100], Step [140/143], Loss: 0.8369\n",
      "Epoch [95/100], Step [20/143], Loss: 0.9027\n",
      "Epoch [95/100], Step [40/143], Loss: 0.6130\n",
      "Epoch [95/100], Step [60/143], Loss: 0.6187\n",
      "Epoch [95/100], Step [80/143], Loss: 0.5763\n",
      "Epoch [95/100], Step [100/143], Loss: 0.8767\n",
      "Epoch [95/100], Step [120/143], Loss: 0.7237\n",
      "Epoch [95/100], Step [140/143], Loss: 0.8209\n",
      "Epoch [96/100], Step [20/143], Loss: 0.8838\n",
      "Epoch [96/100], Step [40/143], Loss: 0.5957\n",
      "Epoch [96/100], Step [60/143], Loss: 0.6014\n",
      "Epoch [96/100], Step [80/143], Loss: 0.5618\n",
      "Epoch [96/100], Step [100/143], Loss: 0.8575\n",
      "Epoch [96/100], Step [120/143], Loss: 0.7052\n",
      "Epoch [96/100], Step [140/143], Loss: 0.8052\n",
      "Epoch [97/100], Step [20/143], Loss: 0.8655\n",
      "Epoch [97/100], Step [40/143], Loss: 0.5790\n",
      "Epoch [97/100], Step [60/143], Loss: 0.5848\n",
      "Epoch [97/100], Step [80/143], Loss: 0.5478\n",
      "Epoch [97/100], Step [100/143], Loss: 0.8387\n",
      "Epoch [97/100], Step [120/143], Loss: 0.6872\n",
      "Epoch [97/100], Step [140/143], Loss: 0.7900\n",
      "Epoch [98/100], Step [20/143], Loss: 0.8476\n",
      "Epoch [98/100], Step [40/143], Loss: 0.5627\n",
      "Epoch [98/100], Step [60/143], Loss: 0.5688\n",
      "Epoch [98/100], Step [80/143], Loss: 0.5342\n",
      "Epoch [98/100], Step [100/143], Loss: 0.8203\n",
      "Epoch [98/100], Step [120/143], Loss: 0.6698\n",
      "Epoch [98/100], Step [140/143], Loss: 0.7752\n",
      "Epoch [99/100], Step [20/143], Loss: 0.8303\n",
      "Epoch [99/100], Step [40/143], Loss: 0.5469\n",
      "Epoch [99/100], Step [60/143], Loss: 0.5533\n",
      "Epoch [99/100], Step [80/143], Loss: 0.5210\n",
      "Epoch [99/100], Step [100/143], Loss: 0.8023\n",
      "Epoch [99/100], Step [120/143], Loss: 0.6529\n",
      "Epoch [99/100], Step [140/143], Loss: 0.7607\n",
      "Epoch [100/100], Step [20/143], Loss: 0.8134\n",
      "Epoch [100/100], Step [40/143], Loss: 0.5316\n",
      "Epoch [100/100], Step [60/143], Loss: 0.5383\n",
      "Epoch [100/100], Step [80/143], Loss: 0.5081\n",
      "Epoch [100/100], Step [100/143], Loss: 0.7847\n",
      "Epoch [100/100], Step [120/143], Loss: 0.6366\n",
      "Epoch [100/100], Step [140/143], Loss: 0.7466\n"
     ]
    }
   ],
   "execution_count": 4
  },
  {
   "cell_type": "code",
   "id": "8a4e36b033918def",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:32:20.892642Z",
     "start_time": "2025-07-06T01:32:20.883571Z"
    }
   },
   "source": [
    "model.eval()\n",
    "\n",
    "def generate_text(content, steps, temperature=0.8):\n",
    "    \"\"\"Autoregressively generate `steps` characters after the seed `content`.\n",
    "\n",
    "    Args:\n",
    "        content: seed text; every character must exist in `word_to_idx`.\n",
    "        steps: number of characters to generate.\n",
    "        temperature: softmax temperature. Values < 1 sharpen the sampling\n",
    "            distribution (closer to greedy decoding); values > 1 flatten it\n",
    "            (more random output).\n",
    "\n",
    "    Returns:\n",
    "        The seed string concatenated with the generated characters.\n",
    "    \"\"\"\n",
    "    words = [word for word in content]\n",
    "\n",
    "    hidden = None\n",
    "    for step in range(steps):\n",
    "        # Fix: on the FIRST iteration feed the entire seed so the RNN's\n",
    "        # hidden state absorbs all of its context. Previously only the last\n",
    "        # character was fed while `hidden` was still None, silently\n",
    "        # discarding everything but the final character of a multi-char\n",
    "        # seed. After the first step, `hidden` carries the context, so\n",
    "        # feeding just the most recently generated character is sufficient.\n",
    "        if step == 0:\n",
    "            inputs = [word_to_idx[word] for word in words]\n",
    "        else:\n",
    "            inputs = [word_to_idx[words[-1]]]\n",
    "        inputs = torch.LongTensor(inputs)\n",
    "\n",
    "        # Shape expected by the model: (batch=1, seq_len).\n",
    "        inputs = inputs.view(1, -1)\n",
    "\n",
    "        # Forward pass\n",
    "        with torch.no_grad():\n",
    "            # `outputs` holds the logits for every time step; for prediction\n",
    "            # we only need the last one. `hidden` is kept across loop\n",
    "            # iterations so the model remembers everything generated so far.\n",
    "            outputs, hidden = model(inputs, hidden)\n",
    "            last_output = outputs[0, -1, :]  # logits of the final time step\n",
    "\n",
    "        # Temperature scaling: dividing the logits by `temperature` before\n",
    "        # softmax controls how peaked the distribution is, e.g. [8,2,2]/2 =\n",
    "        # [4,1,1] shrinks the gaps (more random), while /0.5 widens them.\n",
    "        probs = torch.softmax(last_output / temperature, dim=-1)\n",
    "\n",
    "        # Multinomial sampling: with probs like [0.3, 0.2, 0.5], index 2 is\n",
    "        # drawn 50% of the time — higher probability, higher chance of being\n",
    "        # picked, but any index with non-zero mass can appear.\n",
    "        result_idx = torch.multinomial(probs, 1).item()\n",
    "\n",
    "        # Greedy alternative (always pick the most likely character):\n",
    "        # result_idx = torch.argmax(probs).item()\n",
    "\n",
    "        # Append the sampled character to the running sequence.\n",
    "        words.append(idx_to_word[result_idx])\n",
    "\n",
    "    return ''.join(words)\n",
    "\n",
    "\n",
    "# Generate 20 characters; lower temperature -> closer to greedy decoding.\n",
    "print(generate_text(\"鹰\", 20, temperature=0.1))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
      "怅寥\n"
     ]
    }
   ],
   "execution_count": 10
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
