{
 "cells": [
  {
   "cell_type": "code",
   "id": "92c9da5866bdcf7",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:52:14.964576Z",
     "start_time": "2025-07-06T01:52:14.959798Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from absl.testing.parameterized import parameters\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import numpy as np\n",
    "\n",
    "# 示例文本数据，一首诗\n",
    "text = \"\"\"\n",
    "独立寒秋，湘江北去，橘子洲头。\n",
    "看万山红遍，层林尽染；漫江碧透，百舸争流。\n",
    "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
    "怅寥廓，问苍茫大地，谁主沉浮？\n",
    "携来百侣曾游，忆往昔峥嵘岁月稠。\n",
    "恰同学少年，风华正茂；书生意气，挥斥方遒。\n",
    "指点江山，激扬文字，粪土当年万户侯。\n",
    "曾记否，到中流击水，浪遏飞舟？\n",
    "\"\"\"\n",
    "\n",
    "# 创建词汇表\n",
    "words = set(text)\n",
    "vocab_size = len(words)\n",
    "word_to_idx = {word: i for i, word in enumerate(words)}\n",
    "idx_to_word = {i: word for i, word in enumerate(words)}\n",
    "\n",
    "print(idx_to_word)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{0: '遏', 1: '头', 2: '月', 3: '茂', 4: '洲', 5: '鹰', 6: '江', 7: '寥', 8: '华', 9: '秋', 10: '尽', 11: '\\n', 12: '底', 13: '流', 14: '方', 15: '风', 16: '竞', 17: '峥', 18: '到', 19: '文', 20: '，', 21: '浅', 22: '红', 23: '岁', 24: '携', 25: '学', 26: '染', 27: '鱼', 28: '扬', 29: '橘', 30: '来', 31: '看', 32: '长', 33: '年', 34: '粪', 35: '气', 36: '由', 37: '湘', 38: '昔', 39: '嵘', 40: '遒', 41: '大', 42: '少', 43: '舟', 44: '土', 45: '字', 46: '林', 47: '类', 48: '空', 49: '万', 50: '正', 51: '忆', 52: '浪', 53: '谁', 54: '意', 55: '侣', 56: '廓', 57: '独', 58: '否', 59: '激', 60: '；', 61: '问', 62: '侯', 63: '子', 64: '户', 65: '地', 66: '恰', 67: '游', 68: '漫', 69: '天', 70: '去', 71: '击', 72: '同', 73: '中', 74: '透', 75: '寒', 76: '挥', 77: '争', 78: '飞', 79: '主', 80: '浮', 81: '舸', 82: '怅', 83: '。', 84: '书', 85: '往', 86: '层', 87: '茫', 88: '生', 89: '斥', 90: '霜', 91: '自', 92: '翔', 93: '立', 94: '碧', 95: '？', 96: '水', 97: '指', 98: '曾', 99: '稠', 100: '记', 101: '苍', 102: '点', 103: '当', 104: '山', 105: '百', 106: '沉', 107: '北', 108: '遍'}\n"
     ]
    }
   ],
   "execution_count": 6
  },
  {
   "cell_type": "code",
   "id": "91280acf83012c57",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:52:14.986523Z",
     "start_time": "2025-07-06T01:52:14.981683Z"
    }
   },
   "source": [
    "\n",
    "# 超参数设置\n",
    "SEQ_LENGTH = 5  # 输入序列长度\n",
    "BATCH_SIZE = 1\n",
    "HIDDEN_SIZE = 128\n",
    "INPUT_SIZE = 128\n",
    "\n",
    "\n",
    "# 创建训练数据\n",
    "class TextDataset(Dataset):\n",
    "    def __init__(self, text, seq_length):\n",
    "        self.text = text\n",
    "        self.seq_length = seq_length\n",
    "\n",
    "        # 转换为索引序列\n",
    "        self.data = [word_to_idx[ch] for ch in text]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.data) - self.seq_length\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # 文本里的某个序列 X\n",
    "        input_seq = self.data[idx:idx + self.seq_length]\n",
    "\n",
    "        # 目标序列 Y\n",
    "        target_seq = self.data[idx + 1:idx + self.seq_length + 1]\n",
    "\n",
    "        # 相当于，假如语料为abcdefg, input_seq=abc, target_seq=bcd\n",
    "\n",
    "        return torch.LongTensor(input_seq), torch.LongTensor(target_seq)\n",
    "\n",
    "\n",
    "dataset = TextDataset(text, SEQ_LENGTH)\n",
    "dataloader = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)\n",
    "\n",
    "for input_seq, target_seq in dataloader:\n",
    "    print(input_seq)\n",
    "    print(target_seq)\n",
    "    break"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[11, 57, 93, 75,  9]])\n",
      "tensor([[57, 93, 75,  9, 20]])\n"
     ]
    }
   ],
   "execution_count": 7
  },
  {
   "cell_type": "code",
   "id": "d9e9fd013d3d01ba",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:52:15.020712Z",
     "start_time": "2025-07-06T01:52:15.016625Z"
    }
   },
   "source": [
    "# 大都督周瑜（我的微信: it_zhouyu）\n",
    "class ZhouyuModel(nn.Module):\n",
    "    def __init__(self, vocab_size, input_size, hidden_size):\n",
    "        super().__init__()\n",
    "\n",
    "        self.hidden_size = hidden_size\n",
    "\n",
    "        # 嵌入层，输入词索引，输出词向量\n",
    "        self.embedding = nn.Embedding(vocab_size, input_size)\n",
    "\n",
    "        # LSTM层\n",
    "        self.lstm = nn.LSTM(input_size, hidden_size, batch_first=True, num_layers=1)\n",
    "\n",
    "        # 输出层\n",
    "        self.out_linear = nn.Linear(hidden_size, vocab_size)\n",
    "\n",
    "    def forward(self, x, state=None):\n",
    "        embedded = self.embedding(x)\n",
    "        outputs, (hidden, cell) = self.lstm(embedded, state)\n",
    "        outputs = self.out_linear(outputs)\n",
    "        return outputs, (hidden, cell)\n",
    "\n",
    "\n",
    "# 初始化模型\n",
    "model = ZhouyuModel(vocab_size, INPUT_SIZE, HIDDEN_SIZE)\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.005)"
   ],
   "outputs": [],
   "execution_count": 8
  },
  {
   "cell_type": "code",
   "id": "2348428ce74982e4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:52:30.449284Z",
     "start_time": "2025-07-06T01:52:15.036901Z"
    }
   },
   "source": [
    "for epoch in range(100):\n",
    "    for i, (inputs, targets) in enumerate(dataloader):\n",
    "        # 前向传播\n",
    "        outputs, _ = model(inputs)\n",
    "\n",
    "        # 计算损失\n",
    "        # 用每个时间步的输出和每个时间步的标签进行比较，并平均损失\n",
    "        loss = criterion(\n",
    "            outputs.view(-1, vocab_size),  # (batch_size*seq_length, vocab_size)\n",
    "            targets.view(-1)  # (batch_size*seq_length)\n",
    "        )\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "\n",
    "        # 梯度裁剪防止爆炸\n",
    "        # nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)\n",
    "\n",
    "        optimizer.step()\n",
    "\n",
    "        if (i + 1) % 20 == 0:\n",
    "            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'\n",
    "                  .format(epoch + 1, 100, i + 1, len(dataloader), loss.item()))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch [1/100], Step [20/143], Loss: 4.7222\n",
      "Epoch [1/100], Step [40/143], Loss: 4.6618\n",
      "Epoch [1/100], Step [60/143], Loss: 4.6219\n",
      "Epoch [1/100], Step [80/143], Loss: 4.7059\n",
      "Epoch [1/100], Step [100/143], Loss: 4.7206\n",
      "Epoch [1/100], Step [120/143], Loss: 4.5916\n",
      "Epoch [1/100], Step [140/143], Loss: 4.6788\n",
      "Epoch [2/100], Step [20/143], Loss: 4.6787\n",
      "Epoch [2/100], Step [40/143], Loss: 4.6191\n",
      "Epoch [2/100], Step [60/143], Loss: 4.5911\n",
      "Epoch [2/100], Step [80/143], Loss: 4.6740\n",
      "Epoch [2/100], Step [100/143], Loss: 4.7023\n",
      "Epoch [2/100], Step [120/143], Loss: 4.5613\n",
      "Epoch [2/100], Step [140/143], Loss: 4.6417\n",
      "Epoch [3/100], Step [20/143], Loss: 4.6354\n",
      "Epoch [3/100], Step [40/143], Loss: 4.5765\n",
      "Epoch [3/100], Step [60/143], Loss: 4.5601\n",
      "Epoch [3/100], Step [80/143], Loss: 4.6423\n",
      "Epoch [3/100], Step [100/143], Loss: 4.6840\n",
      "Epoch [3/100], Step [120/143], Loss: 4.5310\n",
      "Epoch [3/100], Step [140/143], Loss: 4.6046\n",
      "Epoch [4/100], Step [20/143], Loss: 4.5919\n",
      "Epoch [4/100], Step [40/143], Loss: 4.5338\n",
      "Epoch [4/100], Step [60/143], Loss: 4.5290\n",
      "Epoch [4/100], Step [80/143], Loss: 4.6104\n",
      "Epoch [4/100], Step [100/143], Loss: 4.6659\n",
      "Epoch [4/100], Step [120/143], Loss: 4.5006\n",
      "Epoch [4/100], Step [140/143], Loss: 4.5675\n",
      "Epoch [5/100], Step [20/143], Loss: 4.5481\n",
      "Epoch [5/100], Step [40/143], Loss: 4.4907\n",
      "Epoch [5/100], Step [60/143], Loss: 4.4975\n",
      "Epoch [5/100], Step [80/143], Loss: 4.5785\n",
      "Epoch [5/100], Step [100/143], Loss: 4.6478\n",
      "Epoch [5/100], Step [120/143], Loss: 4.4701\n",
      "Epoch [5/100], Step [140/143], Loss: 4.5303\n",
      "Epoch [6/100], Step [20/143], Loss: 4.5039\n",
      "Epoch [6/100], Step [40/143], Loss: 4.4471\n",
      "Epoch [6/100], Step [60/143], Loss: 4.4658\n",
      "Epoch [6/100], Step [80/143], Loss: 4.5462\n",
      "Epoch [6/100], Step [100/143], Loss: 4.6298\n",
      "Epoch [6/100], Step [120/143], Loss: 4.4393\n",
      "Epoch [6/100], Step [140/143], Loss: 4.4927\n",
      "Epoch [7/100], Step [20/143], Loss: 4.4589\n",
      "Epoch [7/100], Step [40/143], Loss: 4.4029\n",
      "Epoch [7/100], Step [60/143], Loss: 4.4335\n",
      "Epoch [7/100], Step [80/143], Loss: 4.5137\n",
      "Epoch [7/100], Step [100/143], Loss: 4.6117\n",
      "Epoch [7/100], Step [120/143], Loss: 4.4082\n",
      "Epoch [7/100], Step [140/143], Loss: 4.4547\n",
      "Epoch [8/100], Step [20/143], Loss: 4.4132\n",
      "Epoch [8/100], Step [40/143], Loss: 4.3580\n",
      "Epoch [8/100], Step [60/143], Loss: 4.4008\n",
      "Epoch [8/100], Step [80/143], Loss: 4.4807\n",
      "Epoch [8/100], Step [100/143], Loss: 4.5937\n",
      "Epoch [8/100], Step [120/143], Loss: 4.3768\n",
      "Epoch [8/100], Step [140/143], Loss: 4.4162\n",
      "Epoch [9/100], Step [20/143], Loss: 4.3665\n",
      "Epoch [9/100], Step [40/143], Loss: 4.3122\n",
      "Epoch [9/100], Step [60/143], Loss: 4.3675\n",
      "Epoch [9/100], Step [80/143], Loss: 4.4472\n",
      "Epoch [9/100], Step [100/143], Loss: 4.5756\n",
      "Epoch [9/100], Step [120/143], Loss: 4.3451\n",
      "Epoch [9/100], Step [140/143], Loss: 4.3771\n",
      "Epoch [10/100], Step [20/143], Loss: 4.3187\n",
      "Epoch [10/100], Step [40/143], Loss: 4.2653\n",
      "Epoch [10/100], Step [60/143], Loss: 4.3336\n",
      "Epoch [10/100], Step [80/143], Loss: 4.4131\n",
      "Epoch [10/100], Step [100/143], Loss: 4.5574\n",
      "Epoch [10/100], Step [120/143], Loss: 4.3129\n",
      "Epoch [10/100], Step [140/143], Loss: 4.3372\n",
      "Epoch [11/100], Step [20/143], Loss: 4.2696\n",
      "Epoch [11/100], Step [40/143], Loss: 4.2174\n",
      "Epoch [11/100], Step [60/143], Loss: 4.2990\n",
      "Epoch [11/100], Step [80/143], Loss: 4.3783\n",
      "Epoch [11/100], Step [100/143], Loss: 4.5392\n",
      "Epoch [11/100], Step [120/143], Loss: 4.2803\n",
      "Epoch [11/100], Step [140/143], Loss: 4.2965\n",
      "Epoch [12/100], Step [20/143], Loss: 4.2193\n",
      "Epoch [12/100], Step [40/143], Loss: 4.1682\n",
      "Epoch [12/100], Step [60/143], Loss: 4.2636\n",
      "Epoch [12/100], Step [80/143], Loss: 4.3427\n",
      "Epoch [12/100], Step [100/143], Loss: 4.5208\n",
      "Epoch [12/100], Step [120/143], Loss: 4.2472\n",
      "Epoch [12/100], Step [140/143], Loss: 4.2548\n",
      "Epoch [13/100], Step [20/143], Loss: 4.1676\n",
      "Epoch [13/100], Step [40/143], Loss: 4.1178\n",
      "Epoch [13/100], Step [60/143], Loss: 4.2273\n",
      "Epoch [13/100], Step [80/143], Loss: 4.3063\n",
      "Epoch [13/100], Step [100/143], Loss: 4.5023\n",
      "Epoch [13/100], Step [120/143], Loss: 4.2136\n",
      "Epoch [13/100], Step [140/143], Loss: 4.2122\n",
      "Epoch [14/100], Step [20/143], Loss: 4.1146\n",
      "Epoch [14/100], Step [40/143], Loss: 4.0661\n",
      "Epoch [14/100], Step [60/143], Loss: 4.1901\n",
      "Epoch [14/100], Step [80/143], Loss: 4.2689\n",
      "Epoch [14/100], Step [100/143], Loss: 4.4836\n",
      "Epoch [14/100], Step [120/143], Loss: 4.1794\n",
      "Epoch [14/100], Step [140/143], Loss: 4.1685\n",
      "Epoch [15/100], Step [20/143], Loss: 4.0604\n",
      "Epoch [15/100], Step [40/143], Loss: 4.0131\n",
      "Epoch [15/100], Step [60/143], Loss: 4.1520\n",
      "Epoch [15/100], Step [80/143], Loss: 4.2304\n",
      "Epoch [15/100], Step [100/143], Loss: 4.4647\n",
      "Epoch [15/100], Step [120/143], Loss: 4.1446\n",
      "Epoch [15/100], Step [140/143], Loss: 4.1237\n",
      "Epoch [16/100], Step [20/143], Loss: 4.0052\n",
      "Epoch [16/100], Step [40/143], Loss: 3.9590\n",
      "Epoch [16/100], Step [60/143], Loss: 4.1129\n",
      "Epoch [16/100], Step [80/143], Loss: 4.1909\n",
      "Epoch [16/100], Step [100/143], Loss: 4.4455\n",
      "Epoch [16/100], Step [120/143], Loss: 4.1091\n",
      "Epoch [16/100], Step [140/143], Loss: 4.0777\n",
      "Epoch [17/100], Step [20/143], Loss: 3.9491\n",
      "Epoch [17/100], Step [40/143], Loss: 3.9038\n",
      "Epoch [17/100], Step [60/143], Loss: 4.0728\n",
      "Epoch [17/100], Step [80/143], Loss: 4.1503\n",
      "Epoch [17/100], Step [100/143], Loss: 4.4260\n",
      "Epoch [17/100], Step [120/143], Loss: 4.0730\n",
      "Epoch [17/100], Step [140/143], Loss: 4.0307\n",
      "Epoch [18/100], Step [20/143], Loss: 3.8927\n",
      "Epoch [18/100], Step [40/143], Loss: 3.8477\n",
      "Epoch [18/100], Step [60/143], Loss: 4.0316\n",
      "Epoch [18/100], Step [80/143], Loss: 4.1086\n",
      "Epoch [18/100], Step [100/143], Loss: 4.4062\n",
      "Epoch [18/100], Step [120/143], Loss: 4.0362\n",
      "Epoch [18/100], Step [140/143], Loss: 3.9827\n",
      "Epoch [19/100], Step [20/143], Loss: 3.8362\n",
      "Epoch [19/100], Step [40/143], Loss: 3.7910\n",
      "Epoch [19/100], Step [60/143], Loss: 3.9894\n",
      "Epoch [19/100], Step [80/143], Loss: 4.0657\n",
      "Epoch [19/100], Step [100/143], Loss: 4.3860\n",
      "Epoch [19/100], Step [120/143], Loss: 3.9987\n",
      "Epoch [19/100], Step [140/143], Loss: 3.9337\n",
      "Epoch [20/100], Step [20/143], Loss: 3.7802\n",
      "Epoch [20/100], Step [40/143], Loss: 3.7340\n",
      "Epoch [20/100], Step [60/143], Loss: 3.9462\n",
      "Epoch [20/100], Step [80/143], Loss: 4.0219\n",
      "Epoch [20/100], Step [100/143], Loss: 4.3653\n",
      "Epoch [20/100], Step [120/143], Loss: 3.9605\n",
      "Epoch [20/100], Step [140/143], Loss: 3.8838\n",
      "Epoch [21/100], Step [20/143], Loss: 3.7253\n",
      "Epoch [21/100], Step [40/143], Loss: 3.6771\n",
      "Epoch [21/100], Step [60/143], Loss: 3.9021\n",
      "Epoch [21/100], Step [80/143], Loss: 3.9772\n",
      "Epoch [21/100], Step [100/143], Loss: 4.3442\n",
      "Epoch [21/100], Step [120/143], Loss: 3.9216\n",
      "Epoch [21/100], Step [140/143], Loss: 3.8333\n",
      "Epoch [22/100], Step [20/143], Loss: 3.6720\n",
      "Epoch [22/100], Step [40/143], Loss: 3.6206\n",
      "Epoch [22/100], Step [60/143], Loss: 3.8573\n",
      "Epoch [22/100], Step [80/143], Loss: 3.9318\n",
      "Epoch [22/100], Step [100/143], Loss: 4.3225\n",
      "Epoch [22/100], Step [120/143], Loss: 3.8821\n",
      "Epoch [22/100], Step [140/143], Loss: 3.7824\n",
      "Epoch [23/100], Step [20/143], Loss: 3.6208\n",
      "Epoch [23/100], Step [40/143], Loss: 3.5650\n",
      "Epoch [23/100], Step [60/143], Loss: 3.8119\n",
      "Epoch [23/100], Step [80/143], Loss: 3.8861\n",
      "Epoch [23/100], Step [100/143], Loss: 4.3003\n",
      "Epoch [23/100], Step [120/143], Loss: 3.8420\n",
      "Epoch [23/100], Step [140/143], Loss: 3.7313\n",
      "Epoch [24/100], Step [20/143], Loss: 3.5721\n",
      "Epoch [24/100], Step [40/143], Loss: 3.5105\n",
      "Epoch [24/100], Step [60/143], Loss: 3.7662\n",
      "Epoch [24/100], Step [80/143], Loss: 3.8402\n",
      "Epoch [24/100], Step [100/143], Loss: 4.2775\n",
      "Epoch [24/100], Step [120/143], Loss: 3.8014\n",
      "Epoch [24/100], Step [140/143], Loss: 3.6804\n",
      "Epoch [25/100], Step [20/143], Loss: 3.5259\n",
      "Epoch [25/100], Step [40/143], Loss: 3.4574\n",
      "Epoch [25/100], Step [60/143], Loss: 3.7205\n",
      "Epoch [25/100], Step [80/143], Loss: 3.7945\n",
      "Epoch [25/100], Step [100/143], Loss: 4.2541\n",
      "Epoch [25/100], Step [120/143], Loss: 3.7605\n",
      "Epoch [25/100], Step [140/143], Loss: 3.6298\n",
      "Epoch [26/100], Step [20/143], Loss: 3.4823\n",
      "Epoch [26/100], Step [40/143], Loss: 3.4058\n",
      "Epoch [26/100], Step [60/143], Loss: 3.6751\n",
      "Epoch [26/100], Step [80/143], Loss: 3.7494\n",
      "Epoch [26/100], Step [100/143], Loss: 4.2300\n",
      "Epoch [26/100], Step [120/143], Loss: 3.7193\n",
      "Epoch [26/100], Step [140/143], Loss: 3.5799\n",
      "Epoch [27/100], Step [20/143], Loss: 3.4410\n",
      "Epoch [27/100], Step [40/143], Loss: 3.3557\n",
      "Epoch [27/100], Step [60/143], Loss: 3.6301\n",
      "Epoch [27/100], Step [80/143], Loss: 3.7049\n",
      "Epoch [27/100], Step [100/143], Loss: 4.2053\n",
      "Epoch [27/100], Step [120/143], Loss: 3.6779\n",
      "Epoch [27/100], Step [140/143], Loss: 3.5307\n",
      "Epoch [28/100], Step [20/143], Loss: 3.4016\n",
      "Epoch [28/100], Step [40/143], Loss: 3.3070\n",
      "Epoch [28/100], Step [60/143], Loss: 3.5859\n",
      "Epoch [28/100], Step [80/143], Loss: 3.6612\n",
      "Epoch [28/100], Step [100/143], Loss: 4.1800\n",
      "Epoch [28/100], Step [120/143], Loss: 3.6364\n",
      "Epoch [28/100], Step [140/143], Loss: 3.4825\n",
      "Epoch [29/100], Step [20/143], Loss: 3.3637\n",
      "Epoch [29/100], Step [40/143], Loss: 3.2595\n",
      "Epoch [29/100], Step [60/143], Loss: 3.5425\n",
      "Epoch [29/100], Step [80/143], Loss: 3.6186\n",
      "Epoch [29/100], Step [100/143], Loss: 4.1539\n",
      "Epoch [29/100], Step [120/143], Loss: 3.5948\n",
      "Epoch [29/100], Step [140/143], Loss: 3.4353\n",
      "Epoch [30/100], Step [20/143], Loss: 3.3268\n",
      "Epoch [30/100], Step [40/143], Loss: 3.2130\n",
      "Epoch [30/100], Step [60/143], Loss: 3.5000\n",
      "Epoch [30/100], Step [80/143], Loss: 3.5769\n",
      "Epoch [30/100], Step [100/143], Loss: 4.1272\n",
      "Epoch [30/100], Step [120/143], Loss: 3.5532\n",
      "Epoch [30/100], Step [140/143], Loss: 3.3890\n",
      "Epoch [31/100], Step [20/143], Loss: 3.2905\n",
      "Epoch [31/100], Step [40/143], Loss: 3.1673\n",
      "Epoch [31/100], Step [60/143], Loss: 3.4584\n",
      "Epoch [31/100], Step [80/143], Loss: 3.5363\n",
      "Epoch [31/100], Step [100/143], Loss: 4.0997\n",
      "Epoch [31/100], Step [120/143], Loss: 3.5116\n",
      "Epoch [31/100], Step [140/143], Loss: 3.3436\n",
      "Epoch [32/100], Step [20/143], Loss: 3.2544\n",
      "Epoch [32/100], Step [40/143], Loss: 3.1221\n",
      "Epoch [32/100], Step [60/143], Loss: 3.4177\n",
      "Epoch [32/100], Step [80/143], Loss: 3.4966\n",
      "Epoch [32/100], Step [100/143], Loss: 4.0715\n",
      "Epoch [32/100], Step [120/143], Loss: 3.4699\n",
      "Epoch [32/100], Step [140/143], Loss: 3.2991\n",
      "Epoch [33/100], Step [20/143], Loss: 3.2183\n",
      "Epoch [33/100], Step [40/143], Loss: 3.0772\n",
      "Epoch [33/100], Step [60/143], Loss: 3.3779\n",
      "Epoch [33/100], Step [80/143], Loss: 3.4577\n",
      "Epoch [33/100], Step [100/143], Loss: 4.0426\n",
      "Epoch [33/100], Step [120/143], Loss: 3.4283\n",
      "Epoch [33/100], Step [140/143], Loss: 3.2552\n",
      "Epoch [34/100], Step [20/143], Loss: 3.1819\n",
      "Epoch [34/100], Step [40/143], Loss: 3.0325\n",
      "Epoch [34/100], Step [60/143], Loss: 3.3387\n",
      "Epoch [34/100], Step [80/143], Loss: 3.4196\n",
      "Epoch [34/100], Step [100/143], Loss: 4.0130\n",
      "Epoch [34/100], Step [120/143], Loss: 3.3869\n",
      "Epoch [34/100], Step [140/143], Loss: 3.2120\n",
      "Epoch [35/100], Step [20/143], Loss: 3.1451\n",
      "Epoch [35/100], Step [40/143], Loss: 2.9879\n",
      "Epoch [35/100], Step [60/143], Loss: 3.3002\n",
      "Epoch [35/100], Step [80/143], Loss: 3.3822\n",
      "Epoch [35/100], Step [100/143], Loss: 3.9827\n",
      "Epoch [35/100], Step [120/143], Loss: 3.3455\n",
      "Epoch [35/100], Step [140/143], Loss: 3.1692\n",
      "Epoch [36/100], Step [20/143], Loss: 3.1078\n",
      "Epoch [36/100], Step [40/143], Loss: 2.9431\n",
      "Epoch [36/100], Step [60/143], Loss: 3.2622\n",
      "Epoch [36/100], Step [80/143], Loss: 3.3453\n",
      "Epoch [36/100], Step [100/143], Loss: 3.9516\n",
      "Epoch [36/100], Step [120/143], Loss: 3.3043\n",
      "Epoch [36/100], Step [140/143], Loss: 3.1267\n",
      "Epoch [37/100], Step [20/143], Loss: 3.0699\n",
      "Epoch [37/100], Step [40/143], Loss: 2.8982\n",
      "Epoch [37/100], Step [60/143], Loss: 3.2246\n",
      "Epoch [37/100], Step [80/143], Loss: 3.3089\n",
      "Epoch [37/100], Step [100/143], Loss: 3.9197\n",
      "Epoch [37/100], Step [120/143], Loss: 3.2633\n",
      "Epoch [37/100], Step [140/143], Loss: 3.0846\n",
      "Epoch [38/100], Step [20/143], Loss: 3.0314\n",
      "Epoch [38/100], Step [40/143], Loss: 2.8532\n",
      "Epoch [38/100], Step [60/143], Loss: 3.1873\n",
      "Epoch [38/100], Step [80/143], Loss: 3.2728\n",
      "Epoch [38/100], Step [100/143], Loss: 3.8870\n",
      "Epoch [38/100], Step [120/143], Loss: 3.2225\n",
      "Epoch [38/100], Step [140/143], Loss: 3.0425\n",
      "Epoch [39/100], Step [20/143], Loss: 2.9924\n",
      "Epoch [39/100], Step [40/143], Loss: 2.8079\n",
      "Epoch [39/100], Step [60/143], Loss: 3.1503\n",
      "Epoch [39/100], Step [80/143], Loss: 3.2369\n",
      "Epoch [39/100], Step [100/143], Loss: 3.8536\n",
      "Epoch [39/100], Step [120/143], Loss: 3.1819\n",
      "Epoch [39/100], Step [140/143], Loss: 3.0005\n",
      "Epoch [40/100], Step [20/143], Loss: 2.9528\n",
      "Epoch [40/100], Step [40/143], Loss: 2.7623\n",
      "Epoch [40/100], Step [60/143], Loss: 3.1134\n",
      "Epoch [40/100], Step [80/143], Loss: 3.2012\n",
      "Epoch [40/100], Step [100/143], Loss: 3.8194\n",
      "Epoch [40/100], Step [120/143], Loss: 3.1416\n",
      "Epoch [40/100], Step [140/143], Loss: 2.9585\n",
      "Epoch [41/100], Step [20/143], Loss: 2.9127\n",
      "Epoch [41/100], Step [40/143], Loss: 2.7165\n",
      "Epoch [41/100], Step [60/143], Loss: 3.0766\n",
      "Epoch [41/100], Step [80/143], Loss: 3.1657\n",
      "Epoch [41/100], Step [100/143], Loss: 3.7843\n",
      "Epoch [41/100], Step [120/143], Loss: 3.1016\n",
      "Epoch [41/100], Step [140/143], Loss: 2.9165\n",
      "Epoch [42/100], Step [20/143], Loss: 2.8722\n",
      "Epoch [42/100], Step [40/143], Loss: 2.6704\n",
      "Epoch [42/100], Step [60/143], Loss: 3.0399\n",
      "Epoch [42/100], Step [80/143], Loss: 3.1302\n",
      "Epoch [42/100], Step [100/143], Loss: 3.7484\n",
      "Epoch [42/100], Step [120/143], Loss: 3.0618\n",
      "Epoch [42/100], Step [140/143], Loss: 2.8743\n",
      "Epoch [43/100], Step [20/143], Loss: 2.8313\n",
      "Epoch [43/100], Step [40/143], Loss: 2.6240\n",
      "Epoch [43/100], Step [60/143], Loss: 3.0032\n",
      "Epoch [43/100], Step [80/143], Loss: 3.0947\n",
      "Epoch [43/100], Step [100/143], Loss: 3.7116\n",
      "Epoch [43/100], Step [120/143], Loss: 3.0222\n",
      "Epoch [43/100], Step [140/143], Loss: 2.8321\n",
      "Epoch [44/100], Step [20/143], Loss: 2.7901\n",
      "Epoch [44/100], Step [40/143], Loss: 2.5774\n",
      "Epoch [44/100], Step [60/143], Loss: 2.9664\n",
      "Epoch [44/100], Step [80/143], Loss: 3.0591\n",
      "Epoch [44/100], Step [100/143], Loss: 3.6740\n",
      "Epoch [44/100], Step [120/143], Loss: 2.9828\n",
      "Epoch [44/100], Step [140/143], Loss: 2.7896\n",
      "Epoch [45/100], Step [20/143], Loss: 2.7485\n",
      "Epoch [45/100], Step [40/143], Loss: 2.5305\n",
      "Epoch [45/100], Step [60/143], Loss: 2.9295\n",
      "Epoch [45/100], Step [80/143], Loss: 3.0234\n",
      "Epoch [45/100], Step [100/143], Loss: 3.6354\n",
      "Epoch [45/100], Step [120/143], Loss: 2.9436\n",
      "Epoch [45/100], Step [140/143], Loss: 2.7469\n",
      "Epoch [46/100], Step [20/143], Loss: 2.7067\n",
      "Epoch [46/100], Step [40/143], Loss: 2.4834\n",
      "Epoch [46/100], Step [60/143], Loss: 2.8924\n",
      "Epoch [46/100], Step [80/143], Loss: 2.9876\n",
      "Epoch [46/100], Step [100/143], Loss: 3.5960\n",
      "Epoch [46/100], Step [120/143], Loss: 2.9045\n",
      "Epoch [46/100], Step [140/143], Loss: 2.7040\n",
      "Epoch [47/100], Step [20/143], Loss: 2.6646\n",
      "Epoch [47/100], Step [40/143], Loss: 2.4360\n",
      "Epoch [47/100], Step [60/143], Loss: 2.8552\n",
      "Epoch [47/100], Step [80/143], Loss: 2.9516\n",
      "Epoch [47/100], Step [100/143], Loss: 3.5557\n",
      "Epoch [47/100], Step [120/143], Loss: 2.8656\n",
      "Epoch [47/100], Step [140/143], Loss: 2.6609\n",
      "Epoch [48/100], Step [20/143], Loss: 2.6223\n",
      "Epoch [48/100], Step [40/143], Loss: 2.3885\n",
      "Epoch [48/100], Step [60/143], Loss: 2.8178\n",
      "Epoch [48/100], Step [80/143], Loss: 2.9154\n",
      "Epoch [48/100], Step [100/143], Loss: 3.5145\n",
      "Epoch [48/100], Step [120/143], Loss: 2.8267\n",
      "Epoch [48/100], Step [140/143], Loss: 2.6176\n",
      "Epoch [49/100], Step [20/143], Loss: 2.5798\n",
      "Epoch [49/100], Step [40/143], Loss: 2.3407\n",
      "Epoch [49/100], Step [60/143], Loss: 2.7801\n",
      "Epoch [49/100], Step [80/143], Loss: 2.8789\n",
      "Epoch [49/100], Step [100/143], Loss: 3.4724\n",
      "Epoch [49/100], Step [120/143], Loss: 2.7878\n",
      "Epoch [49/100], Step [140/143], Loss: 2.5740\n",
      "Epoch [50/100], Step [20/143], Loss: 2.5371\n",
      "Epoch [50/100], Step [40/143], Loss: 2.2928\n",
      "Epoch [50/100], Step [60/143], Loss: 2.7422\n",
      "Epoch [50/100], Step [80/143], Loss: 2.8421\n",
      "Epoch [50/100], Step [100/143], Loss: 3.4293\n",
      "Epoch [50/100], Step [120/143], Loss: 2.7489\n",
      "Epoch [50/100], Step [140/143], Loss: 2.5301\n",
      "Epoch [51/100], Step [20/143], Loss: 2.4943\n",
      "Epoch [51/100], Step [40/143], Loss: 2.2448\n",
      "Epoch [51/100], Step [60/143], Loss: 2.7040\n",
      "Epoch [51/100], Step [80/143], Loss: 2.8050\n",
      "Epoch [51/100], Step [100/143], Loss: 3.3854\n",
      "Epoch [51/100], Step [120/143], Loss: 2.7098\n",
      "Epoch [51/100], Step [140/143], Loss: 2.4860\n",
      "Epoch [52/100], Step [20/143], Loss: 2.4513\n",
      "Epoch [52/100], Step [40/143], Loss: 2.1967\n",
      "Epoch [52/100], Step [60/143], Loss: 2.6655\n",
      "Epoch [52/100], Step [80/143], Loss: 2.7676\n",
      "Epoch [52/100], Step [100/143], Loss: 3.3405\n",
      "Epoch [52/100], Step [120/143], Loss: 2.6707\n",
      "Epoch [52/100], Step [140/143], Loss: 2.4416\n",
      "Epoch [53/100], Step [20/143], Loss: 2.4082\n",
      "Epoch [53/100], Step [40/143], Loss: 2.1486\n",
      "Epoch [53/100], Step [60/143], Loss: 2.6266\n",
      "Epoch [53/100], Step [80/143], Loss: 2.7298\n",
      "Epoch [53/100], Step [100/143], Loss: 3.2948\n",
      "Epoch [53/100], Step [120/143], Loss: 2.6314\n",
      "Epoch [53/100], Step [140/143], Loss: 2.3970\n",
      "Epoch [54/100], Step [20/143], Loss: 2.3650\n",
      "Epoch [54/100], Step [40/143], Loss: 2.1005\n",
      "Epoch [54/100], Step [60/143], Loss: 2.5874\n",
      "Epoch [54/100], Step [80/143], Loss: 2.6916\n",
      "Epoch [54/100], Step [100/143], Loss: 3.2482\n",
      "Epoch [54/100], Step [120/143], Loss: 2.5919\n",
      "Epoch [54/100], Step [140/143], Loss: 2.3522\n",
      "Epoch [55/100], Step [20/143], Loss: 2.3217\n",
      "Epoch [55/100], Step [40/143], Loss: 2.0524\n",
      "Epoch [55/100], Step [60/143], Loss: 2.5479\n",
      "Epoch [55/100], Step [80/143], Loss: 2.6531\n",
      "Epoch [55/100], Step [100/143], Loss: 3.2007\n",
      "Epoch [55/100], Step [120/143], Loss: 2.5520\n",
      "Epoch [55/100], Step [140/143], Loss: 2.3072\n",
      "Epoch [56/100], Step [20/143], Loss: 2.2782\n",
      "Epoch [56/100], Step [40/143], Loss: 2.0045\n",
      "Epoch [56/100], Step [60/143], Loss: 2.5079\n",
      "Epoch [56/100], Step [80/143], Loss: 2.6141\n",
      "Epoch [56/100], Step [100/143], Loss: 3.1524\n",
      "Epoch [56/100], Step [120/143], Loss: 2.5119\n",
      "Epoch [56/100], Step [140/143], Loss: 2.2620\n",
      "Epoch [57/100], Step [20/143], Loss: 2.2347\n",
      "Epoch [57/100], Step [40/143], Loss: 1.9567\n",
      "Epoch [57/100], Step [60/143], Loss: 2.4676\n",
      "Epoch [57/100], Step [80/143], Loss: 2.5748\n",
      "Epoch [57/100], Step [100/143], Loss: 3.1032\n",
      "Epoch [57/100], Step [120/143], Loss: 2.4715\n",
      "Epoch [57/100], Step [140/143], Loss: 2.2166\n",
      "Epoch [58/100], Step [20/143], Loss: 2.1911\n",
      "Epoch [58/100], Step [40/143], Loss: 1.9092\n",
      "Epoch [58/100], Step [60/143], Loss: 2.4269\n",
      "Epoch [58/100], Step [80/143], Loss: 2.5350\n",
      "Epoch [58/100], Step [100/143], Loss: 3.0533\n",
      "Epoch [58/100], Step [120/143], Loss: 2.4307\n",
      "Epoch [58/100], Step [140/143], Loss: 2.1711\n",
      "Epoch [59/100], Step [20/143], Loss: 2.1475\n",
      "Epoch [59/100], Step [40/143], Loss: 1.8619\n",
      "Epoch [59/100], Step [60/143], Loss: 2.3857\n",
      "Epoch [59/100], Step [80/143], Loss: 2.4948\n",
      "Epoch [59/100], Step [100/143], Loss: 3.0026\n",
      "Epoch [59/100], Step [120/143], Loss: 2.3895\n",
      "Epoch [59/100], Step [140/143], Loss: 2.1255\n",
      "Epoch [60/100], Step [20/143], Loss: 2.1038\n",
      "Epoch [60/100], Step [40/143], Loss: 1.8151\n",
      "Epoch [60/100], Step [60/143], Loss: 2.3442\n",
      "Epoch [60/100], Step [80/143], Loss: 2.4542\n",
      "Epoch [60/100], Step [100/143], Loss: 2.9512\n",
      "Epoch [60/100], Step [120/143], Loss: 2.3479\n",
      "Epoch [60/100], Step [140/143], Loss: 2.0799\n",
      "Epoch [61/100], Step [20/143], Loss: 2.0602\n",
      "Epoch [61/100], Step [40/143], Loss: 1.7687\n",
      "Epoch [61/100], Step [60/143], Loss: 2.3023\n",
      "Epoch [61/100], Step [80/143], Loss: 2.4132\n",
      "Epoch [61/100], Step [100/143], Loss: 2.8991\n",
      "Epoch [61/100], Step [120/143], Loss: 2.3058\n",
      "Epoch [61/100], Step [140/143], Loss: 2.0343\n",
      "Epoch [62/100], Step [20/143], Loss: 2.0166\n",
      "Epoch [62/100], Step [40/143], Loss: 1.7228\n",
      "Epoch [62/100], Step [60/143], Loss: 2.2599\n",
      "Epoch [62/100], Step [80/143], Loss: 2.3718\n",
      "Epoch [62/100], Step [100/143], Loss: 2.8463\n",
      "Epoch [62/100], Step [120/143], Loss: 2.2634\n",
      "Epoch [62/100], Step [140/143], Loss: 1.9887\n",
      "Epoch [63/100], Step [20/143], Loss: 1.9731\n",
      "Epoch [63/100], Step [40/143], Loss: 1.6775\n",
      "Epoch [63/100], Step [60/143], Loss: 2.2172\n",
      "Epoch [63/100], Step [80/143], Loss: 2.3299\n",
      "Epoch [63/100], Step [100/143], Loss: 2.7930\n",
      "Epoch [63/100], Step [120/143], Loss: 2.2205\n",
      "Epoch [63/100], Step [140/143], Loss: 1.9432\n",
      "Epoch [64/100], Step [20/143], Loss: 1.9298\n",
      "Epoch [64/100], Step [40/143], Loss: 1.6329\n",
      "Epoch [64/100], Step [60/143], Loss: 2.1741\n",
      "Epoch [64/100], Step [80/143], Loss: 2.2877\n",
      "Epoch [64/100], Step [100/143], Loss: 2.7391\n",
      "Epoch [64/100], Step [120/143], Loss: 2.1771\n",
      "Epoch [64/100], Step [140/143], Loss: 1.8979\n",
      "Epoch [65/100], Step [20/143], Loss: 1.8866\n",
      "Epoch [65/100], Step [40/143], Loss: 1.5889\n",
      "Epoch [65/100], Step [60/143], Loss: 2.1307\n",
      "Epoch [65/100], Step [80/143], Loss: 2.2451\n",
      "Epoch [65/100], Step [100/143], Loss: 2.6848\n",
      "Epoch [65/100], Step [120/143], Loss: 2.1334\n",
      "Epoch [65/100], Step [140/143], Loss: 1.8529\n",
      "Epoch [66/100], Step [20/143], Loss: 1.8437\n",
      "Epoch [66/100], Step [40/143], Loss: 1.5458\n",
      "Epoch [66/100], Step [60/143], Loss: 2.0869\n",
      "Epoch [66/100], Step [80/143], Loss: 2.2021\n",
      "Epoch [66/100], Step [100/143], Loss: 2.6300\n",
      "Epoch [66/100], Step [120/143], Loss: 2.0892\n",
      "Epoch [66/100], Step [140/143], Loss: 1.8081\n",
      "Epoch [67/100], Step [20/143], Loss: 1.8011\n",
      "Epoch [67/100], Step [40/143], Loss: 1.5035\n",
      "Epoch [67/100], Step [60/143], Loss: 2.0429\n",
      "Epoch [67/100], Step [80/143], Loss: 2.1588\n",
      "Epoch [67/100], Step [100/143], Loss: 2.5749\n",
      "Epoch [67/100], Step [120/143], Loss: 2.0447\n",
      "Epoch [67/100], Step [140/143], Loss: 1.7638\n",
      "Epoch [68/100], Step [20/143], Loss: 1.7588\n",
      "Epoch [68/100], Step [40/143], Loss: 1.4621\n",
      "Epoch [68/100], Step [60/143], Loss: 1.9986\n",
      "Epoch [68/100], Step [80/143], Loss: 2.1152\n",
      "Epoch [68/100], Step [100/143], Loss: 2.5196\n",
      "Epoch [68/100], Step [120/143], Loss: 1.9999\n",
      "Epoch [68/100], Step [140/143], Loss: 1.7200\n",
      "Epoch [69/100], Step [20/143], Loss: 1.7169\n",
      "Epoch [69/100], Step [40/143], Loss: 1.4216\n",
      "Epoch [69/100], Step [60/143], Loss: 1.9540\n",
      "Epoch [69/100], Step [80/143], Loss: 2.0714\n",
      "Epoch [69/100], Step [100/143], Loss: 2.4640\n",
      "Epoch [69/100], Step [120/143], Loss: 1.9547\n",
      "Epoch [69/100], Step [140/143], Loss: 1.6766\n",
      "Epoch [70/100], Step [20/143], Loss: 1.6755\n",
      "Epoch [70/100], Step [40/143], Loss: 1.3820\n",
      "Epoch [70/100], Step [60/143], Loss: 1.9093\n",
      "Epoch [70/100], Step [80/143], Loss: 2.0273\n",
      "Epoch [70/100], Step [100/143], Loss: 2.4082\n",
      "Epoch [70/100], Step [120/143], Loss: 1.9093\n",
      "Epoch [70/100], Step [140/143], Loss: 1.6340\n",
      "Epoch [71/100], Step [20/143], Loss: 1.6346\n",
      "Epoch [71/100], Step [40/143], Loss: 1.3435\n",
      "Epoch [71/100], Step [60/143], Loss: 1.8645\n",
      "Epoch [71/100], Step [80/143], Loss: 1.9830\n",
      "Epoch [71/100], Step [100/143], Loss: 2.3524\n",
      "Epoch [71/100], Step [120/143], Loss: 1.8637\n",
      "Epoch [71/100], Step [140/143], Loss: 1.5920\n",
      "Epoch [72/100], Step [20/143], Loss: 1.5943\n",
      "Epoch [72/100], Step [40/143], Loss: 1.3060\n",
      "Epoch [72/100], Step [60/143], Loss: 1.8197\n",
      "Epoch [72/100], Step [80/143], Loss: 1.9385\n",
      "Epoch [72/100], Step [100/143], Loss: 2.2967\n",
      "Epoch [72/100], Step [120/143], Loss: 1.8180\n",
      "Epoch [72/100], Step [140/143], Loss: 1.5508\n",
      "Epoch [73/100], Step [20/143], Loss: 1.5547\n",
      "Epoch [73/100], Step [40/143], Loss: 1.2695\n",
      "Epoch [73/100], Step [60/143], Loss: 1.7748\n",
      "Epoch [73/100], Step [80/143], Loss: 1.8939\n",
      "Epoch [73/100], Step [100/143], Loss: 2.2410\n",
      "Epoch [73/100], Step [120/143], Loss: 1.7722\n",
      "Epoch [73/100], Step [140/143], Loss: 1.5105\n",
      "Epoch [74/100], Step [20/143], Loss: 1.5158\n",
      "Epoch [74/100], Step [40/143], Loss: 1.2340\n",
      "Epoch [74/100], Step [60/143], Loss: 1.7300\n",
      "Epoch [74/100], Step [80/143], Loss: 1.8493\n",
      "Epoch [74/100], Step [100/143], Loss: 2.1854\n",
      "Epoch [74/100], Step [120/143], Loss: 1.7265\n",
      "Epoch [74/100], Step [140/143], Loss: 1.4710\n",
      "Epoch [75/100], Step [20/143], Loss: 1.4776\n",
      "Epoch [75/100], Step [40/143], Loss: 1.1996\n",
      "Epoch [75/100], Step [60/143], Loss: 1.6854\n",
      "Epoch [75/100], Step [80/143], Loss: 1.8046\n",
      "Epoch [75/100], Step [100/143], Loss: 2.1301\n",
      "Epoch [75/100], Step [120/143], Loss: 1.6808\n",
      "Epoch [75/100], Step [140/143], Loss: 1.4326\n",
      "Epoch [76/100], Step [20/143], Loss: 1.4402\n",
      "Epoch [76/100], Step [40/143], Loss: 1.1662\n",
      "Epoch [76/100], Step [60/143], Loss: 1.6411\n",
      "Epoch [76/100], Step [80/143], Loss: 1.7601\n",
      "Epoch [76/100], Step [100/143], Loss: 2.0752\n",
      "Epoch [76/100], Step [120/143], Loss: 1.6353\n",
      "Epoch [76/100], Step [140/143], Loss: 1.3952\n",
      "Epoch [77/100], Step [20/143], Loss: 1.4036\n",
      "Epoch [77/100], Step [40/143], Loss: 1.1338\n",
      "Epoch [77/100], Step [60/143], Loss: 1.5970\n",
      "Epoch [77/100], Step [80/143], Loss: 1.7156\n",
      "Epoch [77/100], Step [100/143], Loss: 2.0205\n",
      "Epoch [77/100], Step [120/143], Loss: 1.5901\n",
      "Epoch [77/100], Step [140/143], Loss: 1.3588\n",
      "Epoch [78/100], Step [20/143], Loss: 1.3678\n",
      "Epoch [78/100], Step [40/143], Loss: 1.1023\n",
      "Epoch [78/100], Step [60/143], Loss: 1.5533\n",
      "Epoch [78/100], Step [80/143], Loss: 1.6713\n",
      "Epoch [78/100], Step [100/143], Loss: 1.9664\n",
      "Epoch [78/100], Step [120/143], Loss: 1.5452\n",
      "Epoch [78/100], Step [140/143], Loss: 1.3236\n",
      "Epoch [79/100], Step [20/143], Loss: 1.3330\n",
      "Epoch [79/100], Step [40/143], Loss: 1.0719\n",
      "Epoch [79/100], Step [60/143], Loss: 1.5101\n",
      "Epoch [79/100], Step [80/143], Loss: 1.6273\n",
      "Epoch [79/100], Step [100/143], Loss: 1.9127\n",
      "Epoch [79/100], Step [120/143], Loss: 1.5007\n",
      "Epoch [79/100], Step [140/143], Loss: 1.2895\n",
      "Epoch [80/100], Step [20/143], Loss: 1.2990\n",
      "Epoch [80/100], Step [40/143], Loss: 1.0424\n",
      "Epoch [80/100], Step [60/143], Loss: 1.4675\n",
      "Epoch [80/100], Step [80/143], Loss: 1.5835\n",
      "Epoch [80/100], Step [100/143], Loss: 1.8596\n",
      "Epoch [80/100], Step [120/143], Loss: 1.4567\n",
      "Epoch [80/100], Step [140/143], Loss: 1.2565\n",
      "Epoch [81/100], Step [20/143], Loss: 1.2660\n",
      "Epoch [81/100], Step [40/143], Loss: 1.0138\n",
      "Epoch [81/100], Step [60/143], Loss: 1.4256\n",
      "Epoch [81/100], Step [80/143], Loss: 1.5402\n",
      "Epoch [81/100], Step [100/143], Loss: 1.8072\n",
      "Epoch [81/100], Step [120/143], Loss: 1.4134\n",
      "Epoch [81/100], Step [140/143], Loss: 1.2247\n",
      "Epoch [82/100], Step [20/143], Loss: 1.2339\n",
      "Epoch [82/100], Step [40/143], Loss: 0.9861\n",
      "Epoch [82/100], Step [60/143], Loss: 1.3843\n",
      "Epoch [82/100], Step [80/143], Loss: 1.4974\n",
      "Epoch [82/100], Step [100/143], Loss: 1.7554\n",
      "Epoch [82/100], Step [120/143], Loss: 1.3707\n",
      "Epoch [82/100], Step [140/143], Loss: 1.1941\n",
      "Epoch [83/100], Step [20/143], Loss: 1.2027\n",
      "Epoch [83/100], Step [40/143], Loss: 0.9592\n",
      "Epoch [83/100], Step [60/143], Loss: 1.3438\n",
      "Epoch [83/100], Step [80/143], Loss: 1.4550\n",
      "Epoch [83/100], Step [100/143], Loss: 1.7044\n",
      "Epoch [83/100], Step [120/143], Loss: 1.3288\n",
      "Epoch [83/100], Step [140/143], Loss: 1.1646\n",
      "Epoch [84/100], Step [20/143], Loss: 1.1724\n",
      "Epoch [84/100], Step [40/143], Loss: 0.9332\n",
      "Epoch [84/100], Step [60/143], Loss: 1.3043\n",
      "Epoch [84/100], Step [80/143], Loss: 1.4133\n",
      "Epoch [84/100], Step [100/143], Loss: 1.6542\n",
      "Epoch [84/100], Step [120/143], Loss: 1.2877\n",
      "Epoch [84/100], Step [140/143], Loss: 1.1362\n",
      "Epoch [85/100], Step [20/143], Loss: 1.1431\n",
      "Epoch [85/100], Step [40/143], Loss: 0.9080\n",
      "Epoch [85/100], Step [60/143], Loss: 1.2656\n",
      "Epoch [85/100], Step [80/143], Loss: 1.3722\n",
      "Epoch [85/100], Step [100/143], Loss: 1.6048\n",
      "Epoch [85/100], Step [120/143], Loss: 1.2475\n",
      "Epoch [85/100], Step [140/143], Loss: 1.1090\n",
      "Epoch [86/100], Step [20/143], Loss: 1.1146\n",
      "Epoch [86/100], Step [40/143], Loss: 0.8835\n",
      "Epoch [86/100], Step [60/143], Loss: 1.2279\n",
      "Epoch [86/100], Step [80/143], Loss: 1.3319\n",
      "Epoch [86/100], Step [100/143], Loss: 1.5563\n",
      "Epoch [86/100], Step [120/143], Loss: 1.2083\n",
      "Epoch [86/100], Step [140/143], Loss: 1.0828\n",
      "Epoch [87/100], Step [20/143], Loss: 1.0871\n",
      "Epoch [87/100], Step [40/143], Loss: 0.8598\n",
      "Epoch [87/100], Step [60/143], Loss: 1.1913\n",
      "Epoch [87/100], Step [80/143], Loss: 1.2924\n",
      "Epoch [87/100], Step [100/143], Loss: 1.5088\n",
      "Epoch [87/100], Step [120/143], Loss: 1.1701\n",
      "Epoch [87/100], Step [140/143], Loss: 1.0576\n",
      "Epoch [88/100], Step [20/143], Loss: 1.0604\n",
      "Epoch [88/100], Step [40/143], Loss: 0.8368\n",
      "Epoch [88/100], Step [60/143], Loss: 1.1557\n",
      "Epoch [88/100], Step [80/143], Loss: 1.2537\n",
      "Epoch [88/100], Step [100/143], Loss: 1.4622\n",
      "Epoch [88/100], Step [120/143], Loss: 1.1329\n",
      "Epoch [88/100], Step [140/143], Loss: 1.0335\n",
      "Epoch [89/100], Step [20/143], Loss: 1.0346\n",
      "Epoch [89/100], Step [40/143], Loss: 0.8144\n",
      "Epoch [89/100], Step [60/143], Loss: 1.1211\n",
      "Epoch [89/100], Step [80/143], Loss: 1.2159\n",
      "Epoch [89/100], Step [100/143], Loss: 1.4167\n",
      "Epoch [89/100], Step [120/143], Loss: 1.0967\n",
      "Epoch [89/100], Step [140/143], Loss: 1.0103\n",
      "Epoch [90/100], Step [20/143], Loss: 1.0096\n",
      "Epoch [90/100], Step [40/143], Loss: 0.7928\n",
      "Epoch [90/100], Step [60/143], Loss: 1.0877\n",
      "Epoch [90/100], Step [80/143], Loss: 1.1790\n",
      "Epoch [90/100], Step [100/143], Loss: 1.3722\n",
      "Epoch [90/100], Step [120/143], Loss: 1.0617\n",
      "Epoch [90/100], Step [140/143], Loss: 0.9880\n",
      "Epoch [91/100], Step [20/143], Loss: 0.9854\n",
      "Epoch [91/100], Step [40/143], Loss: 0.7718\n",
      "Epoch [91/100], Step [60/143], Loss: 1.0554\n",
      "Epoch [91/100], Step [80/143], Loss: 1.1431\n",
      "Epoch [91/100], Step [100/143], Loss: 1.3288\n",
      "Epoch [91/100], Step [120/143], Loss: 1.0278\n",
      "Epoch [91/100], Step [140/143], Loss: 0.9666\n",
      "Epoch [92/100], Step [20/143], Loss: 0.9621\n",
      "Epoch [92/100], Step [40/143], Loss: 0.7514\n",
      "Epoch [92/100], Step [60/143], Loss: 1.0242\n",
      "Epoch [92/100], Step [80/143], Loss: 1.1083\n",
      "Epoch [92/100], Step [100/143], Loss: 1.2865\n",
      "Epoch [92/100], Step [120/143], Loss: 0.9950\n",
      "Epoch [92/100], Step [140/143], Loss: 0.9460\n",
      "Epoch [93/100], Step [20/143], Loss: 0.9394\n",
      "Epoch [93/100], Step [40/143], Loss: 0.7316\n",
      "Epoch [93/100], Step [60/143], Loss: 0.9941\n",
      "Epoch [93/100], Step [80/143], Loss: 1.0744\n",
      "Epoch [93/100], Step [100/143], Loss: 1.2453\n",
      "Epoch [93/100], Step [120/143], Loss: 0.9633\n",
      "Epoch [93/100], Step [140/143], Loss: 0.9263\n",
      "Epoch [94/100], Step [20/143], Loss: 0.9176\n",
      "Epoch [94/100], Step [40/143], Loss: 0.7124\n",
      "Epoch [94/100], Step [60/143], Loss: 0.9650\n",
      "Epoch [94/100], Step [80/143], Loss: 1.0416\n",
      "Epoch [94/100], Step [100/143], Loss: 1.2053\n",
      "Epoch [94/100], Step [120/143], Loss: 0.9327\n",
      "Epoch [94/100], Step [140/143], Loss: 0.9072\n",
      "Epoch [95/100], Step [20/143], Loss: 0.8964\n",
      "Epoch [95/100], Step [40/143], Loss: 0.6938\n",
      "Epoch [95/100], Step [60/143], Loss: 0.9371\n",
      "Epoch [95/100], Step [80/143], Loss: 1.0099\n",
      "Epoch [95/100], Step [100/143], Loss: 1.1664\n",
      "Epoch [95/100], Step [120/143], Loss: 0.9032\n",
      "Epoch [95/100], Step [140/143], Loss: 0.8889\n",
      "Epoch [96/100], Step [20/143], Loss: 0.8760\n",
      "Epoch [96/100], Step [40/143], Loss: 0.6757\n",
      "Epoch [96/100], Step [60/143], Loss: 0.9101\n",
      "Epoch [96/100], Step [80/143], Loss: 0.9792\n",
      "Epoch [96/100], Step [100/143], Loss: 1.1287\n",
      "Epoch [96/100], Step [120/143], Loss: 0.8747\n",
      "Epoch [96/100], Step [140/143], Loss: 0.8712\n",
      "Epoch [97/100], Step [20/143], Loss: 0.8562\n",
      "Epoch [97/100], Step [40/143], Loss: 0.6581\n",
      "Epoch [97/100], Step [60/143], Loss: 0.8842\n",
      "Epoch [97/100], Step [80/143], Loss: 0.9495\n",
      "Epoch [97/100], Step [100/143], Loss: 1.0922\n",
      "Epoch [97/100], Step [120/143], Loss: 0.8473\n",
      "Epoch [97/100], Step [140/143], Loss: 0.8541\n",
      "Epoch [98/100], Step [20/143], Loss: 0.8370\n",
      "Epoch [98/100], Step [40/143], Loss: 0.6411\n",
      "Epoch [98/100], Step [60/143], Loss: 0.8592\n",
      "Epoch [98/100], Step [80/143], Loss: 0.9209\n",
      "Epoch [98/100], Step [100/143], Loss: 1.0568\n",
      "Epoch [98/100], Step [120/143], Loss: 0.8209\n",
      "Epoch [98/100], Step [140/143], Loss: 0.8377\n",
      "Epoch [99/100], Step [20/143], Loss: 0.8185\n",
      "Epoch [99/100], Step [40/143], Loss: 0.6247\n",
      "Epoch [99/100], Step [60/143], Loss: 0.8351\n",
      "Epoch [99/100], Step [80/143], Loss: 0.8933\n",
      "Epoch [99/100], Step [100/143], Loss: 1.0226\n",
      "Epoch [99/100], Step [120/143], Loss: 0.7955\n",
      "Epoch [99/100], Step [140/143], Loss: 0.8217\n",
      "Epoch [100/100], Step [20/143], Loss: 0.8006\n",
      "Epoch [100/100], Step [40/143], Loss: 0.6087\n",
      "Epoch [100/100], Step [60/143], Loss: 0.8120\n",
      "Epoch [100/100], Step [80/143], Loss: 0.8667\n",
      "Epoch [100/100], Step [100/143], Loss: 0.9896\n",
      "Epoch [100/100], Step [120/143], Loss: 0.7710\n",
      "Epoch [100/100], Step [140/143], Loss: 0.8063\n"
     ]
    }
   ],
   "execution_count": 9
  },
  {
   "cell_type": "code",
   "id": "8a4e36b033918def",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T01:53:31.389018Z",
     "start_time": "2025-07-06T01:53:31.377856Z"
    }
   },
   "source": [
    "model.eval()\n",
    "\n",
    "def generate_text(content, steps, temperature=0.8):\n",
    "    \"\"\"Autoregressively generate `steps` characters after the seed `content`.\n",
    "\n",
    "    Args:\n",
    "        content: seed string; every character must be present in `word_to_idx`,\n",
    "            otherwise a KeyError is raised.\n",
    "        steps: number of characters to generate.\n",
    "        temperature: softmax temperature; smaller values make sampling\n",
    "            greedier, larger values make it more random.\n",
    "\n",
    "    Returns:\n",
    "        The seed string concatenated with the generated characters.\n",
    "    \"\"\"\n",
    "    words = [word for word in content]\n",
    "\n",
    "    state = None\n",
    "    for _ in range(steps):\n",
    "        # inputs = [word_to_idx[word] for word in words[-SEQ_LENGTH:]] # alternative: feed the last SEQ_LENGTH characters\n",
    "        inputs = [word_to_idx[word] for word in words[-1:]] # feed only the last character; the recurrent state carries the history\n",
    "        inputs = torch.LongTensor(inputs)\n",
    "\n",
    "        # reshape to the batch-first layout the model expects\n",
    "        inputs = inputs.view(1, -1)  # (1, seq_len)\n",
    "\n",
    "        # forward pass\n",
    "        with torch.no_grad():\n",
    "            # `outputs` contains the logits for every time step; for prediction we only need the\n",
    "            # last step (e.g. for a two-character input there are two steps, but only the second\n",
    "            # step's logits over the vocabulary matter).\n",
    "            # `state` is the recurrent hidden state; because the outer for-loop predicts one\n",
    "            # character at a time, the state is kept and fed back in on the next iteration.\n",
    "            outputs, state = model(inputs, state)\n",
    "            last_output = outputs[0, -1, :]  # logits of the last time step\n",
    "\n",
    "        # Temperature sampling:\n",
    "        # `last_output / temperature` rescales the logits. With temperature > 1, e.g.\n",
    "        # [8,2,2] / 2 = [4,1,1], the gaps between options shrink and sampling is more uniform;\n",
    "        # with temperature < 1, e.g. [8,2,2] / 0.5 = [16,4,4], the gaps grow and sampling\n",
    "        # is greedier. `probs` is the softmax of the rescaled logits.\n",
    "        probs = torch.softmax(last_output / temperature, dim=-1)\n",
    "\n",
    "        # Multinomial sampling: with probs like [0.3, 0.2, 0.5], index 2 is drawn 50% of the\n",
    "        # time — the higher an entry's probability, the more likely it is to be sampled.\n",
    "        result_idx = torch.multinomial(probs, 1).item()\n",
    "\n",
    "        # alternative: greedy decoding — always take the index with the highest probability\n",
    "        # result_idx = torch.argmax(probs).item()\n",
    "\n",
    "\n",
    "        # append the sampled character to the running sequence\n",
    "        words.append(idx_to_word[result_idx])\n",
    "\n",
    "    return ''.join(words)\n",
    "\n",
    "\n",
    "# generate 20 characters; the larger the temperature, the more random the output\n",
    "print(generate_text(\"鹰\", 20, temperature=0.1))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
      "曾记\n"
     ]
    }
   ],
   "execution_count": 13
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
