{
 "cells": [
  {
   "cell_type": "code",
   "id": "92c9da5866bdcf7",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T00:36:00.980429Z",
     "start_time": "2025-07-06T00:36:00.975610Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import numpy as np\n",
    "\n",
    "# Sample text data: a poem\n",
    "text = \"\"\"\n",
    "独立寒秋，湘江北去，橘子洲头。\n",
    "看万山红遍，层林尽染；漫江碧透，百舸争流。\n",
    "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
    "怅寥廓，问苍茫大地，谁主沉浮？\n",
    "携来百侣曾游，忆往昔峥嵘岁月稠。\n",
    "恰同学少年，风华正茂；书生意气，挥斥方遒。\n",
    "指点江山，激扬文字，粪土当年万户侯。\n",
    "曾记否，到中流击水，浪遏飞舟？\n",
    "\"\"\"\n",
    "\n",
    "# Build the vocabulary.\n",
    "# sorted() makes the char -> index mapping deterministic across runs;\n",
    "# iterating a bare set() depends on string-hash randomization, so the\n",
    "# mapping (and any saved model) would differ on every launch.\n",
    "words = sorted(set(text))\n",
    "vocab_size = len(words)\n",
    "word_to_idx = {word: i for i, word in enumerate(words)}\n",
    "idx_to_word = {i: word for i, word in enumerate(words)}\n",
    "\n",
    "print(idx_to_word)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{0: '尽', 1: '风', 2: '主', 3: '立', 4: '携', 5: '字', 6: '万', 7: '遏', 8: '地', 9: '橘', 10: '水', 11: '林', 12: '点', 13: '峥', 14: '生', 15: '百', 16: '秋', 17: '茫', 18: '层', 19: '独', 20: '挥', 21: '苍', 22: '寒', 23: '漫', 24: '大', 25: '嵘', 26: '翔', 27: '子', 28: '由', 29: '茂', 30: '透', 31: '学', 32: '染', 33: '？', 34: '昔', 35: '侯', 36: '谁', 37: '，', 38: '类', 39: '方', 40: '江', 41: '舟', 42: '争', 43: '浮', 44: '。', 45: '击', 46: '恰', 47: '激', 48: '土', 49: '北', 50: '浅', 51: '流', 52: '沉', 53: '当', 54: '意', 55: '书', 56: '飞', 57: '到', 58: '记', 59: '寥', 60: '月', 61: '扬', 62: '文', 63: '头', 64: '正', 65: '少', 66: '问', 67: '舸', 68: '怅', 69: '粪', 70: '看', 71: '侣', 72: '浪', 73: '竞', 74: '否', 75: '同', 76: '去', 77: '往', 78: '天', 79: '\\n', 80: '忆', 81: '碧', 82: '稠', 83: '霜', 84: '；', 85: '长', 86: '洲', 87: '指', 88: '游', 89: '斥', 90: '自', 91: '鹰', 92: '遒', 93: '廓', 94: '中', 95: '气', 96: '鱼', 97: '华', 98: '空', 99: '岁', 100: '底', 101: '遍', 102: '山', 103: '湘', 104: '户', 105: '红', 106: '曾', 107: '年', 108: '来'}\n"
     ]
    }
   ],
   "execution_count": 18
  },
  {
   "cell_type": "code",
   "id": "91280acf83012c57",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T00:36:03.326569Z",
     "start_time": "2025-07-06T00:36:03.321993Z"
    }
   },
   "source": [
    "\n",
    "# Hyperparameters\n",
    "SEQ_LENGTH = 5  # length of each input sequence\n",
    "BATCH_SIZE = 1\n",
    "HIDDEN_SIZE = 128\n",
    "INPUT_SIZE = 128\n",
    "\n",
    "\n",
    "class TextDataset(Dataset):\n",
    "    \"\"\"Sliding-window character dataset for next-character prediction.\n",
    "\n",
    "    Each item is (input_seq, target_seq) where target_seq is input_seq\n",
    "    shifted right by one position. E.g. for corpus 'abcdefg':\n",
    "    input_seq = 'abc' -> target_seq = 'bcd'.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, text, seq_length, vocab=None):\n",
    "        self.text = text\n",
    "        self.seq_length = seq_length\n",
    "\n",
    "        # Convert the text to an index sequence. `vocab` defaults to the\n",
    "        # module-level word_to_idx so existing callers keep working, but a\n",
    "        # caller may now pass its own char -> index mapping.\n",
    "        mapping = word_to_idx if vocab is None else vocab\n",
    "        self.data = [mapping[ch] for ch in text]\n",
    "\n",
    "    def __len__(self):\n",
    "        # The last window must leave room for the one-step-shifted target.\n",
    "        return len(self.data) - self.seq_length\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # Input sequence X taken from the text\n",
    "        input_seq = self.data[idx:idx + self.seq_length]\n",
    "\n",
    "        # Target sequence Y: X shifted right by one character\n",
    "        target_seq = self.data[idx + 1:idx + self.seq_length + 1]\n",
    "\n",
    "        return torch.LongTensor(input_seq), torch.LongTensor(target_seq)\n",
    "\n",
    "\n",
    "dataset = TextDataset(text, SEQ_LENGTH)\n",
    "dataloader = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)\n",
    "\n",
    "# Peek at the first (input, target) pair\n",
    "for input_seq, target_seq in dataloader:\n",
    "    print(input_seq)\n",
    "    print(target_seq)\n",
    "    break"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[79, 19,  3, 22, 16]])\n",
      "tensor([[19,  3, 22, 16, 37]])\n"
     ]
    }
   ],
   "execution_count": 19
  },
  {
   "cell_type": "code",
   "id": "d9e9fd013d3d01ba",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T00:36:03.348131Z",
     "start_time": "2025-07-06T00:36:03.343806Z"
    }
   },
   "source": [
    "class ZhouyuModel(nn.Module):\n",
    "    \"\"\"Character-level language model: embedding -> 2-layer RNN -> linear head.\"\"\"\n",
    "\n",
    "    def __init__(self, vocab_size, input_size, hidden_size):\n",
    "        super().__init__()\n",
    "\n",
    "        self.hidden_size = hidden_size\n",
    "\n",
    "        # Embedding layer: token index in, dense vector out\n",
    "        self.embedding = nn.Embedding(vocab_size, input_size)\n",
    "\n",
    "        # Stacked (deep) RNN; batch dimension comes first\n",
    "        self.rnn = nn.RNN(input_size, hidden_size, batch_first=True, num_layers=2)\n",
    "\n",
    "        # Project hidden states back to vocabulary logits\n",
    "        self.out_linear = nn.Linear(hidden_size, vocab_size)\n",
    "\n",
    "    def forward(self, x, hidden=None):\n",
    "        \"\"\"Return per-time-step vocabulary logits and the final hidden state.\"\"\"\n",
    "        emb = self.embedding(x)\n",
    "        rnn_out, hidden = self.rnn(emb, hidden)\n",
    "        logits = self.out_linear(rnn_out)\n",
    "        return logits, hidden\n",
    "\n",
    "\n",
    "# Instantiate the model, loss, and optimizer\n",
    "model = ZhouyuModel(vocab_size, INPUT_SIZE, HIDDEN_SIZE)\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.005)"
   ],
   "outputs": [],
   "execution_count": 20
  },
  {
   "cell_type": "code",
   "id": "2348428ce74982e4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T00:36:13.179422Z",
     "start_time": "2025-07-06T00:36:03.358158Z"
    }
   },
   "source": [
    "# Single named constant so range() and the log message cannot drift apart\n",
    "NUM_EPOCHS = 100\n",
    "\n",
    "for epoch in range(NUM_EPOCHS):\n",
    "    for i, (inputs, targets) in enumerate(dataloader):\n",
    "        # Forward pass\n",
    "        outputs, _ = model(inputs)\n",
    "\n",
    "        # Cross-entropy over every time step: compare each step's logits\n",
    "        # with that step's label and average the losses.\n",
    "        loss = criterion(\n",
    "            outputs.view(-1, vocab_size),  # (batch_size*seq_length, vocab_size)\n",
    "            targets.view(-1)  # (batch_size*seq_length)\n",
    "        )\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "\n",
    "        # Gradient clipping guards against exploding gradients\n",
    "        # (disabled here; enable if training diverges):\n",
    "        # nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)\n",
    "\n",
    "        optimizer.step()\n",
    "\n",
    "        if (i + 1) % 20 == 0:\n",
    "            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'\n",
    "                  .format(epoch + 1, NUM_EPOCHS, i + 1, len(dataloader), loss.item()))"
   ],
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch [1/100], Step [20/143], Loss: 4.6859\n",
      "Epoch [1/100], Step [40/143], Loss: 4.6145\n",
      "Epoch [1/100], Step [60/143], Loss: 4.5537\n",
      "Epoch [1/100], Step [80/143], Loss: 4.5314\n",
      "Epoch [1/100], Step [100/143], Loss: 4.7114\n",
      "Epoch [1/100], Step [120/143], Loss: 4.5774\n",
      "Epoch [1/100], Step [140/143], Loss: 4.6863\n",
      "Epoch [2/100], Step [20/143], Loss: 4.5050\n",
      "Epoch [2/100], Step [40/143], Loss: 4.4109\n",
      "Epoch [2/100], Step [60/143], Loss: 4.3690\n",
      "Epoch [2/100], Step [80/143], Loss: 4.3142\n",
      "Epoch [2/100], Step [100/143], Loss: 4.5549\n",
      "Epoch [2/100], Step [120/143], Loss: 4.3512\n",
      "Epoch [2/100], Step [140/143], Loss: 4.5097\n",
      "Epoch [3/100], Step [20/143], Loss: 4.3103\n",
      "Epoch [3/100], Step [40/143], Loss: 4.1962\n",
      "Epoch [3/100], Step [60/143], Loss: 4.1713\n",
      "Epoch [3/100], Step [80/143], Loss: 4.0908\n",
      "Epoch [3/100], Step [100/143], Loss: 4.3999\n",
      "Epoch [3/100], Step [120/143], Loss: 4.1050\n",
      "Epoch [3/100], Step [140/143], Loss: 4.3153\n",
      "Epoch [4/100], Step [20/143], Loss: 4.0886\n",
      "Epoch [4/100], Step [40/143], Loss: 3.9576\n",
      "Epoch [4/100], Step [60/143], Loss: 3.9472\n",
      "Epoch [4/100], Step [80/143], Loss: 3.8498\n",
      "Epoch [4/100], Step [100/143], Loss: 4.2461\n",
      "Epoch [4/100], Step [120/143], Loss: 3.8252\n",
      "Epoch [4/100], Step [140/143], Loss: 4.0898\n",
      "Epoch [5/100], Step [20/143], Loss: 3.8277\n",
      "Epoch [5/100], Step [40/143], Loss: 3.6866\n",
      "Epoch [5/100], Step [60/143], Loss: 3.6867\n",
      "Epoch [5/100], Step [80/143], Loss: 3.5806\n",
      "Epoch [5/100], Step [100/143], Loss: 4.0928\n",
      "Epoch [5/100], Step [120/143], Loss: 3.5126\n",
      "Epoch [5/100], Step [140/143], Loss: 3.8268\n",
      "Epoch [6/100], Step [20/143], Loss: 3.5353\n",
      "Epoch [6/100], Step [40/143], Loss: 3.3953\n",
      "Epoch [6/100], Step [60/143], Loss: 3.4018\n",
      "Epoch [6/100], Step [80/143], Loss: 3.2874\n",
      "Epoch [6/100], Step [100/143], Loss: 3.9351\n",
      "Epoch [6/100], Step [120/143], Loss: 3.2105\n",
      "Epoch [6/100], Step [140/143], Loss: 3.5472\n",
      "Epoch [7/100], Step [20/143], Loss: 3.2662\n",
      "Epoch [7/100], Step [40/143], Loss: 3.1290\n",
      "Epoch [7/100], Step [60/143], Loss: 3.1371\n",
      "Epoch [7/100], Step [80/143], Loss: 3.0054\n",
      "Epoch [7/100], Step [100/143], Loss: 3.7632\n",
      "Epoch [7/100], Step [120/143], Loss: 2.9691\n",
      "Epoch [7/100], Step [140/143], Loss: 3.2930\n",
      "Epoch [8/100], Step [20/143], Loss: 3.0588\n",
      "Epoch [8/100], Step [40/143], Loss: 2.9098\n",
      "Epoch [8/100], Step [60/143], Loss: 2.9202\n",
      "Epoch [8/100], Step [80/143], Loss: 2.7614\n",
      "Epoch [8/100], Step [100/143], Loss: 3.5756\n",
      "Epoch [8/100], Step [120/143], Loss: 2.7781\n",
      "Epoch [8/100], Step [140/143], Loss: 3.0782\n",
      "Epoch [9/100], Step [20/143], Loss: 2.8896\n",
      "Epoch [9/100], Step [40/143], Loss: 2.7189\n",
      "Epoch [9/100], Step [60/143], Loss: 2.7414\n",
      "Epoch [9/100], Step [80/143], Loss: 2.5517\n",
      "Epoch [9/100], Step [100/143], Loss: 3.3790\n",
      "Epoch [9/100], Step [120/143], Loss: 2.6119\n",
      "Epoch [9/100], Step [140/143], Loss: 2.8924\n",
      "Epoch [10/100], Step [20/143], Loss: 2.7358\n",
      "Epoch [10/100], Step [40/143], Loss: 2.5405\n",
      "Epoch [10/100], Step [60/143], Loss: 2.5854\n",
      "Epoch [10/100], Step [80/143], Loss: 2.3658\n",
      "Epoch [10/100], Step [100/143], Loss: 3.1770\n",
      "Epoch [10/100], Step [120/143], Loss: 2.4577\n",
      "Epoch [10/100], Step [140/143], Loss: 2.7254\n",
      "Epoch [11/100], Step [20/143], Loss: 2.5886\n",
      "Epoch [11/100], Step [40/143], Loss: 2.3682\n",
      "Epoch [11/100], Step [60/143], Loss: 2.4431\n",
      "Epoch [11/100], Step [80/143], Loss: 2.1964\n",
      "Epoch [11/100], Step [100/143], Loss: 2.9711\n",
      "Epoch [11/100], Step [120/143], Loss: 2.3107\n",
      "Epoch [11/100], Step [140/143], Loss: 2.5717\n",
      "Epoch [12/100], Step [20/143], Loss: 2.4451\n",
      "Epoch [12/100], Step [40/143], Loss: 2.2000\n",
      "Epoch [12/100], Step [60/143], Loss: 2.3099\n",
      "Epoch [12/100], Step [80/143], Loss: 2.0386\n",
      "Epoch [12/100], Step [100/143], Loss: 2.7623\n",
      "Epoch [12/100], Step [120/143], Loss: 2.1693\n",
      "Epoch [12/100], Step [140/143], Loss: 2.4283\n",
      "Epoch [13/100], Step [20/143], Loss: 2.3046\n",
      "Epoch [13/100], Step [40/143], Loss: 2.0355\n",
      "Epoch [13/100], Step [60/143], Loss: 2.1830\n",
      "Epoch [13/100], Step [80/143], Loss: 1.8896\n",
      "Epoch [13/100], Step [100/143], Loss: 2.5528\n",
      "Epoch [13/100], Step [120/143], Loss: 2.0333\n",
      "Epoch [13/100], Step [140/143], Loss: 2.2935\n",
      "Epoch [14/100], Step [20/143], Loss: 2.1671\n",
      "Epoch [14/100], Step [40/143], Loss: 1.8757\n",
      "Epoch [14/100], Step [60/143], Loss: 2.0608\n",
      "Epoch [14/100], Step [80/143], Loss: 1.7481\n",
      "Epoch [14/100], Step [100/143], Loss: 2.3457\n",
      "Epoch [14/100], Step [120/143], Loss: 1.9025\n",
      "Epoch [14/100], Step [140/143], Loss: 2.1659\n",
      "Epoch [15/100], Step [20/143], Loss: 2.0333\n",
      "Epoch [15/100], Step [40/143], Loss: 1.7224\n",
      "Epoch [15/100], Step [60/143], Loss: 1.9424\n",
      "Epoch [15/100], Step [80/143], Loss: 1.6139\n",
      "Epoch [15/100], Step [100/143], Loss: 2.1442\n",
      "Epoch [15/100], Step [120/143], Loss: 1.7772\n",
      "Epoch [15/100], Step [140/143], Loss: 2.0444\n",
      "Epoch [16/100], Step [20/143], Loss: 1.9039\n",
      "Epoch [16/100], Step [40/143], Loss: 1.5775\n",
      "Epoch [16/100], Step [60/143], Loss: 1.8272\n",
      "Epoch [16/100], Step [80/143], Loss: 1.4871\n",
      "Epoch [16/100], Step [100/143], Loss: 1.9512\n",
      "Epoch [16/100], Step [120/143], Loss: 1.6576\n",
      "Epoch [16/100], Step [140/143], Loss: 1.9281\n",
      "Epoch [17/100], Step [20/143], Loss: 1.7796\n",
      "Epoch [17/100], Step [40/143], Loss: 1.4425\n",
      "Epoch [17/100], Step [60/143], Loss: 1.7153\n",
      "Epoch [17/100], Step [80/143], Loss: 1.3683\n",
      "Epoch [17/100], Step [100/143], Loss: 1.7692\n",
      "Epoch [17/100], Step [120/143], Loss: 1.5439\n",
      "Epoch [17/100], Step [140/143], Loss: 1.8165\n",
      "Epoch [18/100], Step [20/143], Loss: 1.6611\n",
      "Epoch [18/100], Step [40/143], Loss: 1.3185\n",
      "Epoch [18/100], Step [60/143], Loss: 1.6071\n",
      "Epoch [18/100], Step [80/143], Loss: 1.2582\n",
      "Epoch [18/100], Step [100/143], Loss: 1.5997\n",
      "Epoch [18/100], Step [120/143], Loss: 1.4366\n",
      "Epoch [18/100], Step [140/143], Loss: 1.7092\n",
      "Epoch [19/100], Step [20/143], Loss: 1.5489\n",
      "Epoch [19/100], Step [40/143], Loss: 1.2058\n",
      "Epoch [19/100], Step [60/143], Loss: 1.5032\n",
      "Epoch [19/100], Step [80/143], Loss: 1.1570\n",
      "Epoch [19/100], Step [100/143], Loss: 1.4440\n",
      "Epoch [19/100], Step [120/143], Loss: 1.3357\n",
      "Epoch [19/100], Step [140/143], Loss: 1.6060\n",
      "Epoch [20/100], Step [20/143], Loss: 1.4434\n",
      "Epoch [20/100], Step [40/143], Loss: 1.1041\n",
      "Epoch [20/100], Step [60/143], Loss: 1.4041\n",
      "Epoch [20/100], Step [80/143], Loss: 1.0648\n",
      "Epoch [20/100], Step [100/143], Loss: 1.3023\n",
      "Epoch [20/100], Step [120/143], Loss: 1.2415\n",
      "Epoch [20/100], Step [140/143], Loss: 1.5071\n",
      "Epoch [21/100], Step [20/143], Loss: 1.3451\n",
      "Epoch [21/100], Step [40/143], Loss: 1.0127\n",
      "Epoch [21/100], Step [60/143], Loss: 1.3105\n",
      "Epoch [21/100], Step [80/143], Loss: 0.9814\n",
      "Epoch [21/100], Step [100/143], Loss: 1.1748\n",
      "Epoch [21/100], Step [120/143], Loss: 1.1540\n",
      "Epoch [21/100], Step [140/143], Loss: 1.4126\n",
      "Epoch [22/100], Step [20/143], Loss: 1.2538\n",
      "Epoch [22/100], Step [40/143], Loss: 0.9308\n",
      "Epoch [22/100], Step [60/143], Loss: 1.2228\n",
      "Epoch [22/100], Step [80/143], Loss: 0.9064\n",
      "Epoch [22/100], Step [100/143], Loss: 1.0611\n",
      "Epoch [22/100], Step [120/143], Loss: 1.0731\n",
      "Epoch [22/100], Step [140/143], Loss: 1.3227\n",
      "Epoch [23/100], Step [20/143], Loss: 1.1696\n",
      "Epoch [23/100], Step [40/143], Loss: 0.8573\n",
      "Epoch [23/100], Step [60/143], Loss: 1.1412\n",
      "Epoch [23/100], Step [80/143], Loss: 0.8392\n",
      "Epoch [23/100], Step [100/143], Loss: 0.9603\n",
      "Epoch [23/100], Step [120/143], Loss: 0.9985\n",
      "Epoch [23/100], Step [140/143], Loss: 1.2377\n",
      "Epoch [24/100], Step [20/143], Loss: 1.0923\n",
      "Epoch [24/100], Step [40/143], Loss: 0.7911\n",
      "Epoch [24/100], Step [60/143], Loss: 1.0656\n",
      "Epoch [24/100], Step [80/143], Loss: 0.7788\n",
      "Epoch [24/100], Step [100/143], Loss: 0.8716\n",
      "Epoch [24/100], Step [120/143], Loss: 0.9299\n",
      "Epoch [24/100], Step [140/143], Loss: 1.1578\n",
      "Epoch [25/100], Step [20/143], Loss: 1.0214\n",
      "Epoch [25/100], Step [40/143], Loss: 0.7315\n",
      "Epoch [25/100], Step [60/143], Loss: 0.9958\n",
      "Epoch [25/100], Step [80/143], Loss: 0.7245\n",
      "Epoch [25/100], Step [100/143], Loss: 0.7937\n",
      "Epoch [25/100], Step [120/143], Loss: 0.8669\n",
      "Epoch [25/100], Step [140/143], Loss: 1.0831\n",
      "Epoch [26/100], Step [20/143], Loss: 0.9567\n",
      "Epoch [26/100], Step [40/143], Loss: 0.6776\n",
      "Epoch [26/100], Step [60/143], Loss: 0.9316\n",
      "Epoch [26/100], Step [80/143], Loss: 0.6757\n",
      "Epoch [26/100], Step [100/143], Loss: 0.7255\n",
      "Epoch [26/100], Step [120/143], Loss: 0.8091\n",
      "Epoch [26/100], Step [140/143], Loss: 1.0137\n",
      "Epoch [27/100], Step [20/143], Loss: 0.8976\n",
      "Epoch [27/100], Step [40/143], Loss: 0.6287\n",
      "Epoch [27/100], Step [60/143], Loss: 0.8725\n",
      "Epoch [27/100], Step [80/143], Loss: 0.6315\n",
      "Epoch [27/100], Step [100/143], Loss: 0.6657\n",
      "Epoch [27/100], Step [120/143], Loss: 0.7561\n",
      "Epoch [27/100], Step [140/143], Loss: 0.9494\n",
      "Epoch [28/100], Step [20/143], Loss: 0.8437\n",
      "Epoch [28/100], Step [40/143], Loss: 0.5842\n",
      "Epoch [28/100], Step [60/143], Loss: 0.8182\n",
      "Epoch [28/100], Step [80/143], Loss: 0.5914\n",
      "Epoch [28/100], Step [100/143], Loss: 0.6133\n",
      "Epoch [28/100], Step [120/143], Loss: 0.7075\n",
      "Epoch [28/100], Step [140/143], Loss: 0.8902\n",
      "Epoch [29/100], Step [20/143], Loss: 0.7946\n",
      "Epoch [29/100], Step [40/143], Loss: 0.5436\n",
      "Epoch [29/100], Step [60/143], Loss: 0.7681\n",
      "Epoch [29/100], Step [80/143], Loss: 0.5548\n",
      "Epoch [29/100], Step [100/143], Loss: 0.5671\n",
      "Epoch [29/100], Step [120/143], Loss: 0.6628\n",
      "Epoch [29/100], Step [140/143], Loss: 0.8357\n",
      "Epoch [30/100], Step [20/143], Loss: 0.7498\n",
      "Epoch [30/100], Step [40/143], Loss: 0.5066\n",
      "Epoch [30/100], Step [60/143], Loss: 0.7219\n",
      "Epoch [30/100], Step [80/143], Loss: 0.5213\n",
      "Epoch [30/100], Step [100/143], Loss: 0.5264\n",
      "Epoch [30/100], Step [120/143], Loss: 0.6217\n",
      "Epoch [30/100], Step [140/143], Loss: 0.7858\n",
      "Epoch [31/100], Step [20/143], Loss: 0.7089\n",
      "Epoch [31/100], Step [40/143], Loss: 0.4727\n",
      "Epoch [31/100], Step [60/143], Loss: 0.6793\n",
      "Epoch [31/100], Step [80/143], Loss: 0.4905\n",
      "Epoch [31/100], Step [100/143], Loss: 0.4904\n",
      "Epoch [31/100], Step [120/143], Loss: 0.5838\n",
      "Epoch [31/100], Step [140/143], Loss: 0.7401\n",
      "Epoch [32/100], Step [20/143], Loss: 0.6716\n",
      "Epoch [32/100], Step [40/143], Loss: 0.4416\n",
      "Epoch [32/100], Step [60/143], Loss: 0.6398\n",
      "Epoch [32/100], Step [80/143], Loss: 0.4621\n",
      "Epoch [32/100], Step [100/143], Loss: 0.4582\n",
      "Epoch [32/100], Step [120/143], Loss: 0.5488\n",
      "Epoch [32/100], Step [140/143], Loss: 0.6982\n",
      "Epoch [33/100], Step [20/143], Loss: 0.6375\n",
      "Epoch [33/100], Step [40/143], Loss: 0.4132\n",
      "Epoch [33/100], Step [60/143], Loss: 0.6033\n",
      "Epoch [33/100], Step [80/143], Loss: 0.4359\n",
      "Epoch [33/100], Step [100/143], Loss: 0.4295\n",
      "Epoch [33/100], Step [120/143], Loss: 0.5165\n",
      "Epoch [33/100], Step [140/143], Loss: 0.6599\n",
      "Epoch [34/100], Step [20/143], Loss: 0.6064\n",
      "Epoch [34/100], Step [40/143], Loss: 0.3872\n",
      "Epoch [34/100], Step [60/143], Loss: 0.5695\n",
      "Epoch [34/100], Step [80/143], Loss: 0.4117\n",
      "Epoch [34/100], Step [100/143], Loss: 0.4037\n",
      "Epoch [34/100], Step [120/143], Loss: 0.4866\n",
      "Epoch [34/100], Step [140/143], Loss: 0.6249\n",
      "Epoch [35/100], Step [20/143], Loss: 0.5779\n",
      "Epoch [35/100], Step [40/143], Loss: 0.3633\n",
      "Epoch [35/100], Step [60/143], Loss: 0.5381\n",
      "Epoch [35/100], Step [80/143], Loss: 0.3892\n",
      "Epoch [35/100], Step [100/143], Loss: 0.3805\n",
      "Epoch [35/100], Step [120/143], Loss: 0.4590\n",
      "Epoch [35/100], Step [140/143], Loss: 0.5928\n",
      "Epoch [36/100], Step [20/143], Loss: 0.5519\n",
      "Epoch [36/100], Step [40/143], Loss: 0.3414\n",
      "Epoch [36/100], Step [60/143], Loss: 0.5090\n",
      "Epoch [36/100], Step [80/143], Loss: 0.3683\n",
      "Epoch [36/100], Step [100/143], Loss: 0.3594\n",
      "Epoch [36/100], Step [120/143], Loss: 0.4333\n",
      "Epoch [36/100], Step [140/143], Loss: 0.5634\n",
      "Epoch [37/100], Step [20/143], Loss: 0.5281\n",
      "Epoch [37/100], Step [40/143], Loss: 0.3213\n",
      "Epoch [37/100], Step [60/143], Loss: 0.4820\n",
      "Epoch [37/100], Step [80/143], Loss: 0.3490\n",
      "Epoch [37/100], Step [100/143], Loss: 0.3402\n",
      "Epoch [37/100], Step [120/143], Loss: 0.4095\n",
      "Epoch [37/100], Step [140/143], Loss: 0.5365\n",
      "Epoch [38/100], Step [20/143], Loss: 0.5063\n",
      "Epoch [38/100], Step [40/143], Loss: 0.3028\n",
      "Epoch [38/100], Step [60/143], Loss: 0.4569\n",
      "Epoch [38/100], Step [80/143], Loss: 0.3309\n",
      "Epoch [38/100], Step [100/143], Loss: 0.3228\n",
      "Epoch [38/100], Step [120/143], Loss: 0.3875\n",
      "Epoch [38/100], Step [140/143], Loss: 0.5118\n",
      "Epoch [39/100], Step [20/143], Loss: 0.4862\n",
      "Epoch [39/100], Step [40/143], Loss: 0.2858\n",
      "Epoch [39/100], Step [60/143], Loss: 0.4336\n",
      "Epoch [39/100], Step [80/143], Loss: 0.3142\n",
      "Epoch [39/100], Step [100/143], Loss: 0.3068\n",
      "Epoch [39/100], Step [120/143], Loss: 0.3670\n",
      "Epoch [39/100], Step [140/143], Loss: 0.4892\n",
      "Epoch [40/100], Step [20/143], Loss: 0.4678\n",
      "Epoch [40/100], Step [40/143], Loss: 0.2702\n",
      "Epoch [40/100], Step [60/143], Loss: 0.4119\n",
      "Epoch [40/100], Step [80/143], Loss: 0.2986\n",
      "Epoch [40/100], Step [100/143], Loss: 0.2921\n",
      "Epoch [40/100], Step [120/143], Loss: 0.3480\n",
      "Epoch [40/100], Step [140/143], Loss: 0.4685\n",
      "Epoch [41/100], Step [20/143], Loss: 0.4509\n",
      "Epoch [41/100], Step [40/143], Loss: 0.2558\n",
      "Epoch [41/100], Step [60/143], Loss: 0.3918\n",
      "Epoch [41/100], Step [80/143], Loss: 0.2840\n",
      "Epoch [41/100], Step [100/143], Loss: 0.2785\n",
      "Epoch [41/100], Step [120/143], Loss: 0.3303\n",
      "Epoch [41/100], Step [140/143], Loss: 0.4494\n",
      "Epoch [42/100], Step [20/143], Loss: 0.4354\n",
      "Epoch [42/100], Step [40/143], Loss: 0.2426\n",
      "Epoch [42/100], Step [60/143], Loss: 0.3730\n",
      "Epoch [42/100], Step [80/143], Loss: 0.2705\n",
      "Epoch [42/100], Step [100/143], Loss: 0.2660\n",
      "Epoch [42/100], Step [120/143], Loss: 0.3138\n",
      "Epoch [42/100], Step [140/143], Loss: 0.4319\n",
      "Epoch [43/100], Step [20/143], Loss: 0.4210\n",
      "Epoch [43/100], Step [40/143], Loss: 0.2304\n",
      "Epoch [43/100], Step [60/143], Loss: 0.3556\n",
      "Epoch [43/100], Step [80/143], Loss: 0.2579\n",
      "Epoch [43/100], Step [100/143], Loss: 0.2545\n",
      "Epoch [43/100], Step [120/143], Loss: 0.2985\n",
      "Epoch [43/100], Step [140/143], Loss: 0.4159\n",
      "Epoch [44/100], Step [20/143], Loss: 0.4077\n",
      "Epoch [44/100], Step [40/143], Loss: 0.2191\n",
      "Epoch [44/100], Step [60/143], Loss: 0.3393\n",
      "Epoch [44/100], Step [80/143], Loss: 0.2461\n",
      "Epoch [44/100], Step [100/143], Loss: 0.2437\n",
      "Epoch [44/100], Step [120/143], Loss: 0.2843\n",
      "Epoch [44/100], Step [140/143], Loss: 0.4011\n",
      "Epoch [45/100], Step [20/143], Loss: 0.3955\n",
      "Epoch [45/100], Step [40/143], Loss: 0.2086\n",
      "Epoch [45/100], Step [60/143], Loss: 0.3241\n",
      "Epoch [45/100], Step [80/143], Loss: 0.2351\n",
      "Epoch [45/100], Step [100/143], Loss: 0.2337\n",
      "Epoch [45/100], Step [120/143], Loss: 0.2711\n",
      "Epoch [45/100], Step [140/143], Loss: 0.3875\n",
      "Epoch [46/100], Step [20/143], Loss: 0.3841\n",
      "Epoch [46/100], Step [40/143], Loss: 0.1989\n",
      "Epoch [46/100], Step [60/143], Loss: 0.3100\n",
      "Epoch [46/100], Step [80/143], Loss: 0.2248\n",
      "Epoch [46/100], Step [100/143], Loss: 0.2244\n",
      "Epoch [46/100], Step [120/143], Loss: 0.2587\n",
      "Epoch [46/100], Step [140/143], Loss: 0.3750\n",
      "Epoch [47/100], Step [20/143], Loss: 0.3736\n",
      "Epoch [47/100], Step [40/143], Loss: 0.1899\n",
      "Epoch [47/100], Step [60/143], Loss: 0.2968\n",
      "Epoch [47/100], Step [80/143], Loss: 0.2151\n",
      "Epoch [47/100], Step [100/143], Loss: 0.2157\n",
      "Epoch [47/100], Step [120/143], Loss: 0.2472\n",
      "Epoch [47/100], Step [140/143], Loss: 0.3634\n",
      "Epoch [48/100], Step [20/143], Loss: 0.3639\n",
      "Epoch [48/100], Step [40/143], Loss: 0.1816\n",
      "Epoch [48/100], Step [60/143], Loss: 0.2844\n",
      "Epoch [48/100], Step [80/143], Loss: 0.2061\n",
      "Epoch [48/100], Step [100/143], Loss: 0.2076\n",
      "Epoch [48/100], Step [120/143], Loss: 0.2365\n",
      "Epoch [48/100], Step [140/143], Loss: 0.3528\n",
      "Epoch [49/100], Step [20/143], Loss: 0.3548\n",
      "Epoch [49/100], Step [40/143], Loss: 0.1738\n",
      "Epoch [49/100], Step [60/143], Loss: 0.2728\n",
      "Epoch [49/100], Step [80/143], Loss: 0.1977\n",
      "Epoch [49/100], Step [100/143], Loss: 0.2000\n",
      "Epoch [49/100], Step [120/143], Loss: 0.2265\n",
      "Epoch [49/100], Step [140/143], Loss: 0.3430\n",
      "Epoch [50/100], Step [20/143], Loss: 0.3463\n",
      "Epoch [50/100], Step [40/143], Loss: 0.1666\n",
      "Epoch [50/100], Step [60/143], Loss: 0.2620\n",
      "Epoch [50/100], Step [80/143], Loss: 0.1898\n",
      "Epoch [50/100], Step [100/143], Loss: 0.1928\n",
      "Epoch [50/100], Step [120/143], Loss: 0.2171\n",
      "Epoch [50/100], Step [140/143], Loss: 0.3339\n",
      "Epoch [51/100], Step [20/143], Loss: 0.3385\n",
      "Epoch [51/100], Step [40/143], Loss: 0.1598\n",
      "Epoch [51/100], Step [60/143], Loss: 0.2518\n",
      "Epoch [51/100], Step [80/143], Loss: 0.1824\n",
      "Epoch [51/100], Step [100/143], Loss: 0.1861\n",
      "Epoch [51/100], Step [120/143], Loss: 0.2084\n",
      "Epoch [51/100], Step [140/143], Loss: 0.3255\n",
      "Epoch [52/100], Step [20/143], Loss: 0.3311\n",
      "Epoch [52/100], Step [40/143], Loss: 0.1535\n",
      "Epoch [52/100], Step [60/143], Loss: 0.2422\n",
      "Epoch [52/100], Step [80/143], Loss: 0.1754\n",
      "Epoch [52/100], Step [100/143], Loss: 0.1797\n",
      "Epoch [52/100], Step [120/143], Loss: 0.2002\n",
      "Epoch [52/100], Step [140/143], Loss: 0.3177\n",
      "Epoch [53/100], Step [20/143], Loss: 0.3243\n",
      "Epoch [53/100], Step [40/143], Loss: 0.1476\n",
      "Epoch [53/100], Step [60/143], Loss: 0.2332\n",
      "Epoch [53/100], Step [80/143], Loss: 0.1689\n",
      "Epoch [53/100], Step [100/143], Loss: 0.1737\n",
      "Epoch [53/100], Step [120/143], Loss: 0.1925\n",
      "Epoch [53/100], Step [140/143], Loss: 0.3105\n",
      "Epoch [54/100], Step [20/143], Loss: 0.3179\n",
      "Epoch [54/100], Step [40/143], Loss: 0.1421\n",
      "Epoch [54/100], Step [60/143], Loss: 0.2248\n",
      "Epoch [54/100], Step [80/143], Loss: 0.1627\n",
      "Epoch [54/100], Step [100/143], Loss: 0.1681\n",
      "Epoch [54/100], Step [120/143], Loss: 0.1853\n",
      "Epoch [54/100], Step [140/143], Loss: 0.3038\n",
      "Epoch [55/100], Step [20/143], Loss: 0.3119\n",
      "Epoch [55/100], Step [40/143], Loss: 0.1369\n",
      "Epoch [55/100], Step [60/143], Loss: 0.2168\n",
      "Epoch [55/100], Step [80/143], Loss: 0.1569\n",
      "Epoch [55/100], Step [100/143], Loss: 0.1628\n",
      "Epoch [55/100], Step [120/143], Loss: 0.1785\n",
      "Epoch [55/100], Step [140/143], Loss: 0.2976\n",
      "Epoch [56/100], Step [20/143], Loss: 0.3063\n",
      "Epoch [56/100], Step [40/143], Loss: 0.1321\n",
      "Epoch [56/100], Step [60/143], Loss: 0.2093\n",
      "Epoch [56/100], Step [80/143], Loss: 0.1514\n",
      "Epoch [56/100], Step [100/143], Loss: 0.1577\n",
      "Epoch [56/100], Step [120/143], Loss: 0.1722\n",
      "Epoch [56/100], Step [140/143], Loss: 0.2918\n",
      "Epoch [57/100], Step [20/143], Loss: 0.3010\n",
      "Epoch [57/100], Step [40/143], Loss: 0.1275\n",
      "Epoch [57/100], Step [60/143], Loss: 0.2022\n",
      "Epoch [57/100], Step [80/143], Loss: 0.1463\n",
      "Epoch [57/100], Step [100/143], Loss: 0.1529\n",
      "Epoch [57/100], Step [120/143], Loss: 0.1662\n",
      "Epoch [57/100], Step [140/143], Loss: 0.2864\n",
      "Epoch [58/100], Step [20/143], Loss: 0.2961\n",
      "Epoch [58/100], Step [40/143], Loss: 0.1232\n",
      "Epoch [58/100], Step [60/143], Loss: 0.1955\n",
      "Epoch [58/100], Step [80/143], Loss: 0.1414\n",
      "Epoch [58/100], Step [100/143], Loss: 0.1484\n",
      "Epoch [58/100], Step [120/143], Loss: 0.1605\n",
      "Epoch [58/100], Step [140/143], Loss: 0.2814\n",
      "Epoch [59/100], Step [20/143], Loss: 0.2914\n",
      "Epoch [59/100], Step [40/143], Loss: 0.1191\n",
      "Epoch [59/100], Step [60/143], Loss: 0.1891\n",
      "Epoch [59/100], Step [80/143], Loss: 0.1368\n",
      "Epoch [59/100], Step [100/143], Loss: 0.1441\n",
      "Epoch [59/100], Step [120/143], Loss: 0.1552\n",
      "Epoch [59/100], Step [140/143], Loss: 0.2767\n",
      "Epoch [60/100], Step [20/143], Loss: 0.2870\n",
      "Epoch [60/100], Step [40/143], Loss: 0.1153\n",
      "Epoch [60/100], Step [60/143], Loss: 0.1831\n",
      "Epoch [60/100], Step [80/143], Loss: 0.1324\n",
      "Epoch [60/100], Step [100/143], Loss: 0.1400\n",
      "Epoch [60/100], Step [120/143], Loss: 0.1502\n",
      "Epoch [60/100], Step [140/143], Loss: 0.2723\n",
      "Epoch [61/100], Step [20/143], Loss: 0.2829\n",
      "Epoch [61/100], Step [40/143], Loss: 0.1116\n",
      "Epoch [61/100], Step [60/143], Loss: 0.1774\n",
      "Epoch [61/100], Step [80/143], Loss: 0.1283\n",
      "Epoch [61/100], Step [100/143], Loss: 0.1361\n",
      "Epoch [61/100], Step [120/143], Loss: 0.1454\n",
      "Epoch [61/100], Step [140/143], Loss: 0.2682\n",
      "Epoch [62/100], Step [20/143], Loss: 0.2790\n",
      "Epoch [62/100], Step [40/143], Loss: 0.1082\n",
      "Epoch [62/100], Step [60/143], Loss: 0.1720\n",
      "Epoch [62/100], Step [80/143], Loss: 0.1243\n",
      "Epoch [62/100], Step [100/143], Loss: 0.1324\n",
      "Epoch [62/100], Step [120/143], Loss: 0.1409\n",
      "Epoch [62/100], Step [140/143], Loss: 0.2644\n",
      "Epoch [63/100], Step [20/143], Loss: 0.2754\n",
      "Epoch [63/100], Step [40/143], Loss: 0.1050\n",
      "Epoch [63/100], Step [60/143], Loss: 0.1669\n",
      "Epoch [63/100], Step [80/143], Loss: 0.1206\n",
      "Epoch [63/100], Step [100/143], Loss: 0.1289\n",
      "Epoch [63/100], Step [120/143], Loss: 0.1367\n",
      "Epoch [63/100], Step [140/143], Loss: 0.2608\n",
      "Epoch [64/100], Step [20/143], Loss: 0.2719\n",
      "Epoch [64/100], Step [40/143], Loss: 0.1019\n",
      "Epoch [64/100], Step [60/143], Loss: 0.1620\n",
      "Epoch [64/100], Step [80/143], Loss: 0.1171\n",
      "Epoch [64/100], Step [100/143], Loss: 0.1255\n",
      "Epoch [64/100], Step [120/143], Loss: 0.1326\n",
      "Epoch [64/100], Step [140/143], Loss: 0.2574\n",
      "Epoch [65/100], Step [20/143], Loss: 0.2687\n",
      "Epoch [65/100], Step [40/143], Loss: 0.0990\n",
      "Epoch [65/100], Step [60/143], Loss: 0.1574\n",
      "Epoch [65/100], Step [80/143], Loss: 0.1137\n",
      "Epoch [65/100], Step [100/143], Loss: 0.1223\n",
      "Epoch [65/100], Step [120/143], Loss: 0.1288\n",
      "Epoch [65/100], Step [140/143], Loss: 0.2542\n",
      "Epoch [66/100], Step [20/143], Loss: 0.2656\n",
      "Epoch [66/100], Step [40/143], Loss: 0.0962\n",
      "Epoch [66/100], Step [60/143], Loss: 0.1529\n",
      "Epoch [66/100], Step [80/143], Loss: 0.1105\n",
      "Epoch [66/100], Step [100/143], Loss: 0.1192\n",
      "Epoch [66/100], Step [120/143], Loss: 0.1251\n",
      "Epoch [66/100], Step [140/143], Loss: 0.2512\n",
      "Epoch [67/100], Step [20/143], Loss: 0.2626\n",
      "Epoch [67/100], Step [40/143], Loss: 0.0935\n",
      "Epoch [67/100], Step [60/143], Loss: 0.1487\n",
      "Epoch [67/100], Step [80/143], Loss: 0.1074\n",
      "Epoch [67/100], Step [100/143], Loss: 0.1162\n",
      "Epoch [67/100], Step [120/143], Loss: 0.1217\n",
      "Epoch [67/100], Step [140/143], Loss: 0.2484\n",
      "Epoch [68/100], Step [20/143], Loss: 0.2598\n",
      "Epoch [68/100], Step [40/143], Loss: 0.0910\n",
      "Epoch [68/100], Step [60/143], Loss: 0.1447\n",
      "Epoch [68/100], Step [80/143], Loss: 0.1045\n",
      "Epoch [68/100], Step [100/143], Loss: 0.1134\n",
      "Epoch [68/100], Step [120/143], Loss: 0.1184\n",
      "Epoch [68/100], Step [140/143], Loss: 0.2458\n",
      "Epoch [69/100], Step [20/143], Loss: 0.2572\n",
      "Epoch [69/100], Step [40/143], Loss: 0.0886\n",
      "Epoch [69/100], Step [60/143], Loss: 0.1409\n",
      "Epoch [69/100], Step [80/143], Loss: 0.1017\n",
      "Epoch [69/100], Step [100/143], Loss: 0.1107\n",
      "Epoch [69/100], Step [120/143], Loss: 0.1152\n",
      "Epoch [69/100], Step [140/143], Loss: 0.2433\n",
      "Epoch [70/100], Step [20/143], Loss: 0.2546\n",
      "Epoch [70/100], Step [40/143], Loss: 0.0864\n",
      "Epoch [70/100], Step [60/143], Loss: 0.1372\n",
      "Epoch [70/100], Step [80/143], Loss: 0.0991\n",
      "Epoch [70/100], Step [100/143], Loss: 0.1081\n",
      "Epoch [70/100], Step [120/143], Loss: 0.1122\n",
      "Epoch [70/100], Step [140/143], Loss: 0.2409\n",
      "Epoch [71/100], Step [20/143], Loss: 0.2522\n",
      "Epoch [71/100], Step [40/143], Loss: 0.0842\n",
      "Epoch [71/100], Step [60/143], Loss: 0.1337\n",
      "Epoch [71/100], Step [80/143], Loss: 0.0966\n",
      "Epoch [71/100], Step [100/143], Loss: 0.1056\n",
      "Epoch [71/100], Step [120/143], Loss: 0.1094\n",
      "Epoch [71/100], Step [140/143], Loss: 0.2386\n",
      "Epoch [72/100], Step [20/143], Loss: 0.2499\n",
      "Epoch [72/100], Step [40/143], Loss: 0.0821\n",
      "Epoch [72/100], Step [60/143], Loss: 0.1304\n",
      "Epoch [72/100], Step [80/143], Loss: 0.0941\n",
      "Epoch [72/100], Step [100/143], Loss: 0.1033\n",
      "Epoch [72/100], Step [120/143], Loss: 0.1067\n",
      "Epoch [72/100], Step [140/143], Loss: 0.2365\n",
      "Epoch [73/100], Step [20/143], Loss: 0.2477\n",
      "Epoch [73/100], Step [40/143], Loss: 0.0801\n",
      "Epoch [73/100], Step [60/143], Loss: 0.1271\n",
      "Epoch [73/100], Step [80/143], Loss: 0.0918\n",
      "Epoch [73/100], Step [100/143], Loss: 0.1010\n",
      "Epoch [73/100], Step [120/143], Loss: 0.1040\n",
      "Epoch [73/100], Step [140/143], Loss: 0.2345\n",
      "Epoch [74/100], Step [20/143], Loss: 0.2456\n",
      "Epoch [74/100], Step [40/143], Loss: 0.0782\n",
      "Epoch [74/100], Step [60/143], Loss: 0.1241\n",
      "Epoch [74/100], Step [80/143], Loss: 0.0896\n",
      "Epoch [74/100], Step [100/143], Loss: 0.0988\n",
      "Epoch [74/100], Step [120/143], Loss: 0.1015\n",
      "Epoch [74/100], Step [140/143], Loss: 0.2325\n",
      "Epoch [75/100], Step [20/143], Loss: 0.2436\n",
      "Epoch [75/100], Step [40/143], Loss: 0.0764\n",
      "Epoch [75/100], Step [60/143], Loss: 0.1211\n",
      "Epoch [75/100], Step [80/143], Loss: 0.0875\n",
      "Epoch [75/100], Step [100/143], Loss: 0.0966\n",
      "Epoch [75/100], Step [120/143], Loss: 0.0992\n",
      "Epoch [75/100], Step [140/143], Loss: 0.2307\n",
      "Epoch [76/100], Step [20/143], Loss: 0.2417\n",
      "Epoch [76/100], Step [40/143], Loss: 0.0747\n",
      "Epoch [76/100], Step [60/143], Loss: 0.1183\n",
      "Epoch [76/100], Step [80/143], Loss: 0.0854\n",
      "Epoch [76/100], Step [100/143], Loss: 0.0946\n",
      "Epoch [76/100], Step [120/143], Loss: 0.0969\n",
      "Epoch [76/100], Step [140/143], Loss: 0.2290\n",
      "Epoch [77/100], Step [20/143], Loss: 0.2399\n",
      "Epoch [77/100], Step [40/143], Loss: 0.0730\n",
      "Epoch [77/100], Step [60/143], Loss: 0.1156\n",
      "Epoch [77/100], Step [80/143], Loss: 0.0835\n",
      "Epoch [77/100], Step [100/143], Loss: 0.0926\n",
      "Epoch [77/100], Step [120/143], Loss: 0.0947\n",
      "Epoch [77/100], Step [140/143], Loss: 0.2273\n",
      "Epoch [78/100], Step [20/143], Loss: 0.2381\n",
      "Epoch [78/100], Step [40/143], Loss: 0.0714\n",
      "Epoch [78/100], Step [60/143], Loss: 0.1130\n",
      "Epoch [78/100], Step [80/143], Loss: 0.0816\n",
      "Epoch [78/100], Step [100/143], Loss: 0.0907\n",
      "Epoch [78/100], Step [120/143], Loss: 0.0926\n",
      "Epoch [78/100], Step [140/143], Loss: 0.2257\n",
      "Epoch [79/100], Step [20/143], Loss: 0.2364\n",
      "Epoch [79/100], Step [40/143], Loss: 0.0699\n",
      "Epoch [79/100], Step [60/143], Loss: 0.1105\n",
      "Epoch [79/100], Step [80/143], Loss: 0.0798\n",
      "Epoch [79/100], Step [100/143], Loss: 0.0889\n",
      "Epoch [79/100], Step [120/143], Loss: 0.0905\n",
      "Epoch [79/100], Step [140/143], Loss: 0.2242\n",
      "Epoch [80/100], Step [20/143], Loss: 0.2347\n",
      "Epoch [80/100], Step [40/143], Loss: 0.0684\n",
      "Epoch [80/100], Step [60/143], Loss: 0.1081\n",
      "Epoch [80/100], Step [80/143], Loss: 0.0781\n",
      "Epoch [80/100], Step [100/143], Loss: 0.0871\n",
      "Epoch [80/100], Step [120/143], Loss: 0.0886\n",
      "Epoch [80/100], Step [140/143], Loss: 0.2227\n",
      "Epoch [81/100], Step [20/143], Loss: 0.2332\n",
      "Epoch [81/100], Step [40/143], Loss: 0.0670\n",
      "Epoch [81/100], Step [60/143], Loss: 0.1057\n",
      "Epoch [81/100], Step [80/143], Loss: 0.0764\n",
      "Epoch [81/100], Step [100/143], Loss: 0.0854\n",
      "Epoch [81/100], Step [120/143], Loss: 0.0867\n",
      "Epoch [81/100], Step [140/143], Loss: 0.2214\n",
      "Epoch [82/100], Step [20/143], Loss: 0.2316\n",
      "Epoch [82/100], Step [40/143], Loss: 0.0656\n",
      "Epoch [82/100], Step [60/143], Loss: 0.1035\n",
      "Epoch [82/100], Step [80/143], Loss: 0.0748\n",
      "Epoch [82/100], Step [100/143], Loss: 0.0838\n",
      "Epoch [82/100], Step [120/143], Loss: 0.0849\n",
      "Epoch [82/100], Step [140/143], Loss: 0.2200\n",
      "Epoch [83/100], Step [20/143], Loss: 0.2302\n",
      "Epoch [83/100], Step [40/143], Loss: 0.0643\n",
      "Epoch [83/100], Step [60/143], Loss: 0.1013\n",
      "Epoch [83/100], Step [80/143], Loss: 0.0732\n",
      "Epoch [83/100], Step [100/143], Loss: 0.0822\n",
      "Epoch [83/100], Step [120/143], Loss: 0.0832\n",
      "Epoch [83/100], Step [140/143], Loss: 0.2187\n",
      "Epoch [84/100], Step [20/143], Loss: 0.2288\n",
      "Epoch [84/100], Step [40/143], Loss: 0.0630\n",
      "Epoch [84/100], Step [60/143], Loss: 0.0993\n",
      "Epoch [84/100], Step [80/143], Loss: 0.0718\n",
      "Epoch [84/100], Step [100/143], Loss: 0.0807\n",
      "Epoch [84/100], Step [120/143], Loss: 0.0815\n",
      "Epoch [84/100], Step [140/143], Loss: 0.2175\n",
      "Epoch [85/100], Step [20/143], Loss: 0.2274\n",
      "Epoch [85/100], Step [40/143], Loss: 0.0618\n",
      "Epoch [85/100], Step [60/143], Loss: 0.0973\n",
      "Epoch [85/100], Step [80/143], Loss: 0.0703\n",
      "Epoch [85/100], Step [100/143], Loss: 0.0792\n",
      "Epoch [85/100], Step [120/143], Loss: 0.0799\n",
      "Epoch [85/100], Step [140/143], Loss: 0.2163\n",
      "Epoch [86/100], Step [20/143], Loss: 0.2261\n",
      "Epoch [86/100], Step [40/143], Loss: 0.0606\n",
      "Epoch [86/100], Step [60/143], Loss: 0.0953\n",
      "Epoch [86/100], Step [80/143], Loss: 0.0689\n",
      "Epoch [86/100], Step [100/143], Loss: 0.0778\n",
      "Epoch [86/100], Step [120/143], Loss: 0.0784\n",
      "Epoch [86/100], Step [140/143], Loss: 0.2152\n",
      "Epoch [87/100], Step [20/143], Loss: 0.2248\n",
      "Epoch [87/100], Step [40/143], Loss: 0.0595\n",
      "Epoch [87/100], Step [60/143], Loss: 0.0935\n",
      "Epoch [87/100], Step [80/143], Loss: 0.0676\n",
      "Epoch [87/100], Step [100/143], Loss: 0.0764\n",
      "Epoch [87/100], Step [120/143], Loss: 0.0769\n",
      "Epoch [87/100], Step [140/143], Loss: 0.2141\n",
      "Epoch [88/100], Step [20/143], Loss: 0.2236\n",
      "Epoch [88/100], Step [40/143], Loss: 0.0584\n",
      "Epoch [88/100], Step [60/143], Loss: 0.0917\n",
      "Epoch [88/100], Step [80/143], Loss: 0.0663\n",
      "Epoch [88/100], Step [100/143], Loss: 0.0750\n",
      "Epoch [88/100], Step [120/143], Loss: 0.0754\n",
      "Epoch [88/100], Step [140/143], Loss: 0.2130\n",
      "Epoch [89/100], Step [20/143], Loss: 0.2224\n",
      "Epoch [89/100], Step [40/143], Loss: 0.0574\n",
      "Epoch [89/100], Step [60/143], Loss: 0.0899\n",
      "Epoch [89/100], Step [80/143], Loss: 0.0651\n",
      "Epoch [89/100], Step [100/143], Loss: 0.0737\n",
      "Epoch [89/100], Step [120/143], Loss: 0.0740\n",
      "Epoch [89/100], Step [140/143], Loss: 0.2120\n",
      "Epoch [90/100], Step [20/143], Loss: 0.2212\n",
      "Epoch [90/100], Step [40/143], Loss: 0.0563\n",
      "Epoch [90/100], Step [60/143], Loss: 0.0882\n",
      "Epoch [90/100], Step [80/143], Loss: 0.0639\n",
      "Epoch [90/100], Step [100/143], Loss: 0.0725\n",
      "Epoch [90/100], Step [120/143], Loss: 0.0727\n",
      "Epoch [90/100], Step [140/143], Loss: 0.2110\n",
      "Epoch [91/100], Step [20/143], Loss: 0.2201\n",
      "Epoch [91/100], Step [40/143], Loss: 0.0554\n",
      "Epoch [91/100], Step [60/143], Loss: 0.0866\n",
      "Epoch [91/100], Step [80/143], Loss: 0.0627\n",
      "Epoch [91/100], Step [100/143], Loss: 0.0712\n",
      "Epoch [91/100], Step [120/143], Loss: 0.0714\n",
      "Epoch [91/100], Step [140/143], Loss: 0.2100\n",
      "Epoch [92/100], Step [20/143], Loss: 0.2190\n",
      "Epoch [92/100], Step [40/143], Loss: 0.0544\n",
      "Epoch [92/100], Step [60/143], Loss: 0.0850\n",
      "Epoch [92/100], Step [80/143], Loss: 0.0616\n",
      "Epoch [92/100], Step [100/143], Loss: 0.0700\n",
      "Epoch [92/100], Step [120/143], Loss: 0.0701\n",
      "Epoch [92/100], Step [140/143], Loss: 0.2091\n",
      "Epoch [93/100], Step [20/143], Loss: 0.2180\n",
      "Epoch [93/100], Step [40/143], Loss: 0.0535\n",
      "Epoch [93/100], Step [60/143], Loss: 0.0835\n",
      "Epoch [93/100], Step [80/143], Loss: 0.0605\n",
      "Epoch [93/100], Step [100/143], Loss: 0.0689\n",
      "Epoch [93/100], Step [120/143], Loss: 0.0689\n",
      "Epoch [93/100], Step [140/143], Loss: 0.2082\n",
      "Epoch [94/100], Step [20/143], Loss: 0.2170\n",
      "Epoch [94/100], Step [40/143], Loss: 0.0526\n",
      "Epoch [94/100], Step [60/143], Loss: 0.0820\n",
      "Epoch [94/100], Step [80/143], Loss: 0.0594\n",
      "Epoch [94/100], Step [100/143], Loss: 0.0678\n",
      "Epoch [94/100], Step [120/143], Loss: 0.0677\n",
      "Epoch [94/100], Step [140/143], Loss: 0.2073\n",
      "Epoch [95/100], Step [20/143], Loss: 0.2160\n",
      "Epoch [95/100], Step [40/143], Loss: 0.0517\n",
      "Epoch [95/100], Step [60/143], Loss: 0.0806\n",
      "Epoch [95/100], Step [80/143], Loss: 0.0584\n",
      "Epoch [95/100], Step [100/143], Loss: 0.0667\n",
      "Epoch [95/100], Step [120/143], Loss: 0.0666\n",
      "Epoch [95/100], Step [140/143], Loss: 0.2065\n",
      "Epoch [96/100], Step [20/143], Loss: 0.2150\n",
      "Epoch [96/100], Step [40/143], Loss: 0.0509\n",
      "Epoch [96/100], Step [60/143], Loss: 0.0792\n",
      "Epoch [96/100], Step [80/143], Loss: 0.0574\n",
      "Epoch [96/100], Step [100/143], Loss: 0.0656\n",
      "Epoch [96/100], Step [120/143], Loss: 0.0655\n",
      "Epoch [96/100], Step [140/143], Loss: 0.2056\n",
      "Epoch [97/100], Step [20/143], Loss: 0.2141\n",
      "Epoch [97/100], Step [40/143], Loss: 0.0501\n",
      "Epoch [97/100], Step [60/143], Loss: 0.0779\n",
      "Epoch [97/100], Step [80/143], Loss: 0.0564\n",
      "Epoch [97/100], Step [100/143], Loss: 0.0646\n",
      "Epoch [97/100], Step [120/143], Loss: 0.0644\n",
      "Epoch [97/100], Step [140/143], Loss: 0.2048\n",
      "Epoch [98/100], Step [20/143], Loss: 0.2132\n",
      "Epoch [98/100], Step [40/143], Loss: 0.0493\n",
      "Epoch [98/100], Step [60/143], Loss: 0.0766\n",
      "Epoch [98/100], Step [80/143], Loss: 0.0555\n",
      "Epoch [98/100], Step [100/143], Loss: 0.0636\n",
      "Epoch [98/100], Step [120/143], Loss: 0.0634\n",
      "Epoch [98/100], Step [140/143], Loss: 0.2040\n",
      "Epoch [99/100], Step [20/143], Loss: 0.2123\n",
      "Epoch [99/100], Step [40/143], Loss: 0.0485\n",
      "Epoch [99/100], Step [60/143], Loss: 0.0753\n",
      "Epoch [99/100], Step [80/143], Loss: 0.0546\n",
      "Epoch [99/100], Step [100/143], Loss: 0.0626\n",
      "Epoch [99/100], Step [120/143], Loss: 0.0623\n",
      "Epoch [99/100], Step [140/143], Loss: 0.2033\n",
      "Epoch [100/100], Step [20/143], Loss: 0.2115\n",
      "Epoch [100/100], Step [40/143], Loss: 0.0478\n",
      "Epoch [100/100], Step [60/143], Loss: 0.0741\n",
      "Epoch [100/100], Step [80/143], Loss: 0.0537\n",
      "Epoch [100/100], Step [100/143], Loss: 0.0617\n",
      "Epoch [100/100], Step [120/143], Loss: 0.0614\n",
      "Epoch [100/100], Step [140/143], Loss: 0.2025\n"
     ]
    }
   ],
   "execution_count": 21
  },
  {
   "cell_type": "code",
   "id": "8a4e36b033918def",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T00:36:13.199073Z",
     "start_time": "2025-07-06T00:36:13.192975Z"
    }
   },
   "source": [
    "model.eval()\n",
    "\n",
    "def generate_text(content, steps, temperature=0.8):\n",
    "    \"\"\"Autoregressively generate `steps` more characters after the seed string `content`.\n",
    "\n",
    "    temperature rescales the logits before sampling: values > 1 flatten the\n",
    "    distribution (more random), values < 1 sharpen it (more deterministic).\n",
    "    Returns the seed plus the generated characters as a single string.\n",
    "    \"\"\"\n",
    "\n",
    "    words = [word for word in content]\n",
    "\n",
    "    hidden = None\n",
    "    for _ in range(steps):\n",
    "        # inputs = [word_to_idx[word] for word in words[-SEQ_LENGTH:]] # alternative: feed the last SEQ_LENGTH characters each step\n",
    "        inputs = [word_to_idx[word] for word in words[-1:]] # feed only the last character; the carried hidden state supplies the earlier context\n",
    "        inputs = torch.LongTensor(inputs)\n",
    "\n",
    "        # reshape to the model's expected input layout\n",
    "        inputs = inputs.view(1, -1)  # (1, seq_len)\n",
    "\n",
    "        # forward pass\n",
    "        with torch.no_grad():\n",
    "            # `outputs` contains every time step's output; for prediction we only need the\n",
    "            # last step (e.g. a 2-character input has 2 steps, but only step 2 matters).\n",
    "            # Each step's output is a score over the whole vocabulary.\n",
    "            # `hidden` is the recurrent state: because this loop predicts repeatedly,\n",
    "            # it must be kept and passed back in on the next iteration.\n",
    "            outputs, hidden = model(inputs, hidden)\n",
    "            last_output = outputs[0, -1, :]  # output of the last time step\n",
    "\n",
    "        # temperature sampling:\n",
    "        # last_output / temperature shrinks the logits when temperature > 1, e.g. [8,2,2] / 2 = [4,1,1],\n",
    "        # narrowing the gap between options; with temperature < 1 the gap widens instead,\n",
    "        # e.g. [8,2,2] / 0.5 = [16,4,4].\n",
    "        # probs is the softmax of the scaled logits\n",
    "        probs = torch.softmax(last_output / temperature, dim=-1)\n",
    "\n",
    "        # multinomial sampling: if probs is e.g. [0.3, 0.2, 0.5] we draw one of the\n",
    "        # indices 0, 1, 2, so index 2 is picked 50% of the time —\n",
    "        # the higher an index's probability, the more likely it is sampled\n",
    "        result_idx = torch.multinomial(probs, 1).item()\n",
    "\n",
    "        # greedy alternative: take the most probable index\n",
    "        # result_idx = torch.argmax(probs).item()\n",
    "\n",
    "\n",
    "        # append the sampled character to the running sequence\n",
    "        words.append(idx_to_word[result_idx])\n",
    "\n",
    "    return ''.join(words)\n",
    "\n",
    "\n",
    "# 20 = number of characters to generate; larger temperature = more random output\n",
    "print(generate_text(\"鹰\", 20, temperature=0.1))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
      "怅寥\n"
     ]
    }
   ],
   "execution_count": 22
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-06T00:36:13.212174Z",
     "start_time": "2025-07-06T00:36:13.208441Z"
    }
   },
   "cell_type": "code",
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "\n",
    "# 创建带梯度的参数（通常通过模型训练获得）\n",
    "param = nn.Parameter(torch.randn(2))\n",
    "param.grad = torch.tensor([3.0, 4.0], dtype=torch.float) # L2\n",
    "\n",
    "norm = torch.linalg.norm(param.grad)\n",
    "print(norm)\n",
    "\n",
    "nn.utils.clip_grad_norm_([param], max_norm=1.0)\n",
    "\n",
    "print(param.grad)\n",
    "\n",
    "norm = torch.linalg.norm(param.grad)\n",
    "print(norm)"
   ],
   "id": "89a9dffa1cb4d614",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(5.)\n",
      "tensor([0.6000, 0.8000])\n",
      "tensor(1.0000)\n"
     ]
    }
   ],
   "execution_count": 23
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
