{
 "cells": [
  {
   "cell_type": "code",
   "id": "92c9da5866bdcf7",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-01T13:09:04.943483Z",
     "start_time": "2025-07-01T13:09:04.936932Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import numpy as np\n",
    "\n",
    "# 示例文本数据，一首诗\n",
    "text = \"\"\"\n",
    "独立寒秋，湘江北去，橘子洲头。\n",
    "看万山红遍，层林尽染；漫江碧透，百舸争流。\n",
    "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
    "怅寥廓，问苍茫大地，谁主沉浮？\n",
    "携来百侣曾游，忆往昔峥嵘岁月稠。\n",
    "恰同学少年，风华正茂；书生意气，挥斥方遒。\n",
    "指点江山，激扬文字，粪土当年万户侯。\n",
    "曾记否，到中流击水，浪遏飞舟？\n",
    "\"\"\"\n",
    "\n",
    "# 创建词汇表\n",
    "words = set(text)\n",
    "vocab_size = len(words)\n",
    "word_to_idx = {word: i for i, word in enumerate(words)}\n",
    "idx_to_word = {i: word for i, word in enumerate(words)}\n",
    "\n",
    "print(words)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'鱼', '昔', '看', '子', '寥', '？', '秋', '侯', '字', '生', '记', '到', '少', '舟', '大', '往', '空', '嵘', '同', '当', '峥', '稠', '红', '粪', '鹰', '竞', '底', '流', '橘', '茂', '气', '林', '山', '忆', '类', '户', '点', '；', '洲', '浅', '漫', '遒', '透', '年', '湘', '主', '由', '万', '地', '浮', '挥', '。', '游', '击', '侣', '长', '浪', '携', '争', '北', '怅', '天', '激', '舸', '否', '正', '霜', '岁', '江', '寒', '廓', '自', '风', '学', '水', '层', '翔', '书', '碧', '曾', '沉', '头', '遏', '意', '立', '飞', '月', '，', '来', '方', '指', '中', '文', '百', '恰', '染', '\\n', '谁', '茫', '扬', '遍', '华', '尽', '独', '问', '斥', '土', '去', '苍'}\n"
     ]
    }
   ],
   "execution_count": 11
  },
  {
   "cell_type": "code",
   "id": "91280acf83012c57",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-01T13:09:04.955979Z",
     "start_time": "2025-07-01T13:09:04.950479Z"
    }
   },
   "source": [
    "\n",
    "# Hyperparameters\n",
    "SEQ_LENGTH = 10  # length of each input window\n",
    "BATCH_SIZE = 1\n",
    "HIDDEN_SIZE = 128\n",
    "\n",
    "\n",
    "# Training data: sliding character windows over the corpus.\n",
    "class TextDataset(Dataset):\n",
    "    def __init__(self, text, seq_length):\n",
    "        self.text = text\n",
    "        self.seq_length = seq_length\n",
    "\n",
    "        # Encode every character as its vocabulary index.\n",
    "        self.data = [word_to_idx[ch] for ch in text]\n",
    "\n",
    "    def __len__(self):\n",
    "        # One sample per window start; the last window still needs a\n",
    "        # one-step-shifted target, hence the subtraction.\n",
    "        return len(self.data) - self.seq_length\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # Take seq_length + 1 consecutive indices; the input is everything\n",
    "        # but the last, the target is everything but the first.\n",
    "        # E.g. with corpus abcdefg: input = abc, target = bcd.\n",
    "        window = self.data[idx:idx + self.seq_length + 1]\n",
    "        return torch.LongTensor(window[:-1]), torch.LongTensor(window[1:])\n",
    "\n",
    "\n",
    "dataset = TextDataset(text, SEQ_LENGTH)\n",
    "dataloader = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)\n",
    "print(dataset.data)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[96, 103, 84, 69, 6, 87, 44, 68, 59, 107, 87, 28, 3, 38, 81, 51, 96, 2, 47, 32, 22, 100, 87, 75, 31, 102, 95, 37, 40, 68, 78, 42, 87, 93, 63, 58, 27, 51, 96, 24, 53, 55, 16, 87, 0, 76, 39, 26, 87, 47, 34, 66, 61, 25, 71, 46, 51, 96, 60, 4, 70, 87, 104, 108, 98, 14, 48, 87, 97, 45, 80, 49, 5, 96, 57, 88, 93, 54, 79, 52, 87, 33, 15, 1, 20, 17, 67, 86, 21, 51, 96, 94, 18, 73, 12, 43, 87, 72, 101, 65, 29, 37, 77, 9, 83, 30, 87, 50, 105, 89, 41, 51, 96, 90, 36, 68, 32, 87, 62, 99, 92, 8, 87, 23, 106, 19, 43, 47, 35, 7, 51, 96, 79, 10, 64, 87, 11, 91, 27, 53, 74, 87, 56, 82, 85, 13, 5, 96]\n"
     ]
    }
   ],
   "execution_count": 12
  },
  {
   "cell_type": "code",
   "id": "88cba6d2660e76b2",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-01T13:09:04.992581Z",
     "start_time": "2025-07-01T13:09:04.973871Z"
    }
   },
   "source": [
    "# Sanity check: iterate the loader and print each (input, target) batch.\n",
    "# The target should be the input shifted one character to the right.\n",
    "for input_seq, target_seq in dataloader:\n",
    "    print(input_seq)\n",
    "    print(target_seq)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[ 96, 103,  84,  69,   6,  87,  44,  68,  59, 107]])\n",
      "tensor([[103,  84,  69,   6,  87,  44,  68,  59, 107,  87]])\n",
      "tensor([[103,  84,  69,   6,  87,  44,  68,  59, 107,  87]])\n",
      "tensor([[ 84,  69,   6,  87,  44,  68,  59, 107,  87,  28]])\n",
      "tensor([[ 84,  69,   6,  87,  44,  68,  59, 107,  87,  28]])\n",
      "tensor([[ 69,   6,  87,  44,  68,  59, 107,  87,  28,   3]])\n",
      "tensor([[ 69,   6,  87,  44,  68,  59, 107,  87,  28,   3]])\n",
      "tensor([[  6,  87,  44,  68,  59, 107,  87,  28,   3,  38]])\n",
      "tensor([[  6,  87,  44,  68,  59, 107,  87,  28,   3,  38]])\n",
      "tensor([[ 87,  44,  68,  59, 107,  87,  28,   3,  38,  81]])\n",
      "tensor([[ 87,  44,  68,  59, 107,  87,  28,   3,  38,  81]])\n",
      "tensor([[ 44,  68,  59, 107,  87,  28,   3,  38,  81,  51]])\n",
      "tensor([[ 44,  68,  59, 107,  87,  28,   3,  38,  81,  51]])\n",
      "tensor([[ 68,  59, 107,  87,  28,   3,  38,  81,  51,  96]])\n",
      "tensor([[ 68,  59, 107,  87,  28,   3,  38,  81,  51,  96]])\n",
      "tensor([[ 59, 107,  87,  28,   3,  38,  81,  51,  96,   2]])\n",
      "tensor([[ 59, 107,  87,  28,   3,  38,  81,  51,  96,   2]])\n",
      "tensor([[107,  87,  28,   3,  38,  81,  51,  96,   2,  47]])\n",
      "tensor([[107,  87,  28,   3,  38,  81,  51,  96,   2,  47]])\n",
      "tensor([[87, 28,  3, 38, 81, 51, 96,  2, 47, 32]])\n",
      "tensor([[87, 28,  3, 38, 81, 51, 96,  2, 47, 32]])\n",
      "tensor([[28,  3, 38, 81, 51, 96,  2, 47, 32, 22]])\n",
      "tensor([[28,  3, 38, 81, 51, 96,  2, 47, 32, 22]])\n",
      "tensor([[  3,  38,  81,  51,  96,   2,  47,  32,  22, 100]])\n",
      "tensor([[  3,  38,  81,  51,  96,   2,  47,  32,  22, 100]])\n",
      "tensor([[ 38,  81,  51,  96,   2,  47,  32,  22, 100,  87]])\n",
      "tensor([[ 38,  81,  51,  96,   2,  47,  32,  22, 100,  87]])\n",
      "tensor([[ 81,  51,  96,   2,  47,  32,  22, 100,  87,  75]])\n",
      "tensor([[ 81,  51,  96,   2,  47,  32,  22, 100,  87,  75]])\n",
      "tensor([[ 51,  96,   2,  47,  32,  22, 100,  87,  75,  31]])\n",
      "tensor([[ 51,  96,   2,  47,  32,  22, 100,  87,  75,  31]])\n",
      "tensor([[ 96,   2,  47,  32,  22, 100,  87,  75,  31, 102]])\n",
      "tensor([[ 96,   2,  47,  32,  22, 100,  87,  75,  31, 102]])\n",
      "tensor([[  2,  47,  32,  22, 100,  87,  75,  31, 102,  95]])\n",
      "tensor([[  2,  47,  32,  22, 100,  87,  75,  31, 102,  95]])\n",
      "tensor([[ 47,  32,  22, 100,  87,  75,  31, 102,  95,  37]])\n",
      "tensor([[ 47,  32,  22, 100,  87,  75,  31, 102,  95,  37]])\n",
      "tensor([[ 32,  22, 100,  87,  75,  31, 102,  95,  37,  40]])\n",
      "tensor([[ 32,  22, 100,  87,  75,  31, 102,  95,  37,  40]])\n",
      "tensor([[ 22, 100,  87,  75,  31, 102,  95,  37,  40,  68]])\n",
      "tensor([[ 22, 100,  87,  75,  31, 102,  95,  37,  40,  68]])\n",
      "tensor([[100,  87,  75,  31, 102,  95,  37,  40,  68,  78]])\n",
      "tensor([[100,  87,  75,  31, 102,  95,  37,  40,  68,  78]])\n",
      "tensor([[ 87,  75,  31, 102,  95,  37,  40,  68,  78,  42]])\n",
      "tensor([[ 87,  75,  31, 102,  95,  37,  40,  68,  78,  42]])\n",
      "tensor([[ 75,  31, 102,  95,  37,  40,  68,  78,  42,  87]])\n",
      "tensor([[ 75,  31, 102,  95,  37,  40,  68,  78,  42,  87]])\n",
      "tensor([[ 31, 102,  95,  37,  40,  68,  78,  42,  87,  93]])\n",
      "tensor([[ 31, 102,  95,  37,  40,  68,  78,  42,  87,  93]])\n",
      "tensor([[102,  95,  37,  40,  68,  78,  42,  87,  93,  63]])\n",
      "tensor([[102,  95,  37,  40,  68,  78,  42,  87,  93,  63]])\n",
      "tensor([[95, 37, 40, 68, 78, 42, 87, 93, 63, 58]])\n",
      "tensor([[95, 37, 40, 68, 78, 42, 87, 93, 63, 58]])\n",
      "tensor([[37, 40, 68, 78, 42, 87, 93, 63, 58, 27]])\n",
      "tensor([[37, 40, 68, 78, 42, 87, 93, 63, 58, 27]])\n",
      "tensor([[40, 68, 78, 42, 87, 93, 63, 58, 27, 51]])\n",
      "tensor([[40, 68, 78, 42, 87, 93, 63, 58, 27, 51]])\n",
      "tensor([[68, 78, 42, 87, 93, 63, 58, 27, 51, 96]])\n",
      "tensor([[68, 78, 42, 87, 93, 63, 58, 27, 51, 96]])\n",
      "tensor([[78, 42, 87, 93, 63, 58, 27, 51, 96, 24]])\n",
      "tensor([[78, 42, 87, 93, 63, 58, 27, 51, 96, 24]])\n",
      "tensor([[42, 87, 93, 63, 58, 27, 51, 96, 24, 53]])\n",
      "tensor([[42, 87, 93, 63, 58, 27, 51, 96, 24, 53]])\n",
      "tensor([[87, 93, 63, 58, 27, 51, 96, 24, 53, 55]])\n",
      "tensor([[87, 93, 63, 58, 27, 51, 96, 24, 53, 55]])\n",
      "tensor([[93, 63, 58, 27, 51, 96, 24, 53, 55, 16]])\n",
      "tensor([[93, 63, 58, 27, 51, 96, 24, 53, 55, 16]])\n",
      "tensor([[63, 58, 27, 51, 96, 24, 53, 55, 16, 87]])\n",
      "tensor([[63, 58, 27, 51, 96, 24, 53, 55, 16, 87]])\n",
      "tensor([[58, 27, 51, 96, 24, 53, 55, 16, 87,  0]])\n",
      "tensor([[58, 27, 51, 96, 24, 53, 55, 16, 87,  0]])\n",
      "tensor([[27, 51, 96, 24, 53, 55, 16, 87,  0, 76]])\n",
      "tensor([[27, 51, 96, 24, 53, 55, 16, 87,  0, 76]])\n",
      "tensor([[51, 96, 24, 53, 55, 16, 87,  0, 76, 39]])\n",
      "tensor([[51, 96, 24, 53, 55, 16, 87,  0, 76, 39]])\n",
      "tensor([[96, 24, 53, 55, 16, 87,  0, 76, 39, 26]])\n",
      "tensor([[96, 24, 53, 55, 16, 87,  0, 76, 39, 26]])\n",
      "tensor([[24, 53, 55, 16, 87,  0, 76, 39, 26, 87]])\n",
      "tensor([[24, 53, 55, 16, 87,  0, 76, 39, 26, 87]])\n",
      "tensor([[53, 55, 16, 87,  0, 76, 39, 26, 87, 47]])\n",
      "tensor([[53, 55, 16, 87,  0, 76, 39, 26, 87, 47]])\n",
      "tensor([[55, 16, 87,  0, 76, 39, 26, 87, 47, 34]])\n",
      "tensor([[55, 16, 87,  0, 76, 39, 26, 87, 47, 34]])\n",
      "tensor([[16, 87,  0, 76, 39, 26, 87, 47, 34, 66]])\n",
      "tensor([[16, 87,  0, 76, 39, 26, 87, 47, 34, 66]])\n",
      "tensor([[87,  0, 76, 39, 26, 87, 47, 34, 66, 61]])\n",
      "tensor([[87,  0, 76, 39, 26, 87, 47, 34, 66, 61]])\n",
      "tensor([[ 0, 76, 39, 26, 87, 47, 34, 66, 61, 25]])\n",
      "tensor([[ 0, 76, 39, 26, 87, 47, 34, 66, 61, 25]])\n",
      "tensor([[76, 39, 26, 87, 47, 34, 66, 61, 25, 71]])\n",
      "tensor([[76, 39, 26, 87, 47, 34, 66, 61, 25, 71]])\n",
      "tensor([[39, 26, 87, 47, 34, 66, 61, 25, 71, 46]])\n",
      "tensor([[39, 26, 87, 47, 34, 66, 61, 25, 71, 46]])\n",
      "tensor([[26, 87, 47, 34, 66, 61, 25, 71, 46, 51]])\n",
      "tensor([[26, 87, 47, 34, 66, 61, 25, 71, 46, 51]])\n",
      "tensor([[87, 47, 34, 66, 61, 25, 71, 46, 51, 96]])\n",
      "tensor([[87, 47, 34, 66, 61, 25, 71, 46, 51, 96]])\n",
      "tensor([[47, 34, 66, 61, 25, 71, 46, 51, 96, 60]])\n",
      "tensor([[47, 34, 66, 61, 25, 71, 46, 51, 96, 60]])\n",
      "tensor([[34, 66, 61, 25, 71, 46, 51, 96, 60,  4]])\n",
      "tensor([[34, 66, 61, 25, 71, 46, 51, 96, 60,  4]])\n",
      "tensor([[66, 61, 25, 71, 46, 51, 96, 60,  4, 70]])\n",
      "tensor([[66, 61, 25, 71, 46, 51, 96, 60,  4, 70]])\n",
      "tensor([[61, 25, 71, 46, 51, 96, 60,  4, 70, 87]])\n",
      "tensor([[61, 25, 71, 46, 51, 96, 60,  4, 70, 87]])\n",
      "tensor([[ 25,  71,  46,  51,  96,  60,   4,  70,  87, 104]])\n",
      "tensor([[ 25,  71,  46,  51,  96,  60,   4,  70,  87, 104]])\n",
      "tensor([[ 71,  46,  51,  96,  60,   4,  70,  87, 104, 108]])\n",
      "tensor([[ 71,  46,  51,  96,  60,   4,  70,  87, 104, 108]])\n",
      "tensor([[ 46,  51,  96,  60,   4,  70,  87, 104, 108,  98]])\n",
      "tensor([[ 46,  51,  96,  60,   4,  70,  87, 104, 108,  98]])\n",
      "tensor([[ 51,  96,  60,   4,  70,  87, 104, 108,  98,  14]])\n",
      "tensor([[ 51,  96,  60,   4,  70,  87, 104, 108,  98,  14]])\n",
      "tensor([[ 96,  60,   4,  70,  87, 104, 108,  98,  14,  48]])\n",
      "tensor([[ 96,  60,   4,  70,  87, 104, 108,  98,  14,  48]])\n",
      "tensor([[ 60,   4,  70,  87, 104, 108,  98,  14,  48,  87]])\n",
      "tensor([[ 60,   4,  70,  87, 104, 108,  98,  14,  48,  87]])\n",
      "tensor([[  4,  70,  87, 104, 108,  98,  14,  48,  87,  97]])\n",
      "tensor([[  4,  70,  87, 104, 108,  98,  14,  48,  87,  97]])\n",
      "tensor([[ 70,  87, 104, 108,  98,  14,  48,  87,  97,  45]])\n",
      "tensor([[ 70,  87, 104, 108,  98,  14,  48,  87,  97,  45]])\n",
      "tensor([[ 87, 104, 108,  98,  14,  48,  87,  97,  45,  80]])\n",
      "tensor([[ 87, 104, 108,  98,  14,  48,  87,  97,  45,  80]])\n",
      "tensor([[104, 108,  98,  14,  48,  87,  97,  45,  80,  49]])\n",
      "tensor([[104, 108,  98,  14,  48,  87,  97,  45,  80,  49]])\n",
      "tensor([[108,  98,  14,  48,  87,  97,  45,  80,  49,   5]])\n",
      "tensor([[108,  98,  14,  48,  87,  97,  45,  80,  49,   5]])\n",
      "tensor([[98, 14, 48, 87, 97, 45, 80, 49,  5, 96]])\n",
      "tensor([[98, 14, 48, 87, 97, 45, 80, 49,  5, 96]])\n",
      "tensor([[14, 48, 87, 97, 45, 80, 49,  5, 96, 57]])\n",
      "tensor([[14, 48, 87, 97, 45, 80, 49,  5, 96, 57]])\n",
      "tensor([[48, 87, 97, 45, 80, 49,  5, 96, 57, 88]])\n",
      "tensor([[48, 87, 97, 45, 80, 49,  5, 96, 57, 88]])\n",
      "tensor([[87, 97, 45, 80, 49,  5, 96, 57, 88, 93]])\n",
      "tensor([[87, 97, 45, 80, 49,  5, 96, 57, 88, 93]])\n",
      "tensor([[97, 45, 80, 49,  5, 96, 57, 88, 93, 54]])\n",
      "tensor([[97, 45, 80, 49,  5, 96, 57, 88, 93, 54]])\n",
      "tensor([[45, 80, 49,  5, 96, 57, 88, 93, 54, 79]])\n",
      "tensor([[45, 80, 49,  5, 96, 57, 88, 93, 54, 79]])\n",
      "tensor([[80, 49,  5, 96, 57, 88, 93, 54, 79, 52]])\n",
      "tensor([[80, 49,  5, 96, 57, 88, 93, 54, 79, 52]])\n",
      "tensor([[49,  5, 96, 57, 88, 93, 54, 79, 52, 87]])\n",
      "tensor([[49,  5, 96, 57, 88, 93, 54, 79, 52, 87]])\n",
      "tensor([[ 5, 96, 57, 88, 93, 54, 79, 52, 87, 33]])\n",
      "tensor([[ 5, 96, 57, 88, 93, 54, 79, 52, 87, 33]])\n",
      "tensor([[96, 57, 88, 93, 54, 79, 52, 87, 33, 15]])\n",
      "tensor([[96, 57, 88, 93, 54, 79, 52, 87, 33, 15]])\n",
      "tensor([[57, 88, 93, 54, 79, 52, 87, 33, 15,  1]])\n",
      "tensor([[57, 88, 93, 54, 79, 52, 87, 33, 15,  1]])\n",
      "tensor([[88, 93, 54, 79, 52, 87, 33, 15,  1, 20]])\n",
      "tensor([[88, 93, 54, 79, 52, 87, 33, 15,  1, 20]])\n",
      "tensor([[93, 54, 79, 52, 87, 33, 15,  1, 20, 17]])\n",
      "tensor([[93, 54, 79, 52, 87, 33, 15,  1, 20, 17]])\n",
      "tensor([[54, 79, 52, 87, 33, 15,  1, 20, 17, 67]])\n",
      "tensor([[54, 79, 52, 87, 33, 15,  1, 20, 17, 67]])\n",
      "tensor([[79, 52, 87, 33, 15,  1, 20, 17, 67, 86]])\n",
      "tensor([[79, 52, 87, 33, 15,  1, 20, 17, 67, 86]])\n",
      "tensor([[52, 87, 33, 15,  1, 20, 17, 67, 86, 21]])\n",
      "tensor([[52, 87, 33, 15,  1, 20, 17, 67, 86, 21]])\n",
      "tensor([[87, 33, 15,  1, 20, 17, 67, 86, 21, 51]])\n",
      "tensor([[87, 33, 15,  1, 20, 17, 67, 86, 21, 51]])\n",
      "tensor([[33, 15,  1, 20, 17, 67, 86, 21, 51, 96]])\n",
      "tensor([[33, 15,  1, 20, 17, 67, 86, 21, 51, 96]])\n",
      "tensor([[15,  1, 20, 17, 67, 86, 21, 51, 96, 94]])\n",
      "tensor([[15,  1, 20, 17, 67, 86, 21, 51, 96, 94]])\n",
      "tensor([[ 1, 20, 17, 67, 86, 21, 51, 96, 94, 18]])\n",
      "tensor([[ 1, 20, 17, 67, 86, 21, 51, 96, 94, 18]])\n",
      "tensor([[20, 17, 67, 86, 21, 51, 96, 94, 18, 73]])\n",
      "tensor([[20, 17, 67, 86, 21, 51, 96, 94, 18, 73]])\n",
      "tensor([[17, 67, 86, 21, 51, 96, 94, 18, 73, 12]])\n",
      "tensor([[17, 67, 86, 21, 51, 96, 94, 18, 73, 12]])\n",
      "tensor([[67, 86, 21, 51, 96, 94, 18, 73, 12, 43]])\n",
      "tensor([[67, 86, 21, 51, 96, 94, 18, 73, 12, 43]])\n",
      "tensor([[86, 21, 51, 96, 94, 18, 73, 12, 43, 87]])\n",
      "tensor([[86, 21, 51, 96, 94, 18, 73, 12, 43, 87]])\n",
      "tensor([[21, 51, 96, 94, 18, 73, 12, 43, 87, 72]])\n",
      "tensor([[21, 51, 96, 94, 18, 73, 12, 43, 87, 72]])\n",
      "tensor([[ 51,  96,  94,  18,  73,  12,  43,  87,  72, 101]])\n",
      "tensor([[ 51,  96,  94,  18,  73,  12,  43,  87,  72, 101]])\n",
      "tensor([[ 96,  94,  18,  73,  12,  43,  87,  72, 101,  65]])\n",
      "tensor([[ 96,  94,  18,  73,  12,  43,  87,  72, 101,  65]])\n",
      "tensor([[ 94,  18,  73,  12,  43,  87,  72, 101,  65,  29]])\n",
      "tensor([[ 94,  18,  73,  12,  43,  87,  72, 101,  65,  29]])\n",
      "tensor([[ 18,  73,  12,  43,  87,  72, 101,  65,  29,  37]])\n",
      "tensor([[ 18,  73,  12,  43,  87,  72, 101,  65,  29,  37]])\n",
      "tensor([[ 73,  12,  43,  87,  72, 101,  65,  29,  37,  77]])\n",
      "tensor([[ 73,  12,  43,  87,  72, 101,  65,  29,  37,  77]])\n",
      "tensor([[ 12,  43,  87,  72, 101,  65,  29,  37,  77,   9]])\n",
      "tensor([[ 12,  43,  87,  72, 101,  65,  29,  37,  77,   9]])\n",
      "tensor([[ 43,  87,  72, 101,  65,  29,  37,  77,   9,  83]])\n",
      "tensor([[ 43,  87,  72, 101,  65,  29,  37,  77,   9,  83]])\n",
      "tensor([[ 87,  72, 101,  65,  29,  37,  77,   9,  83,  30]])\n",
      "tensor([[ 87,  72, 101,  65,  29,  37,  77,   9,  83,  30]])\n",
      "tensor([[ 72, 101,  65,  29,  37,  77,   9,  83,  30,  87]])\n",
      "tensor([[ 72, 101,  65,  29,  37,  77,   9,  83,  30,  87]])\n",
      "tensor([[101,  65,  29,  37,  77,   9,  83,  30,  87,  50]])\n",
      "tensor([[101,  65,  29,  37,  77,   9,  83,  30,  87,  50]])\n",
      "tensor([[ 65,  29,  37,  77,   9,  83,  30,  87,  50, 105]])\n",
      "tensor([[ 65,  29,  37,  77,   9,  83,  30,  87,  50, 105]])\n",
      "tensor([[ 29,  37,  77,   9,  83,  30,  87,  50, 105,  89]])\n",
      "tensor([[ 29,  37,  77,   9,  83,  30,  87,  50, 105,  89]])\n",
      "tensor([[ 37,  77,   9,  83,  30,  87,  50, 105,  89,  41]])\n",
      "tensor([[ 37,  77,   9,  83,  30,  87,  50, 105,  89,  41]])\n",
      "tensor([[ 77,   9,  83,  30,  87,  50, 105,  89,  41,  51]])\n",
      "tensor([[ 77,   9,  83,  30,  87,  50, 105,  89,  41,  51]])\n",
      "tensor([[  9,  83,  30,  87,  50, 105,  89,  41,  51,  96]])\n",
      "tensor([[  9,  83,  30,  87,  50, 105,  89,  41,  51,  96]])\n",
      "tensor([[ 83,  30,  87,  50, 105,  89,  41,  51,  96,  90]])\n",
      "tensor([[ 83,  30,  87,  50, 105,  89,  41,  51,  96,  90]])\n",
      "tensor([[ 30,  87,  50, 105,  89,  41,  51,  96,  90,  36]])\n",
      "tensor([[ 30,  87,  50, 105,  89,  41,  51,  96,  90,  36]])\n",
      "tensor([[ 87,  50, 105,  89,  41,  51,  96,  90,  36,  68]])\n",
      "tensor([[ 87,  50, 105,  89,  41,  51,  96,  90,  36,  68]])\n",
      "tensor([[ 50, 105,  89,  41,  51,  96,  90,  36,  68,  32]])\n",
      "tensor([[ 50, 105,  89,  41,  51,  96,  90,  36,  68,  32]])\n",
      "tensor([[105,  89,  41,  51,  96,  90,  36,  68,  32,  87]])\n",
      "tensor([[105,  89,  41,  51,  96,  90,  36,  68,  32,  87]])\n",
      "tensor([[89, 41, 51, 96, 90, 36, 68, 32, 87, 62]])\n",
      "tensor([[89, 41, 51, 96, 90, 36, 68, 32, 87, 62]])\n",
      "tensor([[41, 51, 96, 90, 36, 68, 32, 87, 62, 99]])\n",
      "tensor([[41, 51, 96, 90, 36, 68, 32, 87, 62, 99]])\n",
      "tensor([[51, 96, 90, 36, 68, 32, 87, 62, 99, 92]])\n",
      "tensor([[51, 96, 90, 36, 68, 32, 87, 62, 99, 92]])\n",
      "tensor([[96, 90, 36, 68, 32, 87, 62, 99, 92,  8]])\n",
      "tensor([[96, 90, 36, 68, 32, 87, 62, 99, 92,  8]])\n",
      "tensor([[90, 36, 68, 32, 87, 62, 99, 92,  8, 87]])\n",
      "tensor([[90, 36, 68, 32, 87, 62, 99, 92,  8, 87]])\n",
      "tensor([[36, 68, 32, 87, 62, 99, 92,  8, 87, 23]])\n",
      "tensor([[36, 68, 32, 87, 62, 99, 92,  8, 87, 23]])\n",
      "tensor([[ 68,  32,  87,  62,  99,  92,   8,  87,  23, 106]])\n",
      "tensor([[ 68,  32,  87,  62,  99,  92,   8,  87,  23, 106]])\n",
      "tensor([[ 32,  87,  62,  99,  92,   8,  87,  23, 106,  19]])\n",
      "tensor([[ 32,  87,  62,  99,  92,   8,  87,  23, 106,  19]])\n",
      "tensor([[ 87,  62,  99,  92,   8,  87,  23, 106,  19,  43]])\n",
      "tensor([[ 87,  62,  99,  92,   8,  87,  23, 106,  19,  43]])\n",
      "tensor([[ 62,  99,  92,   8,  87,  23, 106,  19,  43,  47]])\n",
      "tensor([[ 62,  99,  92,   8,  87,  23, 106,  19,  43,  47]])\n",
      "tensor([[ 99,  92,   8,  87,  23, 106,  19,  43,  47,  35]])\n",
      "tensor([[ 99,  92,   8,  87,  23, 106,  19,  43,  47,  35]])\n",
      "tensor([[ 92,   8,  87,  23, 106,  19,  43,  47,  35,   7]])\n",
      "tensor([[ 92,   8,  87,  23, 106,  19,  43,  47,  35,   7]])\n",
      "tensor([[  8,  87,  23, 106,  19,  43,  47,  35,   7,  51]])\n",
      "tensor([[  8,  87,  23, 106,  19,  43,  47,  35,   7,  51]])\n",
      "tensor([[ 87,  23, 106,  19,  43,  47,  35,   7,  51,  96]])\n",
      "tensor([[ 87,  23, 106,  19,  43,  47,  35,   7,  51,  96]])\n",
      "tensor([[ 23, 106,  19,  43,  47,  35,   7,  51,  96,  79]])\n",
      "tensor([[ 23, 106,  19,  43,  47,  35,   7,  51,  96,  79]])\n",
      "tensor([[106,  19,  43,  47,  35,   7,  51,  96,  79,  10]])\n",
      "tensor([[106,  19,  43,  47,  35,   7,  51,  96,  79,  10]])\n",
      "tensor([[19, 43, 47, 35,  7, 51, 96, 79, 10, 64]])\n",
      "tensor([[19, 43, 47, 35,  7, 51, 96, 79, 10, 64]])\n",
      "tensor([[43, 47, 35,  7, 51, 96, 79, 10, 64, 87]])\n",
      "tensor([[43, 47, 35,  7, 51, 96, 79, 10, 64, 87]])\n",
      "tensor([[47, 35,  7, 51, 96, 79, 10, 64, 87, 11]])\n",
      "tensor([[47, 35,  7, 51, 96, 79, 10, 64, 87, 11]])\n",
      "tensor([[35,  7, 51, 96, 79, 10, 64, 87, 11, 91]])\n",
      "tensor([[35,  7, 51, 96, 79, 10, 64, 87, 11, 91]])\n",
      "tensor([[ 7, 51, 96, 79, 10, 64, 87, 11, 91, 27]])\n",
      "tensor([[ 7, 51, 96, 79, 10, 64, 87, 11, 91, 27]])\n",
      "tensor([[51, 96, 79, 10, 64, 87, 11, 91, 27, 53]])\n",
      "tensor([[51, 96, 79, 10, 64, 87, 11, 91, 27, 53]])\n",
      "tensor([[96, 79, 10, 64, 87, 11, 91, 27, 53, 74]])\n",
      "tensor([[96, 79, 10, 64, 87, 11, 91, 27, 53, 74]])\n",
      "tensor([[79, 10, 64, 87, 11, 91, 27, 53, 74, 87]])\n",
      "tensor([[79, 10, 64, 87, 11, 91, 27, 53, 74, 87]])\n",
      "tensor([[10, 64, 87, 11, 91, 27, 53, 74, 87, 56]])\n",
      "tensor([[10, 64, 87, 11, 91, 27, 53, 74, 87, 56]])\n",
      "tensor([[64, 87, 11, 91, 27, 53, 74, 87, 56, 82]])\n",
      "tensor([[64, 87, 11, 91, 27, 53, 74, 87, 56, 82]])\n",
      "tensor([[87, 11, 91, 27, 53, 74, 87, 56, 82, 85]])\n",
      "tensor([[87, 11, 91, 27, 53, 74, 87, 56, 82, 85]])\n",
      "tensor([[11, 91, 27, 53, 74, 87, 56, 82, 85, 13]])\n",
      "tensor([[11, 91, 27, 53, 74, 87, 56, 82, 85, 13]])\n",
      "tensor([[91, 27, 53, 74, 87, 56, 82, 85, 13,  5]])\n",
      "tensor([[91, 27, 53, 74, 87, 56, 82, 85, 13,  5]])\n",
      "tensor([[27, 53, 74, 87, 56, 82, 85, 13,  5, 96]])\n"
     ]
    }
   ],
   "execution_count": 13
  },
  {
   "cell_type": "code",
   "id": "d9e9fd013d3d01ba",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-01T13:09:05.012207Z",
     "start_time": "2025-07-01T13:09:05.004639Z"
    }
   },
   "source": [
    "# A character-level RNN built from raw parameters (no nn.RNN), plus the\n",
    "# model / loss / optimizer setup for training.\n",
    "class ZhouyuRNN(nn.Module):\n",
    "    def __init__(self, vocab_size, hidden_size):\n",
    "        super().__init__()\n",
    "        self.hidden_size = hidden_size\n",
    "\n",
    "        # Embedding layer: token index -> dense vector of size hidden_size\n",
    "        # (embedding dim deliberately equals the hidden dim, so W_xh is square).\n",
    "        self.embedding = nn.Embedding(vocab_size, hidden_size)\n",
    "\n",
    "        # Raw RNN parameters: h_t = tanh(x_t @ W_xh + h_{t-1} @ W_hh + b_h).\n",
    "        # NOTE(review): unscaled randn init is large; training stability here\n",
    "        # relies on the small learning rate (and optional gradient clipping).\n",
    "        self.W_xh = nn.Parameter(torch.randn(hidden_size, hidden_size))\n",
    "        self.W_hh = nn.Parameter(torch.randn(hidden_size, hidden_size))\n",
    "        self.b_h = nn.Parameter(torch.zeros(hidden_size))\n",
    "\n",
    "        # Output projection: hidden state -> vocabulary logits.\n",
    "        self.out_linear = nn.Linear(hidden_size, vocab_size)\n",
    "\n",
    "    def forward(self, x, hidden=None):\n",
    "        \"\"\"Run the RNN over a batch of index sequences.\n",
    "\n",
    "        Args:\n",
    "            x: LongTensor of shape (batch_size, seq_len) with token indices.\n",
    "            hidden: optional initial hidden state, (batch_size, hidden_size).\n",
    "\n",
    "        Returns:\n",
    "            Tuple of (logits, hidden): logits of shape\n",
    "            (batch_size, seq_len, vocab_size) and the final hidden state.\n",
    "        \"\"\"\n",
    "        embedded = self.embedding(x)\n",
    "        batch_size, seq_len, hidden_size = embedded.shape\n",
    "\n",
    "        # Fresh zero state per sequence unless the caller carries one over.\n",
    "        # BUGFIX: allocate on the same device/dtype as the embeddings so the\n",
    "        # model also works when moved to GPU (bare torch.zeros is CPU-only).\n",
    "        if hidden is None:\n",
    "            hidden = embedded.new_zeros(batch_size, hidden_size)\n",
    "\n",
    "        outputs = []\n",
    "        for t in range(seq_len):\n",
    "            # Embedding of the t-th character: (batch_size, hidden_size).\n",
    "            x_t = embedded[:, t, :]\n",
    "\n",
    "            hidden = torch.tanh(\n",
    "                torch.mm(x_t, self.W_xh) +\n",
    "                torch.mm(hidden, self.W_hh) +\n",
    "                self.b_h\n",
    "            )\n",
    "\n",
    "            # The hidden state summarizes characters 0..t; project it to\n",
    "            # logits predicting the next character.\n",
    "            outputs.append(self.out_linear(hidden))\n",
    "\n",
    "        # Stack per-step logits into (batch_size, seq_len, vocab_size) so the\n",
    "        # whole output sequence can be compared against the shifted targets.\n",
    "        outputs = torch.stack(outputs, dim=1)\n",
    "\n",
    "        return outputs, hidden\n",
    "\n",
    "\n",
    "# Instantiate model, loss, and optimizer.\n",
    "model = ZhouyuRNN(vocab_size, HIDDEN_SIZE)\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.005)"
   ],
   "outputs": [],
   "execution_count": 14
  },
  {
   "cell_type": "code",
   "id": "2348428ce74982e4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-01T13:09:23.777044Z",
     "start_time": "2025-07-01T13:09:05.025067Z"
    }
   },
   "source": [
    "# Train the model; a single constant drives both the loop bound and the log.\n",
    "NUM_EPOCHS = 100\n",
    "\n",
    "for epoch in range(NUM_EPOCHS):\n",
    "    for i, (inputs, targets) in enumerate(dataloader):\n",
    "        # Forward pass over one window.\n",
    "        outputs, _ = model(inputs)\n",
    "\n",
    "        # Average cross-entropy over every time step: flatten the\n",
    "        # (batch, seq, vocab) logits against the (batch*seq,) targets.\n",
    "        loss = criterion(\n",
    "            outputs.view(-1, vocab_size),  # (batch_size*seq_length, vocab_size)\n",
    "            targets.view(-1)  # (batch_size*seq_length)\n",
    "        )\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        # Gradient clipping guards against exploding gradients (optional here).\n",
    "        # nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)\n",
    "        optimizer.step()\n",
    "\n",
    "        if (i + 1) % 20 == 0:\n",
    "            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'\n",
    "                  .format(epoch + 1, NUM_EPOCHS, i + 1, len(dataloader), loss.item()))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch [1/100], Step [20/138], Loss: 4.8067\n",
      "Epoch [1/100], Step [40/138], Loss: 4.7658\n",
      "Epoch [1/100], Step [60/138], Loss: 4.9512\n",
      "Epoch [1/100], Step [80/138], Loss: 4.6872\n",
      "Epoch [1/100], Step [100/138], Loss: 4.3843\n",
      "Epoch [1/100], Step [120/138], Loss: 4.2940\n",
      "Epoch [2/100], Step [20/138], Loss: 4.5744\n",
      "Epoch [2/100], Step [40/138], Loss: 4.6247\n",
      "Epoch [2/100], Step [60/138], Loss: 4.5522\n",
      "Epoch [2/100], Step [80/138], Loss: 4.2047\n",
      "Epoch [2/100], Step [100/138], Loss: 4.1802\n",
      "Epoch [2/100], Step [120/138], Loss: 3.8663\n",
      "Epoch [3/100], Step [20/138], Loss: 4.4050\n",
      "Epoch [3/100], Step [40/138], Loss: 4.0929\n",
      "Epoch [3/100], Step [60/138], Loss: 4.2980\n",
      "Epoch [3/100], Step [80/138], Loss: 3.9400\n",
      "Epoch [3/100], Step [100/138], Loss: 3.7904\n",
      "Epoch [3/100], Step [120/138], Loss: 3.6623\n",
      "Epoch [4/100], Step [20/138], Loss: 3.9836\n",
      "Epoch [4/100], Step [40/138], Loss: 3.8256\n",
      "Epoch [4/100], Step [60/138], Loss: 4.0507\n",
      "Epoch [4/100], Step [80/138], Loss: 3.6578\n",
      "Epoch [4/100], Step [100/138], Loss: 3.5046\n",
      "Epoch [4/100], Step [120/138], Loss: 3.2810\n",
      "Epoch [5/100], Step [20/138], Loss: 3.5794\n",
      "Epoch [5/100], Step [40/138], Loss: 3.4566\n",
      "Epoch [5/100], Step [60/138], Loss: 3.6903\n",
      "Epoch [5/100], Step [80/138], Loss: 3.2183\n",
      "Epoch [5/100], Step [100/138], Loss: 3.0448\n",
      "Epoch [5/100], Step [120/138], Loss: 3.0399\n",
      "Epoch [6/100], Step [20/138], Loss: 3.2770\n",
      "Epoch [6/100], Step [40/138], Loss: 3.2897\n",
      "Epoch [6/100], Step [60/138], Loss: 3.4602\n",
      "Epoch [6/100], Step [80/138], Loss: 2.9417\n",
      "Epoch [6/100], Step [100/138], Loss: 2.6833\n",
      "Epoch [6/100], Step [120/138], Loss: 2.8028\n",
      "Epoch [7/100], Step [20/138], Loss: 2.8505\n",
      "Epoch [7/100], Step [40/138], Loss: 2.7170\n",
      "Epoch [7/100], Step [60/138], Loss: 3.0036\n",
      "Epoch [7/100], Step [80/138], Loss: 2.6592\n",
      "Epoch [7/100], Step [100/138], Loss: 2.4316\n",
      "Epoch [7/100], Step [120/138], Loss: 2.4930\n",
      "Epoch [8/100], Step [20/138], Loss: 2.6014\n",
      "Epoch [8/100], Step [40/138], Loss: 2.5091\n",
      "Epoch [8/100], Step [60/138], Loss: 2.9349\n",
      "Epoch [8/100], Step [80/138], Loss: 2.3046\n",
      "Epoch [8/100], Step [100/138], Loss: 2.2770\n",
      "Epoch [8/100], Step [120/138], Loss: 2.3260\n",
      "Epoch [9/100], Step [20/138], Loss: 2.2622\n",
      "Epoch [9/100], Step [40/138], Loss: 2.2406\n",
      "Epoch [9/100], Step [60/138], Loss: 2.5541\n",
      "Epoch [9/100], Step [80/138], Loss: 2.0989\n",
      "Epoch [9/100], Step [100/138], Loss: 1.9963\n",
      "Epoch [9/100], Step [120/138], Loss: 2.0463\n",
      "Epoch [10/100], Step [20/138], Loss: 2.1859\n",
      "Epoch [10/100], Step [40/138], Loss: 1.9820\n",
      "Epoch [10/100], Step [60/138], Loss: 2.5010\n",
      "Epoch [10/100], Step [80/138], Loss: 1.7978\n",
      "Epoch [10/100], Step [100/138], Loss: 1.8669\n",
      "Epoch [10/100], Step [120/138], Loss: 1.8606\n",
      "Epoch [11/100], Step [20/138], Loss: 1.7631\n",
      "Epoch [11/100], Step [40/138], Loss: 1.9768\n",
      "Epoch [11/100], Step [60/138], Loss: 2.0540\n",
      "Epoch [11/100], Step [80/138], Loss: 1.6324\n",
      "Epoch [11/100], Step [100/138], Loss: 1.6466\n",
      "Epoch [11/100], Step [120/138], Loss: 1.6428\n",
      "Epoch [12/100], Step [20/138], Loss: 1.6259\n",
      "Epoch [12/100], Step [40/138], Loss: 1.6250\n",
      "Epoch [12/100], Step [60/138], Loss: 1.8106\n",
      "Epoch [12/100], Step [80/138], Loss: 1.3916\n",
      "Epoch [12/100], Step [100/138], Loss: 1.2969\n",
      "Epoch [12/100], Step [120/138], Loss: 1.4399\n",
      "Epoch [13/100], Step [20/138], Loss: 1.4268\n",
      "Epoch [13/100], Step [40/138], Loss: 1.5190\n",
      "Epoch [13/100], Step [60/138], Loss: 1.5692\n",
      "Epoch [13/100], Step [80/138], Loss: 1.1715\n",
      "Epoch [13/100], Step [100/138], Loss: 1.2367\n",
      "Epoch [13/100], Step [120/138], Loss: 1.2489\n",
      "Epoch [14/100], Step [20/138], Loss: 1.3260\n",
      "Epoch [14/100], Step [40/138], Loss: 1.3724\n",
      "Epoch [14/100], Step [60/138], Loss: 1.4245\n",
      "Epoch [14/100], Step [80/138], Loss: 1.2487\n",
      "Epoch [14/100], Step [100/138], Loss: 1.1577\n",
      "Epoch [14/100], Step [120/138], Loss: 1.1334\n",
      "Epoch [15/100], Step [20/138], Loss: 1.1757\n",
      "Epoch [15/100], Step [40/138], Loss: 1.3173\n",
      "Epoch [15/100], Step [60/138], Loss: 1.4433\n",
      "Epoch [15/100], Step [80/138], Loss: 1.0471\n",
      "Epoch [15/100], Step [100/138], Loss: 0.9398\n",
      "Epoch [15/100], Step [120/138], Loss: 1.0535\n",
      "Epoch [16/100], Step [20/138], Loss: 0.9999\n",
      "Epoch [16/100], Step [40/138], Loss: 1.5201\n",
      "Epoch [16/100], Step [60/138], Loss: 1.2338\n",
      "Epoch [16/100], Step [80/138], Loss: 0.9162\n",
      "Epoch [16/100], Step [100/138], Loss: 0.8139\n",
      "Epoch [16/100], Step [120/138], Loss: 0.9012\n",
      "Epoch [17/100], Step [20/138], Loss: 0.9631\n",
      "Epoch [17/100], Step [40/138], Loss: 1.1657\n",
      "Epoch [17/100], Step [60/138], Loss: 1.1700\n",
      "Epoch [17/100], Step [80/138], Loss: 0.7694\n",
      "Epoch [17/100], Step [100/138], Loss: 0.7605\n",
      "Epoch [17/100], Step [120/138], Loss: 0.7983\n",
      "Epoch [18/100], Step [20/138], Loss: 0.6951\n",
      "Epoch [18/100], Step [40/138], Loss: 1.0275\n",
      "Epoch [18/100], Step [60/138], Loss: 0.9641\n",
      "Epoch [18/100], Step [80/138], Loss: 0.7880\n",
      "Epoch [18/100], Step [100/138], Loss: 0.8343\n",
      "Epoch [18/100], Step [120/138], Loss: 0.7598\n",
      "Epoch [19/100], Step [20/138], Loss: 0.6910\n",
      "Epoch [19/100], Step [40/138], Loss: 1.0520\n",
      "Epoch [19/100], Step [60/138], Loss: 0.8825\n",
      "Epoch [19/100], Step [80/138], Loss: 0.6784\n",
      "Epoch [19/100], Step [100/138], Loss: 0.5987\n",
      "Epoch [19/100], Step [120/138], Loss: 0.6516\n",
      "Epoch [20/100], Step [20/138], Loss: 0.6414\n",
      "Epoch [20/100], Step [40/138], Loss: 1.0463\n",
      "Epoch [20/100], Step [60/138], Loss: 0.8927\n",
      "Epoch [20/100], Step [80/138], Loss: 0.7301\n",
      "Epoch [20/100], Step [100/138], Loss: 0.5331\n",
      "Epoch [20/100], Step [120/138], Loss: 0.6606\n",
      "Epoch [21/100], Step [20/138], Loss: 0.5852\n",
      "Epoch [21/100], Step [40/138], Loss: 0.9087\n",
      "Epoch [21/100], Step [60/138], Loss: 0.6880\n",
      "Epoch [21/100], Step [80/138], Loss: 0.7450\n",
      "Epoch [21/100], Step [100/138], Loss: 0.4737\n",
      "Epoch [21/100], Step [120/138], Loss: 0.5276\n",
      "Epoch [22/100], Step [20/138], Loss: 0.5326\n",
      "Epoch [22/100], Step [40/138], Loss: 0.5881\n",
      "Epoch [22/100], Step [60/138], Loss: 0.7029\n",
      "Epoch [22/100], Step [80/138], Loss: 0.6628\n",
      "Epoch [22/100], Step [100/138], Loss: 0.4759\n",
      "Epoch [22/100], Step [120/138], Loss: 0.5351\n",
      "Epoch [23/100], Step [20/138], Loss: 0.5375\n",
      "Epoch [23/100], Step [40/138], Loss: 0.8155\n",
      "Epoch [23/100], Step [60/138], Loss: 0.9174\n",
      "Epoch [23/100], Step [80/138], Loss: 0.5923\n",
      "Epoch [23/100], Step [100/138], Loss: 0.4538\n",
      "Epoch [23/100], Step [120/138], Loss: 0.4383\n",
      "Epoch [24/100], Step [20/138], Loss: 0.6018\n",
      "Epoch [24/100], Step [40/138], Loss: 0.6473\n",
      "Epoch [24/100], Step [60/138], Loss: 0.8465\n",
      "Epoch [24/100], Step [80/138], Loss: 0.5116\n",
      "Epoch [24/100], Step [100/138], Loss: 0.3851\n",
      "Epoch [24/100], Step [120/138], Loss: 0.4347\n",
      "Epoch [25/100], Step [20/138], Loss: 0.4734\n",
      "Epoch [25/100], Step [40/138], Loss: 0.5226\n",
      "Epoch [25/100], Step [60/138], Loss: 0.8099\n",
      "Epoch [25/100], Step [80/138], Loss: 0.4804\n",
      "Epoch [25/100], Step [100/138], Loss: 0.4241\n",
      "Epoch [25/100], Step [120/138], Loss: 0.3798\n",
      "Epoch [26/100], Step [20/138], Loss: 0.4872\n",
      "Epoch [26/100], Step [40/138], Loss: 0.6760\n",
      "Epoch [26/100], Step [60/138], Loss: 0.8160\n",
      "Epoch [26/100], Step [80/138], Loss: 0.5322\n",
      "Epoch [26/100], Step [100/138], Loss: 0.3626\n",
      "Epoch [26/100], Step [120/138], Loss: 0.3983\n",
      "Epoch [27/100], Step [20/138], Loss: 0.4406\n",
      "Epoch [27/100], Step [40/138], Loss: 0.5551\n",
      "Epoch [27/100], Step [60/138], Loss: 0.5168\n",
      "Epoch [27/100], Step [80/138], Loss: 0.5016\n",
      "Epoch [27/100], Step [100/138], Loss: 0.3440\n",
      "Epoch [27/100], Step [120/138], Loss: 0.3636\n",
      "Epoch [28/100], Step [20/138], Loss: 0.5128\n",
      "Epoch [28/100], Step [40/138], Loss: 0.5279\n",
      "Epoch [28/100], Step [60/138], Loss: 0.6384\n",
      "Epoch [28/100], Step [80/138], Loss: 0.4762\n",
      "Epoch [28/100], Step [100/138], Loss: 0.3492\n",
      "Epoch [28/100], Step [120/138], Loss: 0.3692\n",
      "Epoch [29/100], Step [20/138], Loss: 0.4074\n",
      "Epoch [29/100], Step [40/138], Loss: 0.5772\n",
      "Epoch [29/100], Step [60/138], Loss: 0.4546\n",
      "Epoch [29/100], Step [80/138], Loss: 0.4504\n",
      "Epoch [29/100], Step [100/138], Loss: 0.3245\n",
      "Epoch [29/100], Step [120/138], Loss: 0.4236\n",
      "Epoch [30/100], Step [20/138], Loss: 0.3645\n",
      "Epoch [30/100], Step [40/138], Loss: 0.5098\n",
      "Epoch [30/100], Step [60/138], Loss: 0.5050\n",
      "Epoch [30/100], Step [80/138], Loss: 0.4829\n",
      "Epoch [30/100], Step [100/138], Loss: 0.3516\n",
      "Epoch [30/100], Step [120/138], Loss: 0.3263\n",
      "Epoch [31/100], Step [20/138], Loss: 0.3551\n",
      "Epoch [31/100], Step [40/138], Loss: 0.4280\n",
      "Epoch [31/100], Step [60/138], Loss: 0.4756\n",
      "Epoch [31/100], Step [80/138], Loss: 0.4649\n",
      "Epoch [31/100], Step [100/138], Loss: 0.3150\n",
      "Epoch [31/100], Step [120/138], Loss: 0.2965\n",
      "Epoch [32/100], Step [20/138], Loss: 0.4507\n",
      "Epoch [32/100], Step [40/138], Loss: 0.3365\n",
      "Epoch [32/100], Step [60/138], Loss: 0.4708\n",
      "Epoch [32/100], Step [80/138], Loss: 0.4560\n",
      "Epoch [32/100], Step [100/138], Loss: 0.2440\n",
      "Epoch [32/100], Step [120/138], Loss: 0.2796\n",
      "Epoch [33/100], Step [20/138], Loss: 0.4080\n",
      "Epoch [33/100], Step [40/138], Loss: 0.4492\n",
      "Epoch [33/100], Step [60/138], Loss: 0.4615\n",
      "Epoch [33/100], Step [80/138], Loss: 0.5335\n",
      "Epoch [33/100], Step [100/138], Loss: 0.2735\n",
      "Epoch [33/100], Step [120/138], Loss: 0.3281\n",
      "Epoch [34/100], Step [20/138], Loss: 0.3723\n",
      "Epoch [34/100], Step [40/138], Loss: 0.3621\n",
      "Epoch [34/100], Step [60/138], Loss: 0.3860\n",
      "Epoch [34/100], Step [80/138], Loss: 0.4156\n",
      "Epoch [34/100], Step [100/138], Loss: 0.2978\n",
      "Epoch [34/100], Step [120/138], Loss: 0.2506\n",
      "Epoch [35/100], Step [20/138], Loss: 0.3513\n",
      "Epoch [35/100], Step [40/138], Loss: 0.3015\n",
      "Epoch [35/100], Step [60/138], Loss: 0.3808\n",
      "Epoch [35/100], Step [80/138], Loss: 0.3991\n",
      "Epoch [35/100], Step [100/138], Loss: 0.2671\n",
      "Epoch [35/100], Step [120/138], Loss: 0.2442\n",
      "Epoch [36/100], Step [20/138], Loss: 0.3241\n",
      "Epoch [36/100], Step [40/138], Loss: 0.3651\n",
      "Epoch [36/100], Step [60/138], Loss: 0.4061\n",
      "Epoch [36/100], Step [80/138], Loss: 0.4072\n",
      "Epoch [36/100], Step [100/138], Loss: 0.2450\n",
      "Epoch [36/100], Step [120/138], Loss: 0.2407\n",
      "Epoch [37/100], Step [20/138], Loss: 0.3084\n",
      "Epoch [37/100], Step [40/138], Loss: 0.3256\n",
      "Epoch [37/100], Step [60/138], Loss: 0.3419\n",
      "Epoch [37/100], Step [80/138], Loss: 0.3777\n",
      "Epoch [37/100], Step [100/138], Loss: 0.2203\n",
      "Epoch [37/100], Step [120/138], Loss: 0.2331\n",
      "Epoch [38/100], Step [20/138], Loss: 0.3036\n",
      "Epoch [38/100], Step [40/138], Loss: 0.3096\n",
      "Epoch [38/100], Step [60/138], Loss: 0.4444\n",
      "Epoch [38/100], Step [80/138], Loss: 0.3227\n",
      "Epoch [38/100], Step [100/138], Loss: 0.2033\n",
      "Epoch [38/100], Step [120/138], Loss: 0.2286\n",
      "Epoch [39/100], Step [20/138], Loss: 0.2967\n",
      "Epoch [39/100], Step [40/138], Loss: 0.2873\n",
      "Epoch [39/100], Step [60/138], Loss: 0.4021\n",
      "Epoch [39/100], Step [80/138], Loss: 0.3152\n",
      "Epoch [39/100], Step [100/138], Loss: 0.2368\n",
      "Epoch [39/100], Step [120/138], Loss: 0.2222\n",
      "Epoch [40/100], Step [20/138], Loss: 0.2878\n",
      "Epoch [40/100], Step [40/138], Loss: 0.2129\n",
      "Epoch [40/100], Step [60/138], Loss: 0.3261\n",
      "Epoch [40/100], Step [80/138], Loss: 0.2963\n",
      "Epoch [40/100], Step [100/138], Loss: 0.2211\n",
      "Epoch [40/100], Step [120/138], Loss: 0.2087\n",
      "Epoch [41/100], Step [20/138], Loss: 0.2825\n",
      "Epoch [41/100], Step [40/138], Loss: 0.2023\n",
      "Epoch [41/100], Step [60/138], Loss: 0.3009\n",
      "Epoch [41/100], Step [80/138], Loss: 0.2802\n",
      "Epoch [41/100], Step [100/138], Loss: 0.2091\n",
      "Epoch [41/100], Step [120/138], Loss: 0.1853\n",
      "Epoch [42/100], Step [20/138], Loss: 0.2344\n",
      "Epoch [42/100], Step [40/138], Loss: 0.2015\n",
      "Epoch [42/100], Step [60/138], Loss: 0.3417\n",
      "Epoch [42/100], Step [80/138], Loss: 0.2740\n",
      "Epoch [42/100], Step [100/138], Loss: 0.2104\n",
      "Epoch [42/100], Step [120/138], Loss: 0.1748\n",
      "Epoch [43/100], Step [20/138], Loss: 0.2115\n",
      "Epoch [43/100], Step [40/138], Loss: 0.2216\n",
      "Epoch [43/100], Step [60/138], Loss: 0.3289\n",
      "Epoch [43/100], Step [80/138], Loss: 0.2813\n",
      "Epoch [43/100], Step [100/138], Loss: 0.2151\n",
      "Epoch [43/100], Step [120/138], Loss: 0.1798\n",
      "Epoch [44/100], Step [20/138], Loss: 0.2077\n",
      "Epoch [44/100], Step [40/138], Loss: 0.2325\n",
      "Epoch [44/100], Step [60/138], Loss: 0.3104\n",
      "Epoch [44/100], Step [80/138], Loss: 0.2802\n",
      "Epoch [44/100], Step [100/138], Loss: 0.1595\n",
      "Epoch [44/100], Step [120/138], Loss: 0.1726\n",
      "Epoch [45/100], Step [20/138], Loss: 0.2014\n",
      "Epoch [45/100], Step [40/138], Loss: 0.2006\n",
      "Epoch [45/100], Step [60/138], Loss: 0.4486\n",
      "Epoch [45/100], Step [80/138], Loss: 0.3076\n",
      "Epoch [45/100], Step [100/138], Loss: 0.1637\n",
      "Epoch [45/100], Step [120/138], Loss: 0.1827\n",
      "Epoch [46/100], Step [20/138], Loss: 0.2177\n",
      "Epoch [46/100], Step [40/138], Loss: 0.2012\n",
      "Epoch [46/100], Step [60/138], Loss: 0.2813\n",
      "Epoch [46/100], Step [80/138], Loss: 0.2665\n",
      "Epoch [46/100], Step [100/138], Loss: 0.1553\n",
      "Epoch [46/100], Step [120/138], Loss: 0.1723\n",
      "Epoch [47/100], Step [20/138], Loss: 0.1902\n",
      "Epoch [47/100], Step [40/138], Loss: 0.1970\n",
      "Epoch [47/100], Step [60/138], Loss: 0.2549\n",
      "Epoch [47/100], Step [80/138], Loss: 0.2489\n",
      "Epoch [47/100], Step [100/138], Loss: 0.1483\n",
      "Epoch [47/100], Step [120/138], Loss: 0.1482\n",
      "Epoch [48/100], Step [20/138], Loss: 0.1833\n",
      "Epoch [48/100], Step [40/138], Loss: 0.1917\n",
      "Epoch [48/100], Step [60/138], Loss: 0.2404\n",
      "Epoch [48/100], Step [80/138], Loss: 0.2882\n",
      "Epoch [48/100], Step [100/138], Loss: 0.1466\n",
      "Epoch [48/100], Step [120/138], Loss: 0.1554\n",
      "Epoch [49/100], Step [20/138], Loss: 0.1862\n",
      "Epoch [49/100], Step [40/138], Loss: 0.1931\n",
      "Epoch [49/100], Step [60/138], Loss: 0.2307\n",
      "Epoch [49/100], Step [80/138], Loss: 0.3037\n",
      "Epoch [49/100], Step [100/138], Loss: 0.1348\n",
      "Epoch [49/100], Step [120/138], Loss: 0.1474\n",
      "Epoch [50/100], Step [20/138], Loss: 0.1789\n",
      "Epoch [50/100], Step [40/138], Loss: 0.1746\n",
      "Epoch [50/100], Step [60/138], Loss: 0.2756\n",
      "Epoch [50/100], Step [80/138], Loss: 0.2336\n",
      "Epoch [50/100], Step [100/138], Loss: 0.1326\n",
      "Epoch [50/100], Step [120/138], Loss: 0.1451\n",
      "Epoch [51/100], Step [20/138], Loss: 0.1738\n",
      "Epoch [51/100], Step [40/138], Loss: 0.2026\n",
      "Epoch [51/100], Step [60/138], Loss: 0.2805\n",
      "Epoch [51/100], Step [80/138], Loss: 0.2005\n",
      "Epoch [51/100], Step [100/138], Loss: 0.1372\n",
      "Epoch [51/100], Step [120/138], Loss: 0.1386\n",
      "Epoch [52/100], Step [20/138], Loss: 0.1700\n",
      "Epoch [52/100], Step [40/138], Loss: 0.1862\n",
      "Epoch [52/100], Step [60/138], Loss: 0.2225\n",
      "Epoch [52/100], Step [80/138], Loss: 0.1911\n",
      "Epoch [52/100], Step [100/138], Loss: 0.1323\n",
      "Epoch [52/100], Step [120/138], Loss: 0.1305\n",
      "Epoch [53/100], Step [20/138], Loss: 0.1694\n",
      "Epoch [53/100], Step [40/138], Loss: 0.1577\n",
      "Epoch [53/100], Step [60/138], Loss: 0.2011\n",
      "Epoch [53/100], Step [80/138], Loss: 0.1848\n",
      "Epoch [53/100], Step [100/138], Loss: 0.1273\n",
      "Epoch [53/100], Step [120/138], Loss: 0.1401\n",
      "Epoch [54/100], Step [20/138], Loss: 0.1616\n",
      "Epoch [54/100], Step [40/138], Loss: 0.1528\n",
      "Epoch [54/100], Step [60/138], Loss: 0.1822\n",
      "Epoch [54/100], Step [80/138], Loss: 0.1819\n",
      "Epoch [54/100], Step [100/138], Loss: 0.1227\n",
      "Epoch [54/100], Step [120/138], Loss: 0.1366\n",
      "Epoch [55/100], Step [20/138], Loss: 0.1577\n",
      "Epoch [55/100], Step [40/138], Loss: 0.1536\n",
      "Epoch [55/100], Step [60/138], Loss: 0.2336\n",
      "Epoch [55/100], Step [80/138], Loss: 0.1753\n",
      "Epoch [55/100], Step [100/138], Loss: 0.1378\n",
      "Epoch [55/100], Step [120/138], Loss: 0.1279\n",
      "Epoch [56/100], Step [20/138], Loss: 0.1546\n",
      "Epoch [56/100], Step [40/138], Loss: 0.1600\n",
      "Epoch [56/100], Step [60/138], Loss: 0.1757\n",
      "Epoch [56/100], Step [80/138], Loss: 0.1720\n",
      "Epoch [56/100], Step [100/138], Loss: 0.1308\n",
      "Epoch [56/100], Step [120/138], Loss: 0.1171\n",
      "Epoch [57/100], Step [20/138], Loss: 0.1500\n",
      "Epoch [57/100], Step [40/138], Loss: 0.1525\n",
      "Epoch [57/100], Step [60/138], Loss: 0.2005\n",
      "Epoch [57/100], Step [80/138], Loss: 0.1660\n",
      "Epoch [57/100], Step [100/138], Loss: 0.1105\n",
      "Epoch [57/100], Step [120/138], Loss: 0.1471\n",
      "Epoch [58/100], Step [20/138], Loss: 0.1376\n",
      "Epoch [58/100], Step [40/138], Loss: 0.1510\n",
      "Epoch [58/100], Step [60/138], Loss: 0.1514\n",
      "Epoch [58/100], Step [80/138], Loss: 0.1620\n",
      "Epoch [58/100], Step [100/138], Loss: 0.1089\n",
      "Epoch [58/100], Step [120/138], Loss: 0.1098\n",
      "Epoch [59/100], Step [20/138], Loss: 0.1366\n",
      "Epoch [59/100], Step [40/138], Loss: 0.1454\n",
      "Epoch [59/100], Step [60/138], Loss: 0.1391\n",
      "Epoch [59/100], Step [80/138], Loss: 0.1599\n",
      "Epoch [59/100], Step [100/138], Loss: 0.1003\n",
      "Epoch [59/100], Step [120/138], Loss: 0.1085\n",
      "Epoch [60/100], Step [20/138], Loss: 0.1336\n",
      "Epoch [60/100], Step [40/138], Loss: 0.1383\n",
      "Epoch [60/100], Step [60/138], Loss: 0.1349\n",
      "Epoch [60/100], Step [80/138], Loss: 0.1573\n",
      "Epoch [60/100], Step [100/138], Loss: 0.0954\n",
      "Epoch [60/100], Step [120/138], Loss: 0.1060\n",
      "Epoch [61/100], Step [20/138], Loss: 0.1394\n",
      "Epoch [61/100], Step [40/138], Loss: 0.1295\n",
      "Epoch [61/100], Step [60/138], Loss: 0.1619\n",
      "Epoch [61/100], Step [80/138], Loss: 0.1545\n",
      "Epoch [61/100], Step [100/138], Loss: 0.0881\n",
      "Epoch [61/100], Step [120/138], Loss: 0.0984\n",
      "Epoch [62/100], Step [20/138], Loss: 0.1354\n",
      "Epoch [62/100], Step [40/138], Loss: 0.1262\n",
      "Epoch [62/100], Step [60/138], Loss: 0.2013\n",
      "Epoch [62/100], Step [80/138], Loss: 0.1529\n",
      "Epoch [62/100], Step [100/138], Loss: 0.0829\n",
      "Epoch [62/100], Step [120/138], Loss: 0.0972\n",
      "Epoch [63/100], Step [20/138], Loss: 0.1287\n",
      "Epoch [63/100], Step [40/138], Loss: 0.1231\n",
      "Epoch [63/100], Step [60/138], Loss: 0.1430\n",
      "Epoch [63/100], Step [80/138], Loss: 0.1486\n",
      "Epoch [63/100], Step [100/138], Loss: 0.0873\n",
      "Epoch [63/100], Step [120/138], Loss: 0.0957\n",
      "Epoch [64/100], Step [20/138], Loss: 0.1256\n",
      "Epoch [64/100], Step [40/138], Loss: 0.1201\n",
      "Epoch [64/100], Step [60/138], Loss: 0.1445\n",
      "Epoch [64/100], Step [80/138], Loss: 0.1458\n",
      "Epoch [64/100], Step [100/138], Loss: 0.0849\n",
      "Epoch [64/100], Step [120/138], Loss: 0.0966\n",
      "Epoch [65/100], Step [20/138], Loss: 0.1206\n",
      "Epoch [65/100], Step [40/138], Loss: 0.1139\n",
      "Epoch [65/100], Step [60/138], Loss: 0.1399\n",
      "Epoch [65/100], Step [80/138], Loss: 0.1443\n",
      "Epoch [65/100], Step [100/138], Loss: 0.0791\n",
      "Epoch [65/100], Step [120/138], Loss: 0.0903\n",
      "Epoch [66/100], Step [20/138], Loss: 0.1298\n",
      "Epoch [66/100], Step [40/138], Loss: 0.1095\n",
      "Epoch [66/100], Step [60/138], Loss: 0.1274\n",
      "Epoch [66/100], Step [80/138], Loss: 0.1404\n",
      "Epoch [66/100], Step [100/138], Loss: 0.0773\n",
      "Epoch [66/100], Step [120/138], Loss: 0.0887\n",
      "Epoch [67/100], Step [20/138], Loss: 0.1225\n",
      "Epoch [67/100], Step [40/138], Loss: 0.1122\n",
      "Epoch [67/100], Step [60/138], Loss: 0.1358\n",
      "Epoch [67/100], Step [80/138], Loss: 0.1362\n",
      "Epoch [67/100], Step [100/138], Loss: 0.0759\n",
      "Epoch [67/100], Step [120/138], Loss: 0.0864\n",
      "Epoch [68/100], Step [20/138], Loss: 0.1212\n",
      "Epoch [68/100], Step [40/138], Loss: 0.1108\n",
      "Epoch [68/100], Step [60/138], Loss: 0.1444\n",
      "Epoch [68/100], Step [80/138], Loss: 0.1354\n",
      "Epoch [68/100], Step [100/138], Loss: 0.0801\n",
      "Epoch [68/100], Step [120/138], Loss: 0.0897\n",
      "Epoch [69/100], Step [20/138], Loss: 0.1186\n",
      "Epoch [69/100], Step [40/138], Loss: 0.1192\n",
      "Epoch [69/100], Step [60/138], Loss: 0.1730\n",
      "Epoch [69/100], Step [80/138], Loss: 0.1315\n",
      "Epoch [69/100], Step [100/138], Loss: 0.0798\n",
      "Epoch [69/100], Step [120/138], Loss: 0.1334\n",
      "Epoch [70/100], Step [20/138], Loss: 0.1173\n",
      "Epoch [70/100], Step [40/138], Loss: 0.1132\n",
      "Epoch [70/100], Step [60/138], Loss: 0.1380\n",
      "Epoch [70/100], Step [80/138], Loss: 0.1344\n",
      "Epoch [70/100], Step [100/138], Loss: 0.0755\n",
      "Epoch [70/100], Step [120/138], Loss: 0.1438\n",
      "Epoch [71/100], Step [20/138], Loss: 0.1171\n",
      "Epoch [71/100], Step [40/138], Loss: 0.1273\n",
      "Epoch [71/100], Step [60/138], Loss: 0.1542\n",
      "Epoch [71/100], Step [80/138], Loss: 0.1318\n",
      "Epoch [71/100], Step [100/138], Loss: 0.0736\n",
      "Epoch [71/100], Step [120/138], Loss: 0.0870\n",
      "Epoch [72/100], Step [20/138], Loss: 0.1163\n",
      "Epoch [72/100], Step [40/138], Loss: 0.0982\n",
      "Epoch [72/100], Step [60/138], Loss: 0.1583\n",
      "Epoch [72/100], Step [80/138], Loss: 0.1297\n",
      "Epoch [72/100], Step [100/138], Loss: 0.0707\n",
      "Epoch [72/100], Step [120/138], Loss: 0.0972\n",
      "Epoch [73/100], Step [20/138], Loss: 0.1160\n",
      "Epoch [73/100], Step [40/138], Loss: 0.0963\n",
      "Epoch [73/100], Step [60/138], Loss: 0.1262\n",
      "Epoch [73/100], Step [80/138], Loss: 0.1253\n",
      "Epoch [73/100], Step [100/138], Loss: 0.0681\n",
      "Epoch [73/100], Step [120/138], Loss: 0.1161\n",
      "Epoch [74/100], Step [20/138], Loss: 0.1145\n",
      "Epoch [74/100], Step [40/138], Loss: 0.0943\n",
      "Epoch [74/100], Step [60/138], Loss: 0.1316\n",
      "Epoch [74/100], Step [80/138], Loss: 0.1239\n",
      "Epoch [74/100], Step [100/138], Loss: 0.0663\n",
      "Epoch [74/100], Step [120/138], Loss: 0.0853\n",
      "Epoch [75/100], Step [20/138], Loss: 0.1134\n",
      "Epoch [75/100], Step [40/138], Loss: 0.0927\n",
      "Epoch [75/100], Step [60/138], Loss: 0.1276\n",
      "Epoch [75/100], Step [80/138], Loss: 0.1218\n",
      "Epoch [75/100], Step [100/138], Loss: 0.0649\n",
      "Epoch [75/100], Step [120/138], Loss: 0.0806\n",
      "Epoch [76/100], Step [20/138], Loss: 0.1120\n",
      "Epoch [76/100], Step [40/138], Loss: 0.0893\n",
      "Epoch [76/100], Step [60/138], Loss: 0.1252\n",
      "Epoch [76/100], Step [80/138], Loss: 0.1197\n",
      "Epoch [76/100], Step [100/138], Loss: 0.0636\n",
      "Epoch [76/100], Step [120/138], Loss: 0.0787\n",
      "Epoch [77/100], Step [20/138], Loss: 0.1106\n",
      "Epoch [77/100], Step [40/138], Loss: 0.0868\n",
      "Epoch [77/100], Step [60/138], Loss: 0.1216\n",
      "Epoch [77/100], Step [80/138], Loss: 0.1177\n",
      "Epoch [77/100], Step [100/138], Loss: 0.0662\n",
      "Epoch [77/100], Step [120/138], Loss: 0.0734\n",
      "Epoch [78/100], Step [20/138], Loss: 0.1091\n",
      "Epoch [78/100], Step [40/138], Loss: 0.0853\n",
      "Epoch [78/100], Step [60/138], Loss: 0.1183\n",
      "Epoch [78/100], Step [80/138], Loss: 0.1156\n",
      "Epoch [78/100], Step [100/138], Loss: 0.0636\n",
      "Epoch [78/100], Step [120/138], Loss: 0.0777\n",
      "Epoch [79/100], Step [20/138], Loss: 0.1122\n",
      "Epoch [79/100], Step [40/138], Loss: 0.0838\n",
      "Epoch [79/100], Step [60/138], Loss: 0.1164\n",
      "Epoch [79/100], Step [80/138], Loss: 0.1140\n",
      "Epoch [79/100], Step [100/138], Loss: 0.0640\n",
      "Epoch [79/100], Step [120/138], Loss: 0.0814\n",
      "Epoch [80/100], Step [20/138], Loss: 0.1071\n",
      "Epoch [80/100], Step [40/138], Loss: 0.0819\n",
      "Epoch [80/100], Step [60/138], Loss: 0.1137\n",
      "Epoch [80/100], Step [80/138], Loss: 0.1120\n",
      "Epoch [80/100], Step [100/138], Loss: 0.0627\n",
      "Epoch [80/100], Step [120/138], Loss: 0.0807\n",
      "Epoch [81/100], Step [20/138], Loss: 0.1063\n",
      "Epoch [81/100], Step [40/138], Loss: 0.0842\n",
      "Epoch [81/100], Step [60/138], Loss: 0.1109\n",
      "Epoch [81/100], Step [80/138], Loss: 0.1114\n",
      "Epoch [81/100], Step [100/138], Loss: 0.0619\n",
      "Epoch [81/100], Step [120/138], Loss: 0.0743\n",
      "Epoch [82/100], Step [20/138], Loss: 0.1054\n",
      "Epoch [82/100], Step [40/138], Loss: 0.0787\n",
      "Epoch [82/100], Step [60/138], Loss: 0.1065\n",
      "Epoch [82/100], Step [80/138], Loss: 0.1081\n",
      "Epoch [82/100], Step [100/138], Loss: 0.0609\n",
      "Epoch [82/100], Step [120/138], Loss: 0.0735\n",
      "Epoch [83/100], Step [20/138], Loss: 0.1048\n",
      "Epoch [83/100], Step [40/138], Loss: 0.0764\n",
      "Epoch [83/100], Step [60/138], Loss: 0.1071\n",
      "Epoch [83/100], Step [80/138], Loss: 0.1058\n",
      "Epoch [83/100], Step [100/138], Loss: 0.0597\n",
      "Epoch [83/100], Step [120/138], Loss: 0.0741\n",
      "Epoch [84/100], Step [20/138], Loss: 0.1043\n",
      "Epoch [84/100], Step [40/138], Loss: 0.0779\n",
      "Epoch [84/100], Step [60/138], Loss: 0.1011\n",
      "Epoch [84/100], Step [80/138], Loss: 0.1041\n",
      "Epoch [84/100], Step [100/138], Loss: 0.0586\n",
      "Epoch [84/100], Step [120/138], Loss: 0.0664\n",
      "Epoch [85/100], Step [20/138], Loss: 0.1032\n",
      "Epoch [85/100], Step [40/138], Loss: 0.0743\n",
      "Epoch [85/100], Step [60/138], Loss: 0.0993\n",
      "Epoch [85/100], Step [80/138], Loss: 0.1027\n",
      "Epoch [85/100], Step [100/138], Loss: 0.0576\n",
      "Epoch [85/100], Step [120/138], Loss: 0.0642\n",
      "Epoch [86/100], Step [20/138], Loss: 0.1024\n",
      "Epoch [86/100], Step [40/138], Loss: 0.0750\n",
      "Epoch [86/100], Step [60/138], Loss: 0.1018\n",
      "Epoch [86/100], Step [80/138], Loss: 0.1013\n",
      "Epoch [86/100], Step [100/138], Loss: 0.0572\n",
      "Epoch [86/100], Step [120/138], Loss: 0.0609\n",
      "Epoch [87/100], Step [20/138], Loss: 0.1015\n",
      "Epoch [87/100], Step [40/138], Loss: 0.0718\n",
      "Epoch [87/100], Step [60/138], Loss: 0.0963\n",
      "Epoch [87/100], Step [80/138], Loss: 0.1000\n",
      "Epoch [87/100], Step [100/138], Loss: 0.0560\n",
      "Epoch [87/100], Step [120/138], Loss: 0.0677\n",
      "Epoch [88/100], Step [20/138], Loss: 0.1008\n",
      "Epoch [88/100], Step [40/138], Loss: 0.0717\n",
      "Epoch [88/100], Step [60/138], Loss: 0.0950\n",
      "Epoch [88/100], Step [80/138], Loss: 0.0986\n",
      "Epoch [88/100], Step [100/138], Loss: 0.0550\n",
      "Epoch [88/100], Step [120/138], Loss: 0.0709\n",
      "Epoch [89/100], Step [20/138], Loss: 0.1001\n",
      "Epoch [89/100], Step [40/138], Loss: 0.0695\n",
      "Epoch [89/100], Step [60/138], Loss: 0.0938\n",
      "Epoch [89/100], Step [80/138], Loss: 0.0973\n",
      "Epoch [89/100], Step [100/138], Loss: 0.0540\n",
      "Epoch [89/100], Step [120/138], Loss: 0.0636\n",
      "Epoch [90/100], Step [20/138], Loss: 0.0996\n",
      "Epoch [90/100], Step [40/138], Loss: 0.0684\n",
      "Epoch [90/100], Step [60/138], Loss: 0.0931\n",
      "Epoch [90/100], Step [80/138], Loss: 0.0963\n",
      "Epoch [90/100], Step [100/138], Loss: 0.0534\n",
      "Epoch [90/100], Step [120/138], Loss: 0.0627\n",
      "Epoch [91/100], Step [20/138], Loss: 0.0992\n",
      "Epoch [91/100], Step [40/138], Loss: 0.0673\n",
      "Epoch [91/100], Step [60/138], Loss: 0.0913\n",
      "Epoch [91/100], Step [80/138], Loss: 0.0950\n",
      "Epoch [91/100], Step [100/138], Loss: 0.0526\n",
      "Epoch [91/100], Step [120/138], Loss: 0.0618\n",
      "Epoch [92/100], Step [20/138], Loss: 0.0988\n",
      "Epoch [92/100], Step [40/138], Loss: 0.0662\n",
      "Epoch [92/100], Step [60/138], Loss: 0.0900\n",
      "Epoch [92/100], Step [80/138], Loss: 0.0939\n",
      "Epoch [92/100], Step [100/138], Loss: 0.0519\n",
      "Epoch [92/100], Step [120/138], Loss: 0.0610\n",
      "Epoch [93/100], Step [20/138], Loss: 0.0984\n",
      "Epoch [93/100], Step [40/138], Loss: 0.0651\n",
      "Epoch [93/100], Step [60/138], Loss: 0.0886\n",
      "Epoch [93/100], Step [80/138], Loss: 0.0927\n",
      "Epoch [93/100], Step [100/138], Loss: 0.0511\n",
      "Epoch [93/100], Step [120/138], Loss: 0.0601\n",
      "Epoch [94/100], Step [20/138], Loss: 0.0980\n",
      "Epoch [94/100], Step [40/138], Loss: 0.0639\n",
      "Epoch [94/100], Step [60/138], Loss: 0.0873\n",
      "Epoch [94/100], Step [80/138], Loss: 0.0916\n",
      "Epoch [94/100], Step [100/138], Loss: 0.0503\n",
      "Epoch [94/100], Step [120/138], Loss: 0.0592\n",
      "Epoch [95/100], Step [20/138], Loss: 0.0977\n",
      "Epoch [95/100], Step [40/138], Loss: 0.0629\n",
      "Epoch [95/100], Step [60/138], Loss: 0.0861\n",
      "Epoch [95/100], Step [80/138], Loss: 0.0906\n",
      "Epoch [95/100], Step [100/138], Loss: 0.0496\n",
      "Epoch [95/100], Step [120/138], Loss: 0.0560\n",
      "Epoch [96/100], Step [20/138], Loss: 0.0974\n",
      "Epoch [96/100], Step [40/138], Loss: 0.0618\n",
      "Epoch [96/100], Step [60/138], Loss: 0.0847\n",
      "Epoch [96/100], Step [80/138], Loss: 0.0896\n",
      "Epoch [96/100], Step [100/138], Loss: 0.0489\n",
      "Epoch [96/100], Step [120/138], Loss: 0.0585\n",
      "Epoch [97/100], Step [20/138], Loss: 0.0968\n",
      "Epoch [97/100], Step [40/138], Loss: 0.0609\n",
      "Epoch [97/100], Step [60/138], Loss: 0.0826\n",
      "Epoch [97/100], Step [80/138], Loss: 0.0886\n",
      "Epoch [97/100], Step [100/138], Loss: 0.0483\n",
      "Epoch [97/100], Step [120/138], Loss: 0.0598\n",
      "Epoch [98/100], Step [20/138], Loss: 0.0964\n",
      "Epoch [98/100], Step [40/138], Loss: 0.0599\n",
      "Epoch [98/100], Step [60/138], Loss: 0.0811\n",
      "Epoch [98/100], Step [80/138], Loss: 0.0876\n",
      "Epoch [98/100], Step [100/138], Loss: 0.0476\n",
      "Epoch [98/100], Step [120/138], Loss: 0.0588\n",
      "Epoch [99/100], Step [20/138], Loss: 0.0959\n",
      "Epoch [99/100], Step [40/138], Loss: 0.0589\n",
      "Epoch [99/100], Step [60/138], Loss: 0.0809\n",
      "Epoch [99/100], Step [80/138], Loss: 0.0866\n",
      "Epoch [99/100], Step [100/138], Loss: 0.0470\n",
      "Epoch [99/100], Step [120/138], Loss: 0.0578\n",
      "Epoch [100/100], Step [20/138], Loss: 0.0954\n",
      "Epoch [100/100], Step [40/138], Loss: 0.0581\n",
      "Epoch [100/100], Step [60/138], Loss: 0.0789\n",
      "Epoch [100/100], Step [80/138], Loss: 0.0857\n",
      "Epoch [100/100], Step [100/138], Loss: 0.0464\n",
      "Epoch [100/100], Step [120/138], Loss: 0.0556\n"
     ]
    }
   ],
   "execution_count": 15
  },
  {
   "cell_type": "code",
   "id": "8a4e36b033918def",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-01T13:09:23.805800Z",
     "start_time": "2025-07-01T13:09:23.795343Z"
    }
   },
   "source": [
    "model.eval()\n",
    "\n",
    "def generate_text(content, steps, temperature=0.8):\n",
    "    \"\"\"Autoregressively extend `content` by `steps` characters.\n",
    "\n",
    "    Args:\n",
    "        content: seed string; every character must exist in the vocabulary.\n",
    "        steps: number of characters to generate.\n",
    "        temperature: softmax temperature. Values < 1 sharpen the\n",
    "            distribution (more deterministic), values > 1 flatten it\n",
    "            (more random).\n",
    "\n",
    "    Returns:\n",
    "        The seed string with `steps` generated characters appended.\n",
    "    \"\"\"\n",
    "    # Use a distinct name: `words` at module level is the vocabulary set.\n",
    "    generated = list(content)\n",
    "\n",
    "    hidden = None\n",
    "    for step in range(steps):\n",
    "        if step == 0:\n",
    "            # Prime the hidden state with the WHOLE prompt. Feeding only\n",
    "            # the last character here would silently discard the rest of\n",
    "            # a multi-character prompt.\n",
    "            context = generated\n",
    "        else:\n",
    "            # The carried hidden state already summarizes the history, so\n",
    "            # one new character per step is sufficient.\n",
    "            context = generated[-1:]\n",
    "\n",
    "        inputs = torch.LongTensor([word_to_idx[w] for w in context])\n",
    "        inputs = inputs.view(1, -1)  # (batch=1, seq_len)\n",
    "\n",
    "        with torch.no_grad():\n",
    "            # `outputs` holds logits for every time step; only the last\n",
    "            # step matters for predicting the next character. `hidden` is\n",
    "            # carried across loop iterations so the model keeps its memory\n",
    "            # of everything generated so far.\n",
    "            outputs, hidden = model(inputs, hidden)\n",
    "            last_output = outputs[0, -1, :]  # logits over the vocabulary\n",
    "\n",
    "        # Temperature scaling: dividing logits by temperature < 1 widens\n",
    "        # the gaps between them (sharper distribution); > 1 narrows them.\n",
    "        probs = torch.softmax(last_output / temperature, dim=-1)\n",
    "\n",
    "        # Sample the next character index in proportion to its probability.\n",
    "        result_idx = torch.multinomial(probs, 1).item()\n",
    "\n",
    "        generated.append(idx_to_word[result_idx])\n",
    "\n",
    "    return ''.join(generated)\n",
    "\n",
    "\n",
    "# Generate 20 characters; a low temperature makes output near-deterministic.\n",
    "print(generate_text(\"鹰\", 20, temperature=0.1))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
      "怅寥\n"
     ]
    }
   ],
   "execution_count": 16
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
