{
 "cells": [
  {
   "metadata": {},
   "cell_type": "markdown",
   "source": [
    "How training pairs are built (character-level next-character prediction):\n",
    "\n",
    "The target sequence Y is the input sequence X shifted left by one character\n",
    "(the final position is the end-of-sequence marker, EOS):\n",
    "\n",
    "独立寒秋，湘江北去，橘子洲头。\n",
    "立寒秋，湘江北去，橘子洲头EOS\n",
    "\n",
    "With a sliding window of length 5, consecutive (X, Y) pairs look like:\n",
    "\n",
    "独立寒秋，\n",
    "立寒秋，湘\n",
    "\n",
    "浪遏飞舟？\n",
    "遏飞舟？独"
   ],
   "id": "9c24a48cb62e5121"
  },
  {
   "cell_type": "code",
   "id": "92c9da5866bdcf7",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T03:27:01.016616Z",
     "start_time": "2025-07-04T03:27:01.011683Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import numpy as np\n",
    "\n",
    "# 示例文本数据，一首诗\n",
    "text = \"\"\"\n",
    "独立寒秋，湘江北去，橘子洲头。\n",
    "看万山红遍，层林尽染；漫江碧透，百舸争流。\n",
    "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
    "怅寥廓，问苍茫大地，谁主沉浮？\n",
    "携来百侣曾游，忆往昔峥嵘岁月稠。\n",
    "恰同学少年，风华正茂；书生意气，挥斥方遒。\n",
    "指点江山，激扬文字，粪土当年万户侯。\n",
    "曾记否，到中流击水，浪遏飞舟？\n",
    "\"\"\"\n",
    "\n",
    "# 创建词汇表\n",
    "words = set(text)\n",
    "vocab_size = len(words)\n",
    "word_to_idx = {word: i for i, word in enumerate(words)}\n",
    "idx_to_word = {i: word for i, word in enumerate(words)}\n",
    "\n",
    "print(idx_to_word)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{0: '学', 1: '浮', 2: '头', 3: '沉', 4: '激', 5: '鱼', 6: '橘', 7: '。', 8: '书', 9: '游', 10: '谁', 11: '华', 12: '文', 13: '翔', 14: '击', 15: '透', 16: '峥', 17: '字', 18: '湘', 19: '浪', 20: '染', 21: '天', 22: '鹰', 23: '携', 24: '主', 25: '寥', 26: '地', 27: '挥', 28: '山', 29: '争', 30: '意', 31: '遏', 32: '气', 33: '土', 34: '浅', 35: '岁', 36: '江', 37: '飞', 38: '万', 39: '问', 40: '竞', 41: '舟', 42: '红', 43: '侯', 44: '\\n', 45: '，', 46: '秋', 47: '长', 48: '遒', 49: '生', 50: '北', 51: '层', 52: '漫', 53: '类', 54: '底', 55: '？', 56: '粪', 57: '看', 58: '月', 59: '林', 60: '户', 61: '百', 62: '寒', 63: '中', 64: '苍', 65: '记', 66: '方', 67: '扬', 68: '否', 69: '恰', 70: '正', 71: '独', 72: '水', 73: '同', 74: '洲', 75: '茫', 76: '去', 77: '遍', 78: '廓', 79: '来', 80: '到', 81: '当', 82: '立', 83: '少', 84: '霜', 85: '尽', 86: '曾', 87: '嵘', 88: '；', 89: '由', 90: '年', 91: '稠', 92: '碧', 93: '自', 94: '风', 95: '斥', 96: '点', 97: '子', 98: '昔', 99: '茂', 100: '忆', 101: '空', 102: '舸', 103: '指', 104: '流', 105: '往', 106: '怅', 107: '侣', 108: '大'}\n"
     ]
    }
   ],
   "execution_count": 11
  },
  {
   "cell_type": "code",
   "id": "91280acf83012c57",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T03:27:04.178347Z",
     "start_time": "2025-07-04T03:27:04.171244Z"
    }
   },
   "source": [
    "\n",
    "# 超参数设置\n",
    "SEQ_LENGTH = 5  # 输入序列长度\n",
    "BATCH_SIZE = 1\n",
    "HIDDEN_SIZE = 128\n",
    "INPUT_SIZE = 128\n",
    "\n",
    "\n",
    "# 创建训练数据\n",
    "class TextDataset(Dataset):\n",
    "    def __init__(self, text, seq_length):\n",
    "        self.text = text\n",
    "        self.seq_length = seq_length\n",
    "\n",
    "        # 转换为索引序列\n",
    "        self.data = [word_to_idx[ch] for ch in text]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.data) - self.seq_length\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # 文本里的某个序列 X\n",
    "        input_seq = self.data[idx:idx + self.seq_length]\n",
    "\n",
    "        # 目标序列 Y\n",
    "        target_seq = self.data[idx + 1:idx + self.seq_length + 1]\n",
    "\n",
    "        # 相当于，假如语料为abcdefg, input_seq=abc, target_seq=bcd\n",
    "\n",
    "        return torch.LongTensor(input_seq), torch.LongTensor(target_seq)\n",
    "\n",
    "\n",
    "dataset = TextDataset(text, SEQ_LENGTH)\n",
    "dataloader = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)\n",
    "\n",
    "for input_seq, target_seq in dataloader:\n",
    "    print(input_seq)\n",
    "    print(target_seq)\n",
    "    break"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[44, 71, 82, 62, 46]])\n",
      "tensor([[71, 82, 62, 46, 45]])\n"
     ]
    }
   ],
   "execution_count": 13
  },
  {
   "cell_type": "code",
   "id": "88cba6d2660e76b2",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T03:27:06.867819Z",
     "start_time": "2025-07-04T03:27:06.844768Z"
    }
   },
   "source": [
    "for input_seq, target_seq in dataloader:\n",
    "    print(input_seq)\n",
    "    print(target_seq)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[44, 71, 82, 62, 46]])\n",
      "tensor([[71, 82, 62, 46, 45]])\n",
      "tensor([[71, 82, 62, 46, 45]])\n",
      "tensor([[82, 62, 46, 45, 18]])\n",
      "tensor([[82, 62, 46, 45, 18]])\n",
      "tensor([[62, 46, 45, 18, 36]])\n",
      "tensor([[62, 46, 45, 18, 36]])\n",
      "tensor([[46, 45, 18, 36, 50]])\n",
      "tensor([[46, 45, 18, 36, 50]])\n",
      "tensor([[45, 18, 36, 50, 76]])\n",
      "tensor([[45, 18, 36, 50, 76]])\n",
      "tensor([[18, 36, 50, 76, 45]])\n",
      "tensor([[18, 36, 50, 76, 45]])\n",
      "tensor([[36, 50, 76, 45,  6]])\n",
      "tensor([[36, 50, 76, 45,  6]])\n",
      "tensor([[50, 76, 45,  6, 97]])\n",
      "tensor([[50, 76, 45,  6, 97]])\n",
      "tensor([[76, 45,  6, 97, 74]])\n",
      "tensor([[76, 45,  6, 97, 74]])\n",
      "tensor([[45,  6, 97, 74,  2]])\n",
      "tensor([[45,  6, 97, 74,  2]])\n",
      "tensor([[ 6, 97, 74,  2,  7]])\n",
      "tensor([[ 6, 97, 74,  2,  7]])\n",
      "tensor([[97, 74,  2,  7, 44]])\n",
      "tensor([[97, 74,  2,  7, 44]])\n",
      "tensor([[74,  2,  7, 44, 57]])\n",
      "tensor([[74,  2,  7, 44, 57]])\n",
      "tensor([[ 2,  7, 44, 57, 38]])\n",
      "tensor([[ 2,  7, 44, 57, 38]])\n",
      "tensor([[ 7, 44, 57, 38, 28]])\n",
      "tensor([[ 7, 44, 57, 38, 28]])\n",
      "tensor([[44, 57, 38, 28, 42]])\n",
      "tensor([[44, 57, 38, 28, 42]])\n",
      "tensor([[57, 38, 28, 42, 77]])\n",
      "tensor([[57, 38, 28, 42, 77]])\n",
      "tensor([[38, 28, 42, 77, 45]])\n",
      "tensor([[38, 28, 42, 77, 45]])\n",
      "tensor([[28, 42, 77, 45, 51]])\n",
      "tensor([[28, 42, 77, 45, 51]])\n",
      "tensor([[42, 77, 45, 51, 59]])\n",
      "tensor([[42, 77, 45, 51, 59]])\n",
      "tensor([[77, 45, 51, 59, 85]])\n",
      "tensor([[77, 45, 51, 59, 85]])\n",
      "tensor([[45, 51, 59, 85, 20]])\n",
      "tensor([[45, 51, 59, 85, 20]])\n",
      "tensor([[51, 59, 85, 20, 88]])\n",
      "tensor([[51, 59, 85, 20, 88]])\n",
      "tensor([[59, 85, 20, 88, 52]])\n",
      "tensor([[59, 85, 20, 88, 52]])\n",
      "tensor([[85, 20, 88, 52, 36]])\n",
      "tensor([[85, 20, 88, 52, 36]])\n",
      "tensor([[20, 88, 52, 36, 92]])\n",
      "tensor([[20, 88, 52, 36, 92]])\n",
      "tensor([[88, 52, 36, 92, 15]])\n",
      "tensor([[88, 52, 36, 92, 15]])\n",
      "tensor([[52, 36, 92, 15, 45]])\n",
      "tensor([[52, 36, 92, 15, 45]])\n",
      "tensor([[36, 92, 15, 45, 61]])\n",
      "tensor([[36, 92, 15, 45, 61]])\n",
      "tensor([[ 92,  15,  45,  61, 102]])\n",
      "tensor([[ 92,  15,  45,  61, 102]])\n",
      "tensor([[ 15,  45,  61, 102,  29]])\n",
      "tensor([[ 15,  45,  61, 102,  29]])\n",
      "tensor([[ 45,  61, 102,  29, 104]])\n",
      "tensor([[ 45,  61, 102,  29, 104]])\n",
      "tensor([[ 61, 102,  29, 104,   7]])\n",
      "tensor([[ 61, 102,  29, 104,   7]])\n",
      "tensor([[102,  29, 104,   7,  44]])\n",
      "tensor([[102,  29, 104,   7,  44]])\n",
      "tensor([[ 29, 104,   7,  44,  22]])\n",
      "tensor([[ 29, 104,   7,  44,  22]])\n",
      "tensor([[104,   7,  44,  22,  14]])\n",
      "tensor([[104,   7,  44,  22,  14]])\n",
      "tensor([[ 7, 44, 22, 14, 47]])\n",
      "tensor([[ 7, 44, 22, 14, 47]])\n",
      "tensor([[ 44,  22,  14,  47, 101]])\n",
      "tensor([[ 44,  22,  14,  47, 101]])\n",
      "tensor([[ 22,  14,  47, 101,  45]])\n",
      "tensor([[ 22,  14,  47, 101,  45]])\n",
      "tensor([[ 14,  47, 101,  45,   5]])\n",
      "tensor([[ 14,  47, 101,  45,   5]])\n",
      "tensor([[ 47, 101,  45,   5,  13]])\n",
      "tensor([[ 47, 101,  45,   5,  13]])\n",
      "tensor([[101,  45,   5,  13,  34]])\n",
      "tensor([[101,  45,   5,  13,  34]])\n",
      "tensor([[45,  5, 13, 34, 54]])\n",
      "tensor([[45,  5, 13, 34, 54]])\n",
      "tensor([[ 5, 13, 34, 54, 45]])\n",
      "tensor([[ 5, 13, 34, 54, 45]])\n",
      "tensor([[13, 34, 54, 45, 38]])\n",
      "tensor([[13, 34, 54, 45, 38]])\n",
      "tensor([[34, 54, 45, 38, 53]])\n",
      "tensor([[34, 54, 45, 38, 53]])\n",
      "tensor([[54, 45, 38, 53, 84]])\n",
      "tensor([[54, 45, 38, 53, 84]])\n",
      "tensor([[45, 38, 53, 84, 21]])\n",
      "tensor([[45, 38, 53, 84, 21]])\n",
      "tensor([[38, 53, 84, 21, 40]])\n",
      "tensor([[38, 53, 84, 21, 40]])\n",
      "tensor([[53, 84, 21, 40, 93]])\n",
      "tensor([[53, 84, 21, 40, 93]])\n",
      "tensor([[84, 21, 40, 93, 89]])\n",
      "tensor([[84, 21, 40, 93, 89]])\n",
      "tensor([[21, 40, 93, 89,  7]])\n",
      "tensor([[21, 40, 93, 89,  7]])\n",
      "tensor([[40, 93, 89,  7, 44]])\n",
      "tensor([[40, 93, 89,  7, 44]])\n",
      "tensor([[ 93,  89,   7,  44, 106]])\n",
      "tensor([[ 93,  89,   7,  44, 106]])\n",
      "tensor([[ 89,   7,  44, 106,  25]])\n",
      "tensor([[ 89,   7,  44, 106,  25]])\n",
      "tensor([[  7,  44, 106,  25,  78]])\n",
      "tensor([[  7,  44, 106,  25,  78]])\n",
      "tensor([[ 44, 106,  25,  78,  45]])\n",
      "tensor([[ 44, 106,  25,  78,  45]])\n",
      "tensor([[106,  25,  78,  45,  39]])\n",
      "tensor([[106,  25,  78,  45,  39]])\n",
      "tensor([[25, 78, 45, 39, 64]])\n",
      "tensor([[25, 78, 45, 39, 64]])\n",
      "tensor([[78, 45, 39, 64, 75]])\n",
      "tensor([[78, 45, 39, 64, 75]])\n",
      "tensor([[ 45,  39,  64,  75, 108]])\n",
      "tensor([[ 45,  39,  64,  75, 108]])\n",
      "tensor([[ 39,  64,  75, 108,  26]])\n",
      "tensor([[ 39,  64,  75, 108,  26]])\n",
      "tensor([[ 64,  75, 108,  26,  45]])\n",
      "tensor([[ 64,  75, 108,  26,  45]])\n",
      "tensor([[ 75, 108,  26,  45,  10]])\n",
      "tensor([[ 75, 108,  26,  45,  10]])\n",
      "tensor([[108,  26,  45,  10,  24]])\n",
      "tensor([[108,  26,  45,  10,  24]])\n",
      "tensor([[26, 45, 10, 24,  3]])\n",
      "tensor([[26, 45, 10, 24,  3]])\n",
      "tensor([[45, 10, 24,  3,  1]])\n",
      "tensor([[45, 10, 24,  3,  1]])\n",
      "tensor([[10, 24,  3,  1, 55]])\n",
      "tensor([[10, 24,  3,  1, 55]])\n",
      "tensor([[24,  3,  1, 55, 44]])\n",
      "tensor([[24,  3,  1, 55, 44]])\n",
      "tensor([[ 3,  1, 55, 44, 23]])\n",
      "tensor([[ 3,  1, 55, 44, 23]])\n",
      "tensor([[ 1, 55, 44, 23, 79]])\n",
      "tensor([[ 1, 55, 44, 23, 79]])\n",
      "tensor([[55, 44, 23, 79, 61]])\n",
      "tensor([[55, 44, 23, 79, 61]])\n",
      "tensor([[ 44,  23,  79,  61, 107]])\n",
      "tensor([[ 44,  23,  79,  61, 107]])\n",
      "tensor([[ 23,  79,  61, 107,  86]])\n",
      "tensor([[ 23,  79,  61, 107,  86]])\n",
      "tensor([[ 79,  61, 107,  86,   9]])\n",
      "tensor([[ 79,  61, 107,  86,   9]])\n",
      "tensor([[ 61, 107,  86,   9,  45]])\n",
      "tensor([[ 61, 107,  86,   9,  45]])\n",
      "tensor([[107,  86,   9,  45, 100]])\n",
      "tensor([[107,  86,   9,  45, 100]])\n",
      "tensor([[ 86,   9,  45, 100, 105]])\n",
      "tensor([[ 86,   9,  45, 100, 105]])\n",
      "tensor([[  9,  45, 100, 105,  98]])\n",
      "tensor([[  9,  45, 100, 105,  98]])\n",
      "tensor([[ 45, 100, 105,  98,  16]])\n",
      "tensor([[ 45, 100, 105,  98,  16]])\n",
      "tensor([[100, 105,  98,  16,  87]])\n",
      "tensor([[100, 105,  98,  16,  87]])\n",
      "tensor([[105,  98,  16,  87,  35]])\n",
      "tensor([[105,  98,  16,  87,  35]])\n",
      "tensor([[98, 16, 87, 35, 58]])\n",
      "tensor([[98, 16, 87, 35, 58]])\n",
      "tensor([[16, 87, 35, 58, 91]])\n",
      "tensor([[16, 87, 35, 58, 91]])\n",
      "tensor([[87, 35, 58, 91,  7]])\n",
      "tensor([[87, 35, 58, 91,  7]])\n",
      "tensor([[35, 58, 91,  7, 44]])\n",
      "tensor([[35, 58, 91,  7, 44]])\n",
      "tensor([[58, 91,  7, 44, 69]])\n",
      "tensor([[58, 91,  7, 44, 69]])\n",
      "tensor([[91,  7, 44, 69, 73]])\n",
      "tensor([[91,  7, 44, 69, 73]])\n",
      "tensor([[ 7, 44, 69, 73,  0]])\n",
      "tensor([[ 7, 44, 69, 73,  0]])\n",
      "tensor([[44, 69, 73,  0, 83]])\n",
      "tensor([[44, 69, 73,  0, 83]])\n",
      "tensor([[69, 73,  0, 83, 90]])\n",
      "tensor([[69, 73,  0, 83, 90]])\n",
      "tensor([[73,  0, 83, 90, 45]])\n",
      "tensor([[73,  0, 83, 90, 45]])\n",
      "tensor([[ 0, 83, 90, 45, 94]])\n",
      "tensor([[ 0, 83, 90, 45, 94]])\n",
      "tensor([[83, 90, 45, 94, 11]])\n",
      "tensor([[83, 90, 45, 94, 11]])\n",
      "tensor([[90, 45, 94, 11, 70]])\n",
      "tensor([[90, 45, 94, 11, 70]])\n",
      "tensor([[45, 94, 11, 70, 99]])\n",
      "tensor([[45, 94, 11, 70, 99]])\n",
      "tensor([[94, 11, 70, 99, 88]])\n",
      "tensor([[94, 11, 70, 99, 88]])\n",
      "tensor([[11, 70, 99, 88,  8]])\n",
      "tensor([[11, 70, 99, 88,  8]])\n",
      "tensor([[70, 99, 88,  8, 49]])\n",
      "tensor([[70, 99, 88,  8, 49]])\n",
      "tensor([[99, 88,  8, 49, 30]])\n",
      "tensor([[99, 88,  8, 49, 30]])\n",
      "tensor([[88,  8, 49, 30, 32]])\n",
      "tensor([[88,  8, 49, 30, 32]])\n",
      "tensor([[ 8, 49, 30, 32, 45]])\n",
      "tensor([[ 8, 49, 30, 32, 45]])\n",
      "tensor([[49, 30, 32, 45, 27]])\n",
      "tensor([[49, 30, 32, 45, 27]])\n",
      "tensor([[30, 32, 45, 27, 95]])\n",
      "tensor([[30, 32, 45, 27, 95]])\n",
      "tensor([[32, 45, 27, 95, 66]])\n",
      "tensor([[32, 45, 27, 95, 66]])\n",
      "tensor([[45, 27, 95, 66, 48]])\n",
      "tensor([[45, 27, 95, 66, 48]])\n",
      "tensor([[27, 95, 66, 48,  7]])\n",
      "tensor([[27, 95, 66, 48,  7]])\n",
      "tensor([[95, 66, 48,  7, 44]])\n",
      "tensor([[95, 66, 48,  7, 44]])\n",
      "tensor([[ 66,  48,   7,  44, 103]])\n",
      "tensor([[ 66,  48,   7,  44, 103]])\n",
      "tensor([[ 48,   7,  44, 103,  96]])\n",
      "tensor([[ 48,   7,  44, 103,  96]])\n",
      "tensor([[  7,  44, 103,  96,  36]])\n",
      "tensor([[  7,  44, 103,  96,  36]])\n",
      "tensor([[ 44, 103,  96,  36,  28]])\n",
      "tensor([[ 44, 103,  96,  36,  28]])\n",
      "tensor([[103,  96,  36,  28,  45]])\n",
      "tensor([[103,  96,  36,  28,  45]])\n",
      "tensor([[96, 36, 28, 45,  4]])\n",
      "tensor([[96, 36, 28, 45,  4]])\n",
      "tensor([[36, 28, 45,  4, 67]])\n",
      "tensor([[36, 28, 45,  4, 67]])\n",
      "tensor([[28, 45,  4, 67, 12]])\n",
      "tensor([[28, 45,  4, 67, 12]])\n",
      "tensor([[45,  4, 67, 12, 17]])\n",
      "tensor([[45,  4, 67, 12, 17]])\n",
      "tensor([[ 4, 67, 12, 17, 45]])\n",
      "tensor([[ 4, 67, 12, 17, 45]])\n",
      "tensor([[67, 12, 17, 45, 56]])\n",
      "tensor([[67, 12, 17, 45, 56]])\n",
      "tensor([[12, 17, 45, 56, 33]])\n",
      "tensor([[12, 17, 45, 56, 33]])\n",
      "tensor([[17, 45, 56, 33, 81]])\n",
      "tensor([[17, 45, 56, 33, 81]])\n",
      "tensor([[45, 56, 33, 81, 90]])\n",
      "tensor([[45, 56, 33, 81, 90]])\n",
      "tensor([[56, 33, 81, 90, 38]])\n",
      "tensor([[56, 33, 81, 90, 38]])\n",
      "tensor([[33, 81, 90, 38, 60]])\n",
      "tensor([[33, 81, 90, 38, 60]])\n",
      "tensor([[81, 90, 38, 60, 43]])\n",
      "tensor([[81, 90, 38, 60, 43]])\n",
      "tensor([[90, 38, 60, 43,  7]])\n",
      "tensor([[90, 38, 60, 43,  7]])\n",
      "tensor([[38, 60, 43,  7, 44]])\n",
      "tensor([[38, 60, 43,  7, 44]])\n",
      "tensor([[60, 43,  7, 44, 86]])\n",
      "tensor([[60, 43,  7, 44, 86]])\n",
      "tensor([[43,  7, 44, 86, 65]])\n",
      "tensor([[43,  7, 44, 86, 65]])\n",
      "tensor([[ 7, 44, 86, 65, 68]])\n",
      "tensor([[ 7, 44, 86, 65, 68]])\n",
      "tensor([[44, 86, 65, 68, 45]])\n",
      "tensor([[44, 86, 65, 68, 45]])\n",
      "tensor([[86, 65, 68, 45, 80]])\n",
      "tensor([[86, 65, 68, 45, 80]])\n",
      "tensor([[65, 68, 45, 80, 63]])\n",
      "tensor([[65, 68, 45, 80, 63]])\n",
      "tensor([[ 68,  45,  80,  63, 104]])\n",
      "tensor([[ 68,  45,  80,  63, 104]])\n",
      "tensor([[ 45,  80,  63, 104,  14]])\n",
      "tensor([[ 45,  80,  63, 104,  14]])\n",
      "tensor([[ 80,  63, 104,  14,  72]])\n",
      "tensor([[ 80,  63, 104,  14,  72]])\n",
      "tensor([[ 63, 104,  14,  72,  45]])\n",
      "tensor([[ 63, 104,  14,  72,  45]])\n",
      "tensor([[104,  14,  72,  45,  19]])\n",
      "tensor([[104,  14,  72,  45,  19]])\n",
      "tensor([[14, 72, 45, 19, 31]])\n",
      "tensor([[14, 72, 45, 19, 31]])\n",
      "tensor([[72, 45, 19, 31, 37]])\n",
      "tensor([[72, 45, 19, 31, 37]])\n",
      "tensor([[45, 19, 31, 37, 41]])\n",
      "tensor([[45, 19, 31, 37, 41]])\n",
      "tensor([[19, 31, 37, 41, 55]])\n",
      "tensor([[19, 31, 37, 41, 55]])\n",
      "tensor([[31, 37, 41, 55, 44]])\n"
     ]
    }
   ],
   "execution_count": 14
  },
  {
   "cell_type": "code",
   "id": "d9e9fd013d3d01ba",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T03:27:33.820279Z",
     "start_time": "2025-07-04T03:27:33.812052Z"
    }
   },
   "source": [
     "# Character-level RNN language model, implemented from scratch (no nn.RNN).\n",
     "# Original author note: Grand Admiral Zhou Yu (WeChat: it_zhouyu).\n",
     "class ZhouyuModel(nn.Module):\n",
     "    def __init__(self, vocab_size, input_size, hidden_size):\n",
     "        super().__init__()\n",
     "\n",
     "        self.hidden_size = hidden_size\n",
     "\n",
     "        # Embedding layer: token index -> dense vector of size input_size.\n",
     "        self.embedding = nn.Embedding(vocab_size, input_size)\n",
     "\n",
     "        # Raw RNN parameters: h_t = tanh(x_t @ W_xh + h_{t-1} @ W_hh + b_h).\n",
     "        # NOTE(review): randn gives std-1 weights; for hidden_size=128 this can\n",
     "        # saturate tanh / blow up gradients — hence the (commented-out) gradient\n",
     "        # clipping in the training cell. Consider a scaled init.\n",
     "        self.W_xh = nn.Parameter(torch.randn(input_size, hidden_size))\n",
     "        self.W_hh = nn.Parameter(torch.randn(hidden_size, hidden_size))\n",
     "        self.b_h = nn.Parameter(torch.zeros(hidden_size))\n",
     "\n",
     "        # Output projection: hidden state -> vocabulary logits.\n",
     "        self.out_linear = nn.Linear(hidden_size, vocab_size)\n",
     "\n",
     "    def forward(self, x, hidden=None):\n",
     "        # x: (batch_size, seq_len) LongTensor of token indices.\n",
     "\n",
     "        embedded = self.embedding(x)\n",
     "\n",
     "        batch_size, seq_len, input_size = embedded.shape\n",
     "        # Switch to time-major layout so embedded[t] is the batch at step t.\n",
     "        embedded = torch.transpose(embedded, 0, 1)\n",
     "\n",
     "        # Fresh zero hidden state unless the caller threads one through.\n",
     "        if hidden is None:\n",
     "            hidden = torch.zeros(batch_size, self.hidden_size)\n",
     "\n",
     "        outputs = []\n",
     "        for t in range(seq_len):\n",
     "            # Embeddings of the t-th character of every sequence in the batch.\n",
     "            x_t = embedded[t]  # (batch_size, input_size)\n",
     "\n",
     "            hidden = torch.tanh(\n",
     "                torch.mm(x_t, self.W_xh) +\n",
     "                torch.mm(hidden, self.W_hh) +\n",
     "                self.b_h\n",
     "            )\n",
     "\n",
     "            # Project the hidden state to vocabulary logits for step t.\n",
     "            # hidden summarizes the first t characters, so combined with the\n",
     "            # step-t input this predicts the character at step t+1.\n",
     "            outputs.append(self.out_linear(hidden))\n",
     "\n",
     "        # Stack per-step logits into one sequence tensor so the whole output\n",
     "        # sequence can be compared against the target sequence by the loss.\n",
     "        outputs = torch.stack(outputs, dim=1)  # (batch_size, seq_length, vocab_size)\n",
     "\n",
     "        return outputs, hidden\n",
     "\n",
     "\n",
     "# Instantiate model, loss, and optimizer.\n",
     "model = ZhouyuModel(vocab_size, INPUT_SIZE, HIDDEN_SIZE)\n",
     "criterion = nn.CrossEntropyLoss()\n",
     "optimizer = torch.optim.SGD(model.parameters(), lr=0.005)"
   ],
   "outputs": [],
   "execution_count": 17
  },
  {
   "cell_type": "code",
   "id": "2348428ce74982e4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T03:27:45.394287Z",
     "start_time": "2025-07-04T03:27:36.466429Z"
    }
   },
   "source": [
     "# Training loop: 100 epochs over the sliding-window dataset.\n",
     "for epoch in range(100):\n",
     "    for i, (inputs, targets) in enumerate(dataloader):\n",
     "        # Forward pass (the returned final hidden state is discarded).\n",
     "        outputs, _ = model(inputs)\n",
     "\n",
     "        # Cross-entropy over every time step: flatten (batch, seq) so each\n",
     "        # step's logits are compared with that step's target character, and\n",
     "        # the per-step losses are averaged.\n",
     "        loss = criterion(\n",
     "            outputs.view(-1, vocab_size),  # (batch_size*seq_length, vocab_size)\n",
     "            targets.view(-1)  # (batch_size*seq_length)\n",
     "        )\n",
     "\n",
     "        optimizer.zero_grad()\n",
     "        loss.backward()\n",
     "        # Gradient clipping to guard against exploding gradients (disabled).\n",
     "        # nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)\n",
     "        optimizer.step()\n",
     "\n",
     "        if (i + 1) % 20 == 0:\n",
     "            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'\n",
     "                  .format(epoch + 1, 100, i + 1, len(dataloader), loss.item()))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch [1/100], Step [20/143], Loss: 4.8509\n",
      "Epoch [1/100], Step [40/143], Loss: 4.8693\n",
      "Epoch [1/100], Step [60/143], Loss: 5.0201\n",
      "Epoch [1/100], Step [80/143], Loss: 5.0608\n",
      "Epoch [1/100], Step [100/143], Loss: 4.4665\n",
      "Epoch [1/100], Step [120/143], Loss: 4.8779\n",
      "Epoch [1/100], Step [140/143], Loss: 5.1905\n",
      "Epoch [2/100], Step [20/143], Loss: 4.3804\n",
      "Epoch [2/100], Step [40/143], Loss: 4.3945\n",
      "Epoch [2/100], Step [60/143], Loss: 4.5492\n",
      "Epoch [2/100], Step [80/143], Loss: 4.6675\n",
      "Epoch [2/100], Step [100/143], Loss: 4.0177\n",
      "Epoch [2/100], Step [120/143], Loss: 4.4023\n",
      "Epoch [2/100], Step [140/143], Loss: 4.7387\n",
      "Epoch [3/100], Step [20/143], Loss: 3.9655\n",
      "Epoch [3/100], Step [40/143], Loss: 3.9909\n",
      "Epoch [3/100], Step [60/143], Loss: 4.0249\n",
      "Epoch [3/100], Step [80/143], Loss: 4.3146\n",
      "Epoch [3/100], Step [100/143], Loss: 3.5619\n",
      "Epoch [3/100], Step [120/143], Loss: 3.9495\n",
      "Epoch [3/100], Step [140/143], Loss: 4.3903\n",
      "Epoch [4/100], Step [20/143], Loss: 3.4847\n",
      "Epoch [4/100], Step [40/143], Loss: 3.7218\n",
      "Epoch [4/100], Step [60/143], Loss: 3.7324\n",
      "Epoch [4/100], Step [80/143], Loss: 3.9718\n",
      "Epoch [4/100], Step [100/143], Loss: 3.1615\n",
      "Epoch [4/100], Step [120/143], Loss: 3.5464\n",
      "Epoch [4/100], Step [140/143], Loss: 4.0458\n",
      "Epoch [5/100], Step [20/143], Loss: 3.1239\n",
      "Epoch [5/100], Step [40/143], Loss: 3.6027\n",
      "Epoch [5/100], Step [60/143], Loss: 3.3216\n",
      "Epoch [5/100], Step [80/143], Loss: 3.6527\n",
      "Epoch [5/100], Step [100/143], Loss: 2.8032\n",
      "Epoch [5/100], Step [120/143], Loss: 3.1480\n",
      "Epoch [5/100], Step [140/143], Loss: 3.7202\n",
      "Epoch [6/100], Step [20/143], Loss: 2.8757\n",
      "Epoch [6/100], Step [40/143], Loss: 3.2203\n",
      "Epoch [6/100], Step [60/143], Loss: 3.0197\n",
      "Epoch [6/100], Step [80/143], Loss: 3.3516\n",
      "Epoch [6/100], Step [100/143], Loss: 2.4480\n",
      "Epoch [6/100], Step [120/143], Loss: 2.7975\n",
      "Epoch [6/100], Step [140/143], Loss: 3.3939\n",
      "Epoch [7/100], Step [20/143], Loss: 2.4054\n",
      "Epoch [7/100], Step [40/143], Loss: 2.9081\n",
      "Epoch [7/100], Step [60/143], Loss: 2.5746\n",
      "Epoch [7/100], Step [80/143], Loss: 3.0766\n",
      "Epoch [7/100], Step [100/143], Loss: 2.1266\n",
      "Epoch [7/100], Step [120/143], Loss: 2.5284\n",
      "Epoch [7/100], Step [140/143], Loss: 3.0782\n",
      "Epoch [8/100], Step [20/143], Loss: 2.1370\n",
      "Epoch [8/100], Step [40/143], Loss: 2.5532\n",
      "Epoch [8/100], Step [60/143], Loss: 2.2564\n",
      "Epoch [8/100], Step [80/143], Loss: 2.8022\n",
      "Epoch [8/100], Step [100/143], Loss: 1.8435\n",
      "Epoch [8/100], Step [120/143], Loss: 2.1850\n",
      "Epoch [8/100], Step [140/143], Loss: 2.7763\n",
      "Epoch [9/100], Step [20/143], Loss: 1.8960\n",
      "Epoch [9/100], Step [40/143], Loss: 2.3483\n",
      "Epoch [9/100], Step [60/143], Loss: 2.2236\n",
      "Epoch [9/100], Step [80/143], Loss: 2.5607\n",
      "Epoch [9/100], Step [100/143], Loss: 1.5677\n",
      "Epoch [9/100], Step [120/143], Loss: 1.8762\n",
      "Epoch [9/100], Step [140/143], Loss: 2.5313\n",
      "Epoch [10/100], Step [20/143], Loss: 1.6710\n",
      "Epoch [10/100], Step [40/143], Loss: 2.0975\n",
      "Epoch [10/100], Step [60/143], Loss: 1.9645\n",
      "Epoch [10/100], Step [80/143], Loss: 2.3284\n",
      "Epoch [10/100], Step [100/143], Loss: 1.3566\n",
      "Epoch [10/100], Step [120/143], Loss: 1.6386\n",
      "Epoch [10/100], Step [140/143], Loss: 2.3116\n",
      "Epoch [11/100], Step [20/143], Loss: 1.4763\n",
      "Epoch [11/100], Step [40/143], Loss: 1.8173\n",
      "Epoch [11/100], Step [60/143], Loss: 1.6701\n",
      "Epoch [11/100], Step [80/143], Loss: 2.1142\n",
      "Epoch [11/100], Step [100/143], Loss: 1.1547\n",
      "Epoch [11/100], Step [120/143], Loss: 1.4240\n",
      "Epoch [11/100], Step [140/143], Loss: 2.0593\n",
      "Epoch [12/100], Step [20/143], Loss: 1.3134\n",
      "Epoch [12/100], Step [40/143], Loss: 1.6108\n",
      "Epoch [12/100], Step [60/143], Loss: 1.4386\n",
      "Epoch [12/100], Step [80/143], Loss: 1.9305\n",
      "Epoch [12/100], Step [100/143], Loss: 1.0009\n",
      "Epoch [12/100], Step [120/143], Loss: 1.2250\n",
      "Epoch [12/100], Step [140/143], Loss: 1.8580\n",
      "Epoch [13/100], Step [20/143], Loss: 1.1622\n",
      "Epoch [13/100], Step [40/143], Loss: 1.4227\n",
      "Epoch [13/100], Step [60/143], Loss: 1.2219\n",
      "Epoch [13/100], Step [80/143], Loss: 1.7634\n",
      "Epoch [13/100], Step [100/143], Loss: 0.8540\n",
      "Epoch [13/100], Step [120/143], Loss: 1.0779\n",
      "Epoch [13/100], Step [140/143], Loss: 1.6783\n",
      "Epoch [14/100], Step [20/143], Loss: 1.0397\n",
      "Epoch [14/100], Step [40/143], Loss: 1.2904\n",
      "Epoch [14/100], Step [60/143], Loss: 1.0738\n",
      "Epoch [14/100], Step [80/143], Loss: 1.6262\n",
      "Epoch [14/100], Step [100/143], Loss: 0.7429\n",
      "Epoch [14/100], Step [120/143], Loss: 0.9556\n",
      "Epoch [14/100], Step [140/143], Loss: 1.5247\n",
      "Epoch [15/100], Step [20/143], Loss: 0.9395\n",
      "Epoch [15/100], Step [40/143], Loss: 1.2230\n",
      "Epoch [15/100], Step [60/143], Loss: 1.1649\n",
      "Epoch [15/100], Step [80/143], Loss: 1.4920\n",
      "Epoch [15/100], Step [100/143], Loss: 0.6593\n",
      "Epoch [15/100], Step [120/143], Loss: 0.8526\n",
      "Epoch [15/100], Step [140/143], Loss: 1.4101\n",
      "Epoch [16/100], Step [20/143], Loss: 0.8500\n",
      "Epoch [16/100], Step [40/143], Loss: 1.0118\n",
      "Epoch [16/100], Step [60/143], Loss: 0.9874\n",
      "Epoch [16/100], Step [80/143], Loss: 1.3626\n",
      "Epoch [16/100], Step [100/143], Loss: 0.5801\n",
      "Epoch [16/100], Step [120/143], Loss: 0.7693\n",
      "Epoch [16/100], Step [140/143], Loss: 1.2827\n",
      "Epoch [17/100], Step [20/143], Loss: 0.7727\n",
      "Epoch [17/100], Step [40/143], Loss: 0.9529\n",
      "Epoch [17/100], Step [60/143], Loss: 0.7532\n",
      "Epoch [17/100], Step [80/143], Loss: 1.2512\n",
      "Epoch [17/100], Step [100/143], Loss: 0.5104\n",
      "Epoch [17/100], Step [120/143], Loss: 0.6893\n",
      "Epoch [17/100], Step [140/143], Loss: 1.1636\n",
      "Epoch [18/100], Step [20/143], Loss: 0.7039\n",
      "Epoch [18/100], Step [40/143], Loss: 0.7677\n",
      "Epoch [18/100], Step [60/143], Loss: 0.7135\n",
      "Epoch [18/100], Step [80/143], Loss: 1.1550\n",
      "Epoch [18/100], Step [100/143], Loss: 0.4551\n",
      "Epoch [18/100], Step [120/143], Loss: 0.6242\n",
      "Epoch [18/100], Step [140/143], Loss: 1.0471\n",
      "Epoch [19/100], Step [20/143], Loss: 0.6367\n",
      "Epoch [19/100], Step [40/143], Loss: 0.6777\n",
      "Epoch [19/100], Step [60/143], Loss: 0.6252\n",
      "Epoch [19/100], Step [80/143], Loss: 1.0637\n",
      "Epoch [19/100], Step [100/143], Loss: 0.4119\n",
      "Epoch [19/100], Step [120/143], Loss: 0.5692\n",
      "Epoch [19/100], Step [140/143], Loss: 0.9673\n",
      "Epoch [20/100], Step [20/143], Loss: 0.5838\n",
      "Epoch [20/100], Step [40/143], Loss: 0.6002\n",
      "Epoch [20/100], Step [60/143], Loss: 0.5624\n",
      "Epoch [20/100], Step [80/143], Loss: 0.9834\n",
      "Epoch [20/100], Step [100/143], Loss: 0.3747\n",
      "Epoch [20/100], Step [120/143], Loss: 0.5207\n",
      "Epoch [20/100], Step [140/143], Loss: 0.8984\n",
      "Epoch [21/100], Step [20/143], Loss: 0.5427\n",
      "Epoch [21/100], Step [40/143], Loss: 0.5433\n",
      "Epoch [21/100], Step [60/143], Loss: 0.4929\n",
      "Epoch [21/100], Step [80/143], Loss: 0.9148\n",
      "Epoch [21/100], Step [100/143], Loss: 0.3424\n",
      "Epoch [21/100], Step [120/143], Loss: 0.4762\n",
      "Epoch [21/100], Step [140/143], Loss: 0.8323\n",
      "Epoch [22/100], Step [20/143], Loss: 0.5075\n",
      "Epoch [22/100], Step [40/143], Loss: 0.4908\n",
      "Epoch [22/100], Step [60/143], Loss: 0.4435\n",
      "Epoch [22/100], Step [80/143], Loss: 0.8520\n",
      "Epoch [22/100], Step [100/143], Loss: 0.3150\n",
      "Epoch [22/100], Step [120/143], Loss: 0.4367\n",
      "Epoch [22/100], Step [140/143], Loss: 0.7787\n",
      "Epoch [23/100], Step [20/143], Loss: 0.4764\n",
      "Epoch [23/100], Step [40/143], Loss: 0.4501\n",
      "Epoch [23/100], Step [60/143], Loss: 0.3999\n",
      "Epoch [23/100], Step [80/143], Loss: 0.7944\n",
      "Epoch [23/100], Step [100/143], Loss: 0.2910\n",
      "Epoch [23/100], Step [120/143], Loss: 0.4017\n",
      "Epoch [23/100], Step [140/143], Loss: 0.7319\n",
      "Epoch [24/100], Step [20/143], Loss: 0.4441\n",
      "Epoch [24/100], Step [40/143], Loss: 0.4161\n",
      "Epoch [24/100], Step [60/143], Loss: 0.3680\n",
      "Epoch [24/100], Step [80/143], Loss: 0.7438\n",
      "Epoch [24/100], Step [100/143], Loss: 0.2700\n",
      "Epoch [24/100], Step [120/143], Loss: 0.3705\n",
      "Epoch [24/100], Step [140/143], Loss: 0.6905\n",
      "Epoch [25/100], Step [20/143], Loss: 0.4218\n",
      "Epoch [25/100], Step [40/143], Loss: 0.3869\n",
      "Epoch [25/100], Step [60/143], Loss: 0.3421\n",
      "Epoch [25/100], Step [80/143], Loss: 0.6991\n",
      "Epoch [25/100], Step [100/143], Loss: 0.2524\n",
      "Epoch [25/100], Step [120/143], Loss: 0.3435\n",
      "Epoch [25/100], Step [140/143], Loss: 0.6528\n",
      "Epoch [26/100], Step [20/143], Loss: 0.4014\n",
      "Epoch [26/100], Step [40/143], Loss: 0.3605\n",
      "Epoch [26/100], Step [60/143], Loss: 0.3187\n",
      "Epoch [26/100], Step [80/143], Loss: 0.6576\n",
      "Epoch [26/100], Step [100/143], Loss: 0.2355\n",
      "Epoch [26/100], Step [120/143], Loss: 0.3195\n",
      "Epoch [26/100], Step [140/143], Loss: 0.6190\n",
      "Epoch [27/100], Step [20/143], Loss: 0.3830\n",
      "Epoch [27/100], Step [40/143], Loss: 0.3374\n",
      "Epoch [27/100], Step [60/143], Loss: 0.2979\n",
      "Epoch [27/100], Step [80/143], Loss: 0.6169\n",
      "Epoch [27/100], Step [100/143], Loss: 0.2210\n",
      "Epoch [27/100], Step [120/143], Loss: 0.2985\n",
      "Epoch [27/100], Step [140/143], Loss: 0.5884\n",
      "Epoch [28/100], Step [20/143], Loss: 0.3667\n",
      "Epoch [28/100], Step [40/143], Loss: 0.3168\n",
      "Epoch [28/100], Step [60/143], Loss: 0.2794\n",
      "Epoch [28/100], Step [80/143], Loss: 0.5831\n",
      "Epoch [28/100], Step [100/143], Loss: 0.2076\n",
      "Epoch [28/100], Step [120/143], Loss: 0.2801\n",
      "Epoch [28/100], Step [140/143], Loss: 0.5615\n",
      "Epoch [29/100], Step [20/143], Loss: 0.3531\n",
      "Epoch [29/100], Step [40/143], Loss: 0.2985\n",
      "Epoch [29/100], Step [60/143], Loss: 0.2627\n",
      "Epoch [29/100], Step [80/143], Loss: 0.5527\n",
      "Epoch [29/100], Step [100/143], Loss: 0.1962\n",
      "Epoch [29/100], Step [120/143], Loss: 0.2633\n",
      "Epoch [29/100], Step [140/143], Loss: 0.5367\n",
      "Epoch [30/100], Step [20/143], Loss: 0.3409\n",
      "Epoch [30/100], Step [40/143], Loss: 0.2819\n",
      "Epoch [30/100], Step [60/143], Loss: 0.2480\n",
      "Epoch [30/100], Step [80/143], Loss: 0.5232\n",
      "Epoch [30/100], Step [100/143], Loss: 0.1859\n",
      "Epoch [30/100], Step [120/143], Loss: 0.2485\n",
      "Epoch [30/100], Step [140/143], Loss: 0.5140\n",
      "Epoch [31/100], Step [20/143], Loss: 0.3305\n",
      "Epoch [31/100], Step [40/143], Loss: 0.2670\n",
      "Epoch [31/100], Step [60/143], Loss: 0.2344\n",
      "Epoch [31/100], Step [80/143], Loss: 0.4967\n",
      "Epoch [31/100], Step [100/143], Loss: 0.1766\n",
      "Epoch [31/100], Step [120/143], Loss: 0.2345\n",
      "Epoch [31/100], Step [140/143], Loss: 0.4929\n",
      "Epoch [32/100], Step [20/143], Loss: 0.3206\n",
      "Epoch [32/100], Step [40/143], Loss: 0.2534\n",
      "Epoch [32/100], Step [60/143], Loss: 0.2222\n",
      "Epoch [32/100], Step [80/143], Loss: 0.4711\n",
      "Epoch [32/100], Step [100/143], Loss: 0.1679\n",
      "Epoch [32/100], Step [120/143], Loss: 0.2224\n",
      "Epoch [32/100], Step [140/143], Loss: 0.4730\n",
      "Epoch [33/100], Step [20/143], Loss: 0.3116\n",
      "Epoch [33/100], Step [40/143], Loss: 0.2409\n",
      "Epoch [33/100], Step [60/143], Loss: 0.2111\n",
      "Epoch [33/100], Step [80/143], Loss: 0.4482\n",
      "Epoch [33/100], Step [100/143], Loss: 0.1602\n",
      "Epoch [33/100], Step [120/143], Loss: 0.2110\n",
      "Epoch [33/100], Step [140/143], Loss: 0.4544\n",
      "Epoch [34/100], Step [20/143], Loss: 0.3036\n",
      "Epoch [34/100], Step [40/143], Loss: 0.2295\n",
      "Epoch [34/100], Step [60/143], Loss: 0.2010\n",
      "Epoch [34/100], Step [80/143], Loss: 0.4262\n",
      "Epoch [34/100], Step [100/143], Loss: 0.1530\n",
      "Epoch [34/100], Step [120/143], Loss: 0.2009\n",
      "Epoch [34/100], Step [140/143], Loss: 0.4377\n",
      "Epoch [35/100], Step [20/143], Loss: 0.2958\n",
      "Epoch [35/100], Step [40/143], Loss: 0.2190\n",
      "Epoch [35/100], Step [60/143], Loss: 0.1917\n",
      "Epoch [35/100], Step [80/143], Loss: 0.4063\n",
      "Epoch [35/100], Step [100/143], Loss: 0.1464\n",
      "Epoch [35/100], Step [120/143], Loss: 0.1915\n",
      "Epoch [35/100], Step [140/143], Loss: 0.4232\n",
      "Epoch [36/100], Step [20/143], Loss: 0.2894\n",
      "Epoch [36/100], Step [40/143], Loss: 0.2093\n",
      "Epoch [36/100], Step [60/143], Loss: 0.1832\n",
      "Epoch [36/100], Step [80/143], Loss: 0.3870\n",
      "Epoch [36/100], Step [100/143], Loss: 0.1403\n",
      "Epoch [36/100], Step [120/143], Loss: 0.1831\n",
      "Epoch [36/100], Step [140/143], Loss: 0.4098\n",
      "Epoch [37/100], Step [20/143], Loss: 0.2838\n",
      "Epoch [37/100], Step [40/143], Loss: 0.2003\n",
      "Epoch [37/100], Step [60/143], Loss: 0.1755\n",
      "Epoch [37/100], Step [80/143], Loss: 0.3697\n",
      "Epoch [37/100], Step [100/143], Loss: 0.1347\n",
      "Epoch [37/100], Step [120/143], Loss: 0.1751\n",
      "Epoch [37/100], Step [140/143], Loss: 0.3975\n",
      "Epoch [38/100], Step [20/143], Loss: 0.2783\n",
      "Epoch [38/100], Step [40/143], Loss: 0.1921\n",
      "Epoch [38/100], Step [60/143], Loss: 0.1683\n",
      "Epoch [38/100], Step [80/143], Loss: 0.3539\n",
      "Epoch [38/100], Step [100/143], Loss: 0.1295\n",
      "Epoch [38/100], Step [120/143], Loss: 0.1679\n",
      "Epoch [38/100], Step [140/143], Loss: 0.3864\n",
      "Epoch [39/100], Step [20/143], Loss: 0.2737\n",
      "Epoch [39/100], Step [40/143], Loss: 0.1844\n",
      "Epoch [39/100], Step [60/143], Loss: 0.1616\n",
      "Epoch [39/100], Step [80/143], Loss: 0.3384\n",
      "Epoch [39/100], Step [100/143], Loss: 0.1247\n",
      "Epoch [39/100], Step [120/143], Loss: 0.1611\n",
      "Epoch [39/100], Step [140/143], Loss: 0.3758\n",
      "Epoch [40/100], Step [20/143], Loss: 0.2689\n",
      "Epoch [40/100], Step [40/143], Loss: 0.1774\n",
      "Epoch [40/100], Step [60/143], Loss: 0.1555\n",
      "Epoch [40/100], Step [80/143], Loss: 0.3251\n",
      "Epoch [40/100], Step [100/143], Loss: 0.1202\n",
      "Epoch [40/100], Step [120/143], Loss: 0.1549\n",
      "Epoch [40/100], Step [140/143], Loss: 0.3662\n",
      "Epoch [41/100], Step [20/143], Loss: 0.2650\n",
      "Epoch [41/100], Step [40/143], Loss: 0.1708\n",
      "Epoch [41/100], Step [60/143], Loss: 0.1498\n",
      "Epoch [41/100], Step [80/143], Loss: 0.3115\n",
      "Epoch [41/100], Step [100/143], Loss: 0.1161\n",
      "Epoch [41/100], Step [120/143], Loss: 0.1491\n",
      "Epoch [41/100], Step [140/143], Loss: 0.3569\n",
      "Epoch [42/100], Step [20/143], Loss: 0.2611\n",
      "Epoch [42/100], Step [40/143], Loss: 0.1646\n",
      "Epoch [42/100], Step [60/143], Loss: 0.1444\n",
      "Epoch [42/100], Step [80/143], Loss: 0.2990\n",
      "Epoch [42/100], Step [100/143], Loss: 0.1122\n",
      "Epoch [42/100], Step [120/143], Loss: 0.1436\n",
      "Epoch [42/100], Step [140/143], Loss: 0.3488\n",
      "Epoch [43/100], Step [20/143], Loss: 0.2575\n",
      "Epoch [43/100], Step [40/143], Loss: 0.1589\n",
      "Epoch [43/100], Step [60/143], Loss: 0.1393\n",
      "Epoch [43/100], Step [80/143], Loss: 0.2876\n",
      "Epoch [43/100], Step [100/143], Loss: 0.1085\n",
      "Epoch [43/100], Step [120/143], Loss: 0.1384\n",
      "Epoch [43/100], Step [140/143], Loss: 0.3406\n",
      "Epoch [44/100], Step [20/143], Loss: 0.2541\n",
      "Epoch [44/100], Step [40/143], Loss: 0.1535\n",
      "Epoch [44/100], Step [60/143], Loss: 0.1347\n",
      "Epoch [44/100], Step [80/143], Loss: 0.2767\n",
      "Epoch [44/100], Step [100/143], Loss: 0.1051\n",
      "Epoch [44/100], Step [120/143], Loss: 0.1337\n",
      "Epoch [44/100], Step [140/143], Loss: 0.3329\n",
      "Epoch [45/100], Step [20/143], Loss: 0.2508\n",
      "Epoch [45/100], Step [40/143], Loss: 0.1484\n",
      "Epoch [45/100], Step [60/143], Loss: 0.1304\n",
      "Epoch [45/100], Step [80/143], Loss: 0.2670\n",
      "Epoch [45/100], Step [100/143], Loss: 0.1018\n",
      "Epoch [45/100], Step [120/143], Loss: 0.1293\n",
      "Epoch [45/100], Step [140/143], Loss: 0.3260\n",
      "Epoch [46/100], Step [20/143], Loss: 0.2477\n",
      "Epoch [46/100], Step [40/143], Loss: 0.1436\n",
      "Epoch [46/100], Step [60/143], Loss: 0.1262\n",
      "Epoch [46/100], Step [80/143], Loss: 0.2573\n",
      "Epoch [46/100], Step [100/143], Loss: 0.0988\n",
      "Epoch [46/100], Step [120/143], Loss: 0.1251\n",
      "Epoch [46/100], Step [140/143], Loss: 0.3194\n",
      "Epoch [47/100], Step [20/143], Loss: 0.2456\n",
      "Epoch [47/100], Step [40/143], Loss: 0.1392\n",
      "Epoch [47/100], Step [60/143], Loss: 0.1223\n",
      "Epoch [47/100], Step [80/143], Loss: 0.2483\n",
      "Epoch [47/100], Step [100/143], Loss: 0.0959\n",
      "Epoch [47/100], Step [120/143], Loss: 0.1213\n",
      "Epoch [47/100], Step [140/143], Loss: 0.3132\n",
      "Epoch [48/100], Step [20/143], Loss: 0.2421\n",
      "Epoch [48/100], Step [40/143], Loss: 0.1350\n",
      "Epoch [48/100], Step [60/143], Loss: 0.1187\n",
      "Epoch [48/100], Step [80/143], Loss: 0.2402\n",
      "Epoch [48/100], Step [100/143], Loss: 0.0932\n",
      "Epoch [48/100], Step [120/143], Loss: 0.1176\n",
      "Epoch [48/100], Step [140/143], Loss: 0.3076\n",
      "Epoch [49/100], Step [20/143], Loss: 0.2400\n",
      "Epoch [49/100], Step [40/143], Loss: 0.1309\n",
      "Epoch [49/100], Step [60/143], Loss: 0.1152\n",
      "Epoch [49/100], Step [80/143], Loss: 0.2321\n",
      "Epoch [49/100], Step [100/143], Loss: 0.0906\n",
      "Epoch [49/100], Step [120/143], Loss: 0.1141\n",
      "Epoch [49/100], Step [140/143], Loss: 0.3022\n",
      "Epoch [50/100], Step [20/143], Loss: 0.2373\n",
      "Epoch [50/100], Step [40/143], Loss: 0.1271\n",
      "Epoch [50/100], Step [60/143], Loss: 0.1120\n",
      "Epoch [50/100], Step [80/143], Loss: 0.2249\n",
      "Epoch [50/100], Step [100/143], Loss: 0.0882\n",
      "Epoch [50/100], Step [120/143], Loss: 0.1108\n",
      "Epoch [50/100], Step [140/143], Loss: 0.2972\n",
      "Epoch [51/100], Step [20/143], Loss: 0.2351\n",
      "Epoch [51/100], Step [40/143], Loss: 0.1235\n",
      "Epoch [51/100], Step [60/143], Loss: 0.1089\n",
      "Epoch [51/100], Step [80/143], Loss: 0.2176\n",
      "Epoch [51/100], Step [100/143], Loss: 0.0859\n",
      "Epoch [51/100], Step [120/143], Loss: 0.1077\n",
      "Epoch [51/100], Step [140/143], Loss: 0.2923\n",
      "Epoch [52/100], Step [20/143], Loss: 0.2332\n",
      "Epoch [52/100], Step [40/143], Loss: 0.1201\n",
      "Epoch [52/100], Step [60/143], Loss: 0.1059\n",
      "Epoch [52/100], Step [80/143], Loss: 0.2110\n",
      "Epoch [52/100], Step [100/143], Loss: 0.0837\n",
      "Epoch [52/100], Step [120/143], Loss: 0.1047\n",
      "Epoch [52/100], Step [140/143], Loss: 0.2877\n",
      "Epoch [53/100], Step [20/143], Loss: 0.2310\n",
      "Epoch [53/100], Step [40/143], Loss: 0.1169\n",
      "Epoch [53/100], Step [60/143], Loss: 0.1031\n",
      "Epoch [53/100], Step [80/143], Loss: 0.2046\n",
      "Epoch [53/100], Step [100/143], Loss: 0.0816\n",
      "Epoch [53/100], Step [120/143], Loss: 0.1019\n",
      "Epoch [53/100], Step [140/143], Loss: 0.2834\n",
      "Epoch [54/100], Step [20/143], Loss: 0.2289\n",
      "Epoch [54/100], Step [40/143], Loss: 0.1138\n",
      "Epoch [54/100], Step [60/143], Loss: 0.1005\n",
      "Epoch [54/100], Step [80/143], Loss: 0.1987\n",
      "Epoch [54/100], Step [100/143], Loss: 0.0796\n",
      "Epoch [54/100], Step [120/143], Loss: 0.0993\n",
      "Epoch [54/100], Step [140/143], Loss: 0.2794\n",
      "Epoch [55/100], Step [20/143], Loss: 0.2272\n",
      "Epoch [55/100], Step [40/143], Loss: 0.1108\n",
      "Epoch [55/100], Step [60/143], Loss: 0.0979\n",
      "Epoch [55/100], Step [80/143], Loss: 0.1928\n",
      "Epoch [55/100], Step [100/143], Loss: 0.0777\n",
      "Epoch [55/100], Step [120/143], Loss: 0.0968\n",
      "Epoch [55/100], Step [140/143], Loss: 0.2754\n",
      "Epoch [56/100], Step [20/143], Loss: 0.2252\n",
      "Epoch [56/100], Step [40/143], Loss: 0.1080\n",
      "Epoch [56/100], Step [60/143], Loss: 0.0956\n",
      "Epoch [56/100], Step [80/143], Loss: 0.1877\n",
      "Epoch [56/100], Step [100/143], Loss: 0.0759\n",
      "Epoch [56/100], Step [120/143], Loss: 0.0943\n",
      "Epoch [56/100], Step [140/143], Loss: 0.2718\n",
      "Epoch [57/100], Step [20/143], Loss: 0.2239\n",
      "Epoch [57/100], Step [40/143], Loss: 0.1053\n",
      "Epoch [57/100], Step [60/143], Loss: 0.0933\n",
      "Epoch [57/100], Step [80/143], Loss: 0.1825\n",
      "Epoch [57/100], Step [100/143], Loss: 0.0742\n",
      "Epoch [57/100], Step [120/143], Loss: 0.0921\n",
      "Epoch [57/100], Step [140/143], Loss: 0.2681\n",
      "Epoch [58/100], Step [20/143], Loss: 0.2220\n",
      "Epoch [58/100], Step [40/143], Loss: 0.1027\n",
      "Epoch [58/100], Step [60/143], Loss: 0.0911\n",
      "Epoch [58/100], Step [80/143], Loss: 0.1776\n",
      "Epoch [58/100], Step [100/143], Loss: 0.0725\n",
      "Epoch [58/100], Step [120/143], Loss: 0.0899\n",
      "Epoch [58/100], Step [140/143], Loss: 0.2647\n",
      "Epoch [59/100], Step [20/143], Loss: 0.2207\n",
      "Epoch [59/100], Step [40/143], Loss: 0.1002\n",
      "Epoch [59/100], Step [60/143], Loss: 0.0890\n",
      "Epoch [59/100], Step [80/143], Loss: 0.1729\n",
      "Epoch [59/100], Step [100/143], Loss: 0.0709\n",
      "Epoch [59/100], Step [120/143], Loss: 0.0879\n",
      "Epoch [59/100], Step [140/143], Loss: 0.2614\n",
      "Epoch [60/100], Step [20/143], Loss: 0.2191\n",
      "Epoch [60/100], Step [40/143], Loss: 0.0978\n",
      "Epoch [60/100], Step [60/143], Loss: 0.0871\n",
      "Epoch [60/100], Step [80/143], Loss: 0.1686\n",
      "Epoch [60/100], Step [100/143], Loss: 0.0694\n",
      "Epoch [60/100], Step [120/143], Loss: 0.0859\n",
      "Epoch [60/100], Step [140/143], Loss: 0.2584\n",
      "Epoch [61/100], Step [20/143], Loss: 0.2179\n",
      "Epoch [61/100], Step [40/143], Loss: 0.0955\n",
      "Epoch [61/100], Step [60/143], Loss: 0.0851\n",
      "Epoch [61/100], Step [80/143], Loss: 0.1642\n",
      "Epoch [61/100], Step [100/143], Loss: 0.0679\n",
      "Epoch [61/100], Step [120/143], Loss: 0.0840\n",
      "Epoch [61/100], Step [140/143], Loss: 0.2556\n",
      "Epoch [62/100], Step [20/143], Loss: 0.2163\n",
      "Epoch [62/100], Step [40/143], Loss: 0.0933\n",
      "Epoch [62/100], Step [60/143], Loss: 0.0833\n",
      "Epoch [62/100], Step [80/143], Loss: 0.1603\n",
      "Epoch [62/100], Step [100/143], Loss: 0.0666\n",
      "Epoch [62/100], Step [120/143], Loss: 0.0821\n",
      "Epoch [62/100], Step [140/143], Loss: 0.2530\n",
      "Epoch [63/100], Step [20/143], Loss: 0.2154\n",
      "Epoch [63/100], Step [40/143], Loss: 0.0912\n",
      "Epoch [63/100], Step [60/143], Loss: 0.0816\n",
      "Epoch [63/100], Step [80/143], Loss: 0.1564\n",
      "Epoch [63/100], Step [100/143], Loss: 0.0652\n",
      "Epoch [63/100], Step [120/143], Loss: 0.0804\n",
      "Epoch [63/100], Step [140/143], Loss: 0.2505\n",
      "Epoch [64/100], Step [20/143], Loss: 0.2140\n",
      "Epoch [64/100], Step [40/143], Loss: 0.0892\n",
      "Epoch [64/100], Step [60/143], Loss: 0.0799\n",
      "Epoch [64/100], Step [80/143], Loss: 0.1528\n",
      "Epoch [64/100], Step [100/143], Loss: 0.0639\n",
      "Epoch [64/100], Step [120/143], Loss: 0.0788\n",
      "Epoch [64/100], Step [140/143], Loss: 0.2481\n",
      "Epoch [65/100], Step [20/143], Loss: 0.2129\n",
      "Epoch [65/100], Step [40/143], Loss: 0.0873\n",
      "Epoch [65/100], Step [60/143], Loss: 0.0783\n",
      "Epoch [65/100], Step [80/143], Loss: 0.1492\n",
      "Epoch [65/100], Step [100/143], Loss: 0.0627\n",
      "Epoch [65/100], Step [120/143], Loss: 0.0772\n",
      "Epoch [65/100], Step [140/143], Loss: 0.2458\n",
      "Epoch [66/100], Step [20/143], Loss: 0.2118\n",
      "Epoch [66/100], Step [40/143], Loss: 0.0854\n",
      "Epoch [66/100], Step [60/143], Loss: 0.0767\n",
      "Epoch [66/100], Step [80/143], Loss: 0.1459\n",
      "Epoch [66/100], Step [100/143], Loss: 0.0615\n",
      "Epoch [66/100], Step [120/143], Loss: 0.0756\n",
      "Epoch [66/100], Step [140/143], Loss: 0.2437\n",
      "Epoch [67/100], Step [20/143], Loss: 0.2107\n",
      "Epoch [67/100], Step [40/143], Loss: 0.0836\n",
      "Epoch [67/100], Step [60/143], Loss: 0.0752\n",
      "Epoch [67/100], Step [80/143], Loss: 0.1426\n",
      "Epoch [67/100], Step [100/143], Loss: 0.0603\n",
      "Epoch [67/100], Step [120/143], Loss: 0.0741\n",
      "Epoch [67/100], Step [140/143], Loss: 0.2416\n",
      "Epoch [68/100], Step [20/143], Loss: 0.2095\n",
      "Epoch [68/100], Step [40/143], Loss: 0.0819\n",
      "Epoch [68/100], Step [60/143], Loss: 0.0738\n",
      "Epoch [68/100], Step [80/143], Loss: 0.1396\n",
      "Epoch [68/100], Step [100/143], Loss: 0.0592\n",
      "Epoch [68/100], Step [120/143], Loss: 0.0727\n",
      "Epoch [68/100], Step [140/143], Loss: 0.2396\n",
      "Epoch [69/100], Step [20/143], Loss: 0.2086\n",
      "Epoch [69/100], Step [40/143], Loss: 0.0803\n",
      "Epoch [69/100], Step [60/143], Loss: 0.0724\n",
      "Epoch [69/100], Step [80/143], Loss: 0.1365\n",
      "Epoch [69/100], Step [100/143], Loss: 0.0582\n",
      "Epoch [69/100], Step [120/143], Loss: 0.0713\n",
      "Epoch [69/100], Step [140/143], Loss: 0.2377\n",
      "Epoch [70/100], Step [20/143], Loss: 0.2077\n",
      "Epoch [70/100], Step [40/143], Loss: 0.0787\n",
      "Epoch [70/100], Step [60/143], Loss: 0.0710\n",
      "Epoch [70/100], Step [80/143], Loss: 0.1338\n",
      "Epoch [70/100], Step [100/143], Loss: 0.0571\n",
      "Epoch [70/100], Step [120/143], Loss: 0.0700\n",
      "Epoch [70/100], Step [140/143], Loss: 0.2359\n",
      "Epoch [71/100], Step [20/143], Loss: 0.2067\n",
      "Epoch [71/100], Step [40/143], Loss: 0.0772\n",
      "Epoch [71/100], Step [60/143], Loss: 0.0698\n",
      "Epoch [71/100], Step [80/143], Loss: 0.1310\n",
      "Epoch [71/100], Step [100/143], Loss: 0.0561\n",
      "Epoch [71/100], Step [120/143], Loss: 0.0687\n",
      "Epoch [71/100], Step [140/143], Loss: 0.2341\n",
      "Epoch [72/100], Step [20/143], Loss: 0.2058\n",
      "Epoch [72/100], Step [40/143], Loss: 0.0757\n",
      "Epoch [72/100], Step [60/143], Loss: 0.0685\n",
      "Epoch [72/100], Step [80/143], Loss: 0.1284\n",
      "Epoch [72/100], Step [100/143], Loss: 0.0552\n",
      "Epoch [72/100], Step [120/143], Loss: 0.0675\n",
      "Epoch [72/100], Step [140/143], Loss: 0.2325\n",
      "Epoch [73/100], Step [20/143], Loss: 0.2048\n",
      "Epoch [73/100], Step [40/143], Loss: 0.0743\n",
      "Epoch [73/100], Step [60/143], Loss: 0.0673\n",
      "Epoch [73/100], Step [80/143], Loss: 0.1258\n",
      "Epoch [73/100], Step [100/143], Loss: 0.0542\n",
      "Epoch [73/100], Step [120/143], Loss: 0.0663\n",
      "Epoch [73/100], Step [140/143], Loss: 0.2308\n",
      "Epoch [74/100], Step [20/143], Loss: 0.2042\n",
      "Epoch [74/100], Step [40/143], Loss: 0.0729\n",
      "Epoch [74/100], Step [60/143], Loss: 0.0661\n",
      "Epoch [74/100], Step [80/143], Loss: 0.1234\n",
      "Epoch [74/100], Step [100/143], Loss: 0.0533\n",
      "Epoch [74/100], Step [120/143], Loss: 0.0652\n",
      "Epoch [74/100], Step [140/143], Loss: 0.2293\n",
      "Epoch [75/100], Step [20/143], Loss: 0.2033\n",
      "Epoch [75/100], Step [40/143], Loss: 0.0716\n",
      "Epoch [75/100], Step [60/143], Loss: 0.0650\n",
      "Epoch [75/100], Step [80/143], Loss: 0.1210\n",
      "Epoch [75/100], Step [100/143], Loss: 0.0525\n",
      "Epoch [75/100], Step [120/143], Loss: 0.0641\n",
      "Epoch [75/100], Step [140/143], Loss: 0.2277\n",
      "Epoch [76/100], Step [20/143], Loss: 0.2024\n",
      "Epoch [76/100], Step [40/143], Loss: 0.0703\n",
      "Epoch [76/100], Step [60/143], Loss: 0.0639\n",
      "Epoch [76/100], Step [80/143], Loss: 0.1187\n",
      "Epoch [76/100], Step [100/143], Loss: 0.0516\n",
      "Epoch [76/100], Step [120/143], Loss: 0.0630\n",
      "Epoch [76/100], Step [140/143], Loss: 0.2263\n",
      "Epoch [77/100], Step [20/143], Loss: 0.2017\n",
      "Epoch [77/100], Step [40/143], Loss: 0.0691\n",
      "Epoch [77/100], Step [60/143], Loss: 0.0629\n",
      "Epoch [77/100], Step [80/143], Loss: 0.1165\n",
      "Epoch [77/100], Step [100/143], Loss: 0.0508\n",
      "Epoch [77/100], Step [120/143], Loss: 0.0620\n",
      "Epoch [77/100], Step [140/143], Loss: 0.2249\n",
      "Epoch [78/100], Step [20/143], Loss: 0.2009\n",
      "Epoch [78/100], Step [40/143], Loss: 0.0679\n",
      "Epoch [78/100], Step [60/143], Loss: 0.0619\n",
      "Epoch [78/100], Step [80/143], Loss: 0.1144\n",
      "Epoch [78/100], Step [100/143], Loss: 0.0500\n",
      "Epoch [78/100], Step [120/143], Loss: 0.0610\n",
      "Epoch [78/100], Step [140/143], Loss: 0.2236\n",
      "Epoch [79/100], Step [20/143], Loss: 0.2003\n",
      "Epoch [79/100], Step [40/143], Loss: 0.0667\n",
      "Epoch [79/100], Step [60/143], Loss: 0.0609\n",
      "Epoch [79/100], Step [80/143], Loss: 0.1124\n",
      "Epoch [79/100], Step [100/143], Loss: 0.0492\n",
      "Epoch [79/100], Step [120/143], Loss: 0.0600\n",
      "Epoch [79/100], Step [140/143], Loss: 0.2223\n",
      "Epoch [80/100], Step [20/143], Loss: 0.1995\n",
      "Epoch [80/100], Step [40/143], Loss: 0.0656\n",
      "Epoch [80/100], Step [60/143], Loss: 0.0599\n",
      "Epoch [80/100], Step [80/143], Loss: 0.1104\n",
      "Epoch [80/100], Step [100/143], Loss: 0.0485\n",
      "Epoch [80/100], Step [120/143], Loss: 0.0591\n",
      "Epoch [80/100], Step [140/143], Loss: 0.2210\n",
      "Epoch [81/100], Step [20/143], Loss: 0.1988\n",
      "Epoch [81/100], Step [40/143], Loss: 0.0645\n",
      "Epoch [81/100], Step [60/143], Loss: 0.0590\n",
      "Epoch [81/100], Step [80/143], Loss: 0.1085\n",
      "Epoch [81/100], Step [100/143], Loss: 0.0478\n",
      "Epoch [81/100], Step [120/143], Loss: 0.0582\n",
      "Epoch [81/100], Step [140/143], Loss: 0.2198\n",
      "Epoch [82/100], Step [20/143], Loss: 0.1982\n",
      "Epoch [82/100], Step [40/143], Loss: 0.0635\n",
      "Epoch [82/100], Step [60/143], Loss: 0.0581\n",
      "Epoch [82/100], Step [80/143], Loss: 0.1066\n",
      "Epoch [82/100], Step [100/143], Loss: 0.0471\n",
      "Epoch [82/100], Step [120/143], Loss: 0.0573\n",
      "Epoch [82/100], Step [140/143], Loss: 0.2186\n",
      "Epoch [83/100], Step [20/143], Loss: 0.1976\n",
      "Epoch [83/100], Step [40/143], Loss: 0.0626\n",
      "Epoch [83/100], Step [60/143], Loss: 0.0572\n",
      "Epoch [83/100], Step [80/143], Loss: 0.1048\n",
      "Epoch [83/100], Step [100/143], Loss: 0.0464\n",
      "Epoch [83/100], Step [120/143], Loss: 0.0564\n",
      "Epoch [83/100], Step [140/143], Loss: 0.2174\n",
      "Epoch [84/100], Step [20/143], Loss: 0.1969\n",
      "Epoch [84/100], Step [40/143], Loss: 0.0617\n",
      "Epoch [84/100], Step [60/143], Loss: 0.0563\n",
      "Epoch [84/100], Step [80/143], Loss: 0.1031\n",
      "Epoch [84/100], Step [100/143], Loss: 0.0457\n",
      "Epoch [84/100], Step [120/143], Loss: 0.0556\n",
      "Epoch [84/100], Step [140/143], Loss: 0.2163\n",
      "Epoch [85/100], Step [20/143], Loss: 0.1963\n",
      "Epoch [85/100], Step [40/143], Loss: 0.0613\n",
      "Epoch [85/100], Step [60/143], Loss: 0.0555\n",
      "Epoch [85/100], Step [80/143], Loss: 0.1014\n",
      "Epoch [85/100], Step [100/143], Loss: 0.0451\n",
      "Epoch [85/100], Step [120/143], Loss: 0.0548\n",
      "Epoch [85/100], Step [140/143], Loss: 0.2153\n",
      "Epoch [86/100], Step [20/143], Loss: 0.1957\n",
      "Epoch [86/100], Step [40/143], Loss: 0.0596\n",
      "Epoch [86/100], Step [60/143], Loss: 0.0547\n",
      "Epoch [86/100], Step [80/143], Loss: 0.0998\n",
      "Epoch [86/100], Step [100/143], Loss: 0.0445\n",
      "Epoch [86/100], Step [120/143], Loss: 0.0540\n",
      "Epoch [86/100], Step [140/143], Loss: 0.2142\n",
      "Epoch [87/100], Step [20/143], Loss: 0.1951\n",
      "Epoch [87/100], Step [40/143], Loss: 0.0589\n",
      "Epoch [87/100], Step [60/143], Loss: 0.0539\n",
      "Epoch [87/100], Step [80/143], Loss: 0.0982\n",
      "Epoch [87/100], Step [100/143], Loss: 0.0439\n",
      "Epoch [87/100], Step [120/143], Loss: 0.0533\n",
      "Epoch [87/100], Step [140/143], Loss: 0.2132\n",
      "Epoch [88/100], Step [20/143], Loss: 0.1946\n",
      "Epoch [88/100], Step [40/143], Loss: 0.0581\n",
      "Epoch [88/100], Step [60/143], Loss: 0.0532\n",
      "Epoch [88/100], Step [80/143], Loss: 0.0967\n",
      "Epoch [88/100], Step [100/143], Loss: 0.0433\n",
      "Epoch [88/100], Step [120/143], Loss: 0.0525\n",
      "Epoch [88/100], Step [140/143], Loss: 0.2123\n",
      "Epoch [89/100], Step [20/143], Loss: 0.1940\n",
      "Epoch [89/100], Step [40/143], Loss: 0.0579\n",
      "Epoch [89/100], Step [60/143], Loss: 0.0524\n",
      "Epoch [89/100], Step [80/143], Loss: 0.0952\n",
      "Epoch [89/100], Step [100/143], Loss: 0.0427\n",
      "Epoch [89/100], Step [120/143], Loss: 0.0518\n",
      "Epoch [89/100], Step [140/143], Loss: 0.2113\n",
      "Epoch [90/100], Step [20/143], Loss: 0.1935\n",
      "Epoch [90/100], Step [40/143], Loss: 0.0562\n",
      "Epoch [90/100], Step [60/143], Loss: 0.0517\n",
      "Epoch [90/100], Step [80/143], Loss: 0.0937\n",
      "Epoch [90/100], Step [100/143], Loss: 0.0421\n",
      "Epoch [90/100], Step [120/143], Loss: 0.0511\n",
      "Epoch [90/100], Step [140/143], Loss: 0.2104\n",
      "Epoch [91/100], Step [20/143], Loss: 0.1930\n",
      "Epoch [91/100], Step [40/143], Loss: 0.0555\n",
      "Epoch [91/100], Step [60/143], Loss: 0.0510\n",
      "Epoch [91/100], Step [80/143], Loss: 0.0923\n",
      "Epoch [91/100], Step [100/143], Loss: 0.0416\n",
      "Epoch [91/100], Step [120/143], Loss: 0.0504\n",
      "Epoch [91/100], Step [140/143], Loss: 0.2095\n",
      "Epoch [92/100], Step [20/143], Loss: 0.1924\n",
      "Epoch [92/100], Step [40/143], Loss: 0.0549\n",
      "Epoch [92/100], Step [60/143], Loss: 0.0503\n",
      "Epoch [92/100], Step [80/143], Loss: 0.0910\n",
      "Epoch [92/100], Step [100/143], Loss: 0.0410\n",
      "Epoch [92/100], Step [120/143], Loss: 0.0498\n",
      "Epoch [92/100], Step [140/143], Loss: 0.2086\n",
      "Epoch [93/100], Step [20/143], Loss: 0.1920\n",
      "Epoch [93/100], Step [40/143], Loss: 0.0550\n",
      "Epoch [93/100], Step [60/143], Loss: 0.0496\n",
      "Epoch [93/100], Step [80/143], Loss: 0.0896\n",
      "Epoch [93/100], Step [100/143], Loss: 0.0405\n",
      "Epoch [93/100], Step [120/143], Loss: 0.0491\n",
      "Epoch [93/100], Step [140/143], Loss: 0.2078\n",
      "Epoch [94/100], Step [20/143], Loss: 0.1915\n",
      "Epoch [94/100], Step [40/143], Loss: 0.0531\n",
      "Epoch [94/100], Step [60/143], Loss: 0.0490\n",
      "Epoch [94/100], Step [80/143], Loss: 0.0884\n",
      "Epoch [94/100], Step [100/143], Loss: 0.0400\n",
      "Epoch [94/100], Step [120/143], Loss: 0.0485\n",
      "Epoch [94/100], Step [140/143], Loss: 0.2070\n",
      "Epoch [95/100], Step [20/143], Loss: 0.1910\n",
      "Epoch [95/100], Step [40/143], Loss: 0.0524\n",
      "Epoch [95/100], Step [60/143], Loss: 0.0484\n",
      "Epoch [95/100], Step [80/143], Loss: 0.0871\n",
      "Epoch [95/100], Step [100/143], Loss: 0.0395\n",
      "Epoch [95/100], Step [120/143], Loss: 0.0479\n",
      "Epoch [95/100], Step [140/143], Loss: 0.2062\n",
      "Epoch [96/100], Step [20/143], Loss: 0.1906\n",
      "Epoch [96/100], Step [40/143], Loss: 0.0517\n",
      "Epoch [96/100], Step [60/143], Loss: 0.0477\n",
      "Epoch [96/100], Step [80/143], Loss: 0.0859\n",
      "Epoch [96/100], Step [100/143], Loss: 0.0390\n",
      "Epoch [96/100], Step [120/143], Loss: 0.0473\n",
      "Epoch [96/100], Step [140/143], Loss: 0.2054\n",
      "Epoch [97/100], Step [20/143], Loss: 0.1901\n",
      "Epoch [97/100], Step [40/143], Loss: 0.0510\n",
      "Epoch [97/100], Step [60/143], Loss: 0.0471\n",
      "Epoch [97/100], Step [80/143], Loss: 0.0847\n",
      "Epoch [97/100], Step [100/143], Loss: 0.0386\n",
      "Epoch [97/100], Step [120/143], Loss: 0.0467\n",
      "Epoch [97/100], Step [140/143], Loss: 0.2047\n",
      "Epoch [98/100], Step [20/143], Loss: 0.1899\n",
      "Epoch [98/100], Step [40/143], Loss: 0.0504\n",
      "Epoch [98/100], Step [60/143], Loss: 0.0465\n",
      "Epoch [98/100], Step [80/143], Loss: 0.0836\n",
      "Epoch [98/100], Step [100/143], Loss: 0.0381\n",
      "Epoch [98/100], Step [120/143], Loss: 0.0462\n",
      "Epoch [98/100], Step [140/143], Loss: 0.2039\n",
      "Epoch [99/100], Step [20/143], Loss: 0.1893\n",
      "Epoch [99/100], Step [40/143], Loss: 0.0499\n",
      "Epoch [99/100], Step [60/143], Loss: 0.0460\n",
      "Epoch [99/100], Step [80/143], Loss: 0.0824\n",
      "Epoch [99/100], Step [100/143], Loss: 0.0376\n",
      "Epoch [99/100], Step [120/143], Loss: 0.0456\n",
      "Epoch [99/100], Step [140/143], Loss: 0.2032\n",
      "Epoch [100/100], Step [20/143], Loss: 0.1890\n",
      "Epoch [100/100], Step [40/143], Loss: 0.0502\n",
      "Epoch [100/100], Step [60/143], Loss: 0.0454\n",
      "Epoch [100/100], Step [80/143], Loss: 0.0813\n",
      "Epoch [100/100], Step [100/143], Loss: 0.0372\n",
      "Epoch [100/100], Step [120/143], Loss: 0.0451\n",
      "Epoch [100/100], Step [140/143], Loss: 0.2025\n"
     ]
    }
   ],
   "execution_count": 18
  },
  {
   "cell_type": "code",
   "id": "8a4e36b033918def",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T06:12:34.777897Z",
     "start_time": "2025-07-04T06:12:34.771201Z"
    }
   },
   "source": [
    "model.eval()\n",
    "\n",
    "def generate_text(content, steps, temperature=0.8):\n",
    "    \"\"\"Autoregressively generate `steps` characters following `content`.\n",
    "\n",
    "    Args:\n",
    "        content: seed string; every character must be in the vocabulary.\n",
    "        steps: number of characters to generate.\n",
    "        temperature: softmax temperature; lower => more deterministic.\n",
    "\n",
    "    Returns:\n",
    "        The seed string concatenated with the generated characters.\n",
    "    \"\"\"\n",
    "    words = list(content)\n",
    "\n",
    "    for _ in range(steps):\n",
    "        # Condition on the last SEQ_LENGTH characters only (sliding window).\n",
    "        window = words[-SEQ_LENGTH:]\n",
    "        unknown = [w for w in window if w not in word_to_idx]\n",
    "        if unknown:\n",
    "            # Fail with a clear message instead of a raw KeyError below.\n",
    "            raise ValueError(f\"characters not in vocabulary: {unknown}\")\n",
    "        inputs = torch.LongTensor([word_to_idx[w] for w in window]).view(1, -1)  # (1, seq_len)\n",
    "\n",
    "        with torch.no_grad():\n",
    "            # BUG FIX: the original carried `hidden` across iterations while\n",
    "            # ALSO re-feeding the full overlapping window, so the same context\n",
    "            # was effectively processed twice per step. With a full sliding\n",
    "            # window the hidden state must start fresh each step (pass None).\n",
    "            outputs, _ = model(inputs, None)\n",
    "            # Only the last time step matters: it holds the logits over the\n",
    "            # vocabulary for the next character.\n",
    "            last_output = outputs[0, -1, :]\n",
    "\n",
    "        # Temperature scaling: dividing logits by temperature < 1 sharpens\n",
    "        # the distribution (more greedy); > 1 flattens it (more random).\n",
    "        probs = torch.softmax(last_output / temperature, dim=-1)\n",
    "\n",
    "        # Multinomial sampling: draw one index with probability proportional\n",
    "        # to its softmax weight (higher probability => more likely chosen).\n",
    "        result_idx = torch.multinomial(probs, 1).item()\n",
    "\n",
    "        # Append the sampled character so it becomes part of the next window.\n",
    "        words.append(idx_to_word[result_idx])\n",
    "\n",
    "    return ''.join(words)\n",
    "\n",
    "\n",
    "# Generate 20 characters; lower temperature => less random output.\n",
    "print(generate_text(\"鹰击\", 20, temperature=0.1))"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "鹰击长空，鱼翔浅底，万类霜天竞自由。\n",
      "怅寥廓\n"
     ]
    }
   ],
   "execution_count": 147
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T06:06:59.057795Z",
     "start_time": "2025-07-04T06:06:59.051459Z"
    }
   },
   "cell_type": "code",
   "source": [
    "# Softmax of raw logits [8, 2, 2]: the largest logit dominates the mass.\n",
    "probs = torch.tensor([8, 2, 2], dtype=torch.float).softmax(dim=-1)\n",
    "probs"
   ],
   "id": "7d45fe968e113518",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([0.9951, 0.0025, 0.0025])"
      ]
     },
     "execution_count": 81,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 81
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-04T06:11:00.269820Z",
     "start_time": "2025-07-04T06:11:00.263899Z"
    }
   },
   "cell_type": "code",
   "source": [
    "# Dividing the logits by temperature 0.1 sharpens the distribution so\n",
    "# much that the largest entry takes essentially all of the probability.\n",
    "probs = (torch.tensor([8, 2, 2], dtype=torch.float) / 0.1).softmax(dim=-1)\n",
    "probs"
   ],
   "id": "dd7fb4598cd0135a",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([1.0000e+00, 8.7565e-27, 8.7565e-27])"
      ]
     },
     "execution_count": 88,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 88
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
