{
 "cells": [
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:26:53.479857Z",
     "start_time": "2025-07-16T07:26:53.120294Z"
    }
   },
   "cell_type": "code",
    "source": [
     "from transformers import GPT2Config, GPT2LMHeadModel\n",
     "\n",
     "# Tiny GPT-2 variant for quick CPU experiments: 2 layers, 2 heads, 192-dim embeddings.\n",
     "config = GPT2Config(n_layer=2, n_head=2, n_embd=192)\n",
     "# Alternative, larger configurations (uncomment one to use instead):\n",
     "# config = GPT2Config(n_layer=4, n_head=4, n_embd=768)\n",
     "# config = GPT2Config()  # default GPT-2 small\n",
     "gpt_model = GPT2LMHeadModel(config)  # randomly initialized weights, not pretrained\n",
     "print(gpt_model)"
    ],
   "id": "2422b17b8ef27a54",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "GPT2LMHeadModel(\n",
      "  (transformer): GPT2Model(\n",
      "    (wte): Embedding(50257, 192)\n",
      "    (wpe): Embedding(1024, 192)\n",
      "    (drop): Dropout(p=0.1, inplace=False)\n",
      "    (h): ModuleList(\n",
      "      (0-1): 2 x GPT2Block(\n",
      "        (ln_1): LayerNorm((192,), eps=1e-05, elementwise_affine=True)\n",
      "        (attn): GPT2Attention(\n",
      "          (c_attn): Conv1D(nf=576, nx=192)\n",
      "          (c_proj): Conv1D(nf=192, nx=192)\n",
      "          (attn_dropout): Dropout(p=0.1, inplace=False)\n",
      "          (resid_dropout): Dropout(p=0.1, inplace=False)\n",
      "        )\n",
      "        (ln_2): LayerNorm((192,), eps=1e-05, elementwise_affine=True)\n",
      "        (mlp): GPT2MLP(\n",
      "          (c_fc): Conv1D(nf=768, nx=192)\n",
      "          (c_proj): Conv1D(nf=192, nx=768)\n",
      "          (act): NewGELUActivation()\n",
      "          (dropout): Dropout(p=0.1, inplace=False)\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (ln_f): LayerNorm((192,), eps=1e-05, elementwise_affine=True)\n",
      "  )\n",
      "  (lm_head): Linear(in_features=192, out_features=50257, bias=False)\n",
      ")\n"
     ]
    }
   ],
   "execution_count": 25
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:26:55.033555Z",
     "start_time": "2025-07-16T07:26:53.535558Z"
    }
   },
   "cell_type": "code",
    "source": [
     "# Load the GPT-2 BPE tokenizer; ModelScope's AutoTokenizer downloads it from the ModelScope hub mirror.\n",
     "from modelscope import AutoTokenizer\n",
     "gpt_tokenizer = AutoTokenizer.from_pretrained(\"openai-community/gpt2\")"
    ],
   "id": "9d28ab81b30c7678",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading Model from https://www.modelscope.cn to directory: /Users/dadudu/.cache/modelscope/hub/models/openai-community/gpt2\n"
     ]
    }
   ],
   "execution_count": 26
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:26:57.752649Z",
     "start_time": "2025-07-16T07:26:55.086772Z"
    }
   },
   "cell_type": "code",
    "source": [
     "import torch\n",
     "import torch.nn as nn\n",
     "from modelscope.msdatasets import MsDataset\n",
     "\n",
     "# Read the chinese_web_text.jsonl file (local JSONL corpus)\n",
     "dataset = MsDataset.load('chinese_web_text.jsonl')\n",
     "\n",
     "# Keep only the first record — this is a toy overfitting demo, not real training.\n",
     "data = dataset.to_hf_dataset().select(range(1))\n",
     "\n",
     "data[:1]"
    ],
   "id": "e41f497c889a41b8",
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-07-16 15:26:55,087 - modelscope - WARNING - Use trust_remote_code=True. Will invoke codes from chinese_web_text.jsonl. Please make sure that you can trust the external codes.\n",
      "2025-07-16 15:26:55,088 - modelscope - WARNING - Use trust_remote_code=True. Will invoke codes from json. Please make sure that you can trust the external codes.\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "{'text': [['PVC片材石塑地板、PVC卷材、PVC防静电地板、PVC锁扣地板、高架地板以及悬浮拼装运动地板和微晶石地板等。公司自成立伊始一直专注于地面材料领域的研究与开发，在不断引进国外先进的技术的同时，积极投入产品研发。公司旗下有“捷装”和“心牧”两大品牌，先后推出运动PVC地板、同质透心PVC地板、致密PVC地板、吸音PVC地板、装饰PVC地板、工业PVC地板、PVC防静电地板、防滑PVC地板等，同时代理金象产品橡胶地板，PVC弹性卷材地板、PVC同质透心卷材地板。\\n公司产品获得“3C认证（中国强制产品认证）”、“欧盟CE认证”、\"中国船级社CCS认证\"、\"铁道部科学研究院资质认证\"、\"ISO9000认证\"、\"国家质量检测CNAS\"等特殊领域资格认证的高新技术企业。产品广泛应用于室内家庭、医院、学校、办公楼、工厂、公共场所、超市、商业、体育场馆等各种场所，同时出口到欧美、中东、俄罗斯、日韩等国家。随着市场的发展，公司不断努力，在专注于产品本身的同时运用现代化管理，致力于服务领域，以市场为导向，服务客户为核心的价值观，实现快速、稳定的发展。到目前为止公司营销服务网络已遍布全国，在全国多地均设有办事处或分公司。']],\n",
       " 'info': [{'url': 'https://zh-cn.eturbonews.com/3005332/%E5%8A%A0%E6%8B%BF%E5%A4%A7%E6%B8%B8%E5%AE%A2%E6%96%B0%E8%BE%B9%E5%A2%83%E8%A7%84%E5%88%99-%E7%BE%8E%E5%9B%BD-10-%E4%B8%AA%E5%B7%9E%E5%B0%86%E5%BC%A0%E5%BC%80%E5%8F%8C%E8%87%82%E6%AC%A2%E8%BF%8E%E5%8A%A0%E6%8B%BF%E5%A4%A7%E4%BA%BA/',\n",
       "   'title': '美国10个州将张开双臂欢迎加拿大人',\n",
       "   'source_domain': 'zh-cn.eturbonews.com'}],\n",
       " 'score': [[0.42278623290512307]]}"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 27
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:26:58.107649Z",
     "start_time": "2025-07-16T07:26:58.105100Z"
    }
   },
   "cell_type": "code",
    "source": [
     "# GPT-2's byte-level BPE has no dedicated Chinese tokens, so each character is split\n",
     "# into several byte tokens (6 characters -> 11 token ids here).\n",
     "gpt_tokenizer.encode(\"我今天很开心\")"
    ],
   "id": "4d969a85d78c089f",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[22755, 239, 20015, 232, 25465, 36181, 230, 28156, 222, 33232, 225]"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 28
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:26:58.166551Z",
     "start_time": "2025-07-16T07:26:58.158461Z"
    }
   },
   "cell_type": "code",
   "source": [
    "from torch.utils.data import Dataset, DataLoader\n",
    "\n",
    "class ZhouyuDataset(Dataset):\n",
    "    def __init__(self, data, tokenizer, block_size=64):\n",
    "        self.tokenizer = tokenizer\n",
    "        self.block_size = block_size\n",
    "        self.blocks = []\n",
    "\n",
    "        for i in range(len(data)):\n",
    "            text = data[i]['text'][0] + gpt_tokenizer.eos_token\n",
    "            tokenized = tokenizer.encode(text)\n",
    "\n",
    "            # 分割成长度为block_size的块\n",
    "            for i in range(0, len(tokenized) - block_size + 1, block_size):\n",
    "                self.blocks.append(tokenized[i:i + block_size])\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.blocks)\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        input_ids = self.blocks[idx]\n",
    "        return torch.tensor(input_ids)\n",
    "\n",
    "\n",
    "dataset = ZhouyuDataset(data, gpt_tokenizer)\n",
    "data_loader = DataLoader(dataset, batch_size=2, shuffle=False)\n",
    "for input_ids in data_loader:\n",
    "    print(input_ids)\n",
    "    break"
   ],
   "id": "fe1dd619a0366028",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[   47, 15922, 31965,   229, 30266,   238,   163,   253,   111,   161,\n",
      "            94,   239, 28839,   108, 30266,   123, 23513,    47, 15922, 39355,\n",
      "           115, 30266,   238, 23513,    47, 15922,   165,   246,   110,   165,\n",
      "           251,   247, 18796,   113, 28839,   108, 30266,   123, 23513,    47,\n",
      "         15922,   165,   242,   223, 33699,    96, 28839,   108, 30266,   123,\n",
      "         23513,   165, 45865,   162,   252,   114, 28839,   108, 30266,   123,\n",
      "         20015,    98, 20998,   232],\n",
      "        [  162,  8955, 38184,   106,   162,   233,   120, 35318, 32573,   238,\n",
      "         27950,   101, 28839,   108, 30266,   123,   161,   240,   234, 36181,\n",
      "           106,   162,   247,   114,   163,   253,   111, 28839,   108, 30266,\n",
      "           123,   163,   255,   231, 16764, 17739,   105, 20998,   116,   164,\n",
      "           229,   103, 22755,   238, 44165,   233, 27670,   232, 34650,   233,\n",
      "         31660, 33566,   112, 10310,   241, 37345,   101, 12859,   236, 28839,\n",
      "           108,   165,   251,    95]])\n"
     ]
    }
   ],
   "execution_count": 29
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:26:58.222744Z",
     "start_time": "2025-07-16T07:26:58.220306Z"
    }
   },
   "cell_type": "code",
    "source": [
     "# Number of batches per epoch (16 blocks / batch_size 2 = 8)\n",
     "len(data_loader)"
    ],
   "id": "8f44244b0dc8df1",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "8"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 30
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:42:36.758619Z",
     "start_time": "2025-07-16T07:42:03.888492Z"
    }
   },
   "cell_type": "code",
   "source": [
    "import time\n",
    "\n",
    "optimizer = torch.optim.Adam(gpt_model.parameters(), lr=0.001)\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "gpt_model.to(device)\n",
    "\n",
    "EPOCHS = 100\n",
    "\n",
    "# 记录训练时长\n",
    "start_time = time.time()\n",
    "for epoch in range(EPOCHS):\n",
    "    for i, input_ids in enumerate(data_loader):\n",
    "        input_ids = input_ids.to(device)\n",
    "\n",
    "        outputs = gpt_model(\n",
    "            input_ids, labels=input_ids\n",
    "        )\n",
    "\n",
    "        loss = outputs.loss\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        # 每100步打印一次损失值\n",
    "        if (i + 1) % 4 == 0:\n",
    "            print('Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}'\n",
    "                  .format(epoch + 1, EPOCHS, i + 1, len(data_loader), loss.item()))\n",
    "\n",
    "end_time = time.time()\n",
    "print('训练完成，耗时：{}'.format(end_time - start_time))"
   ],
   "id": "2eb65b25b022f95f",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch [1/100], Step [4/8], Loss: 0.0813\n",
      "Epoch [1/100], Step [8/8], Loss: 0.2379\n",
      "Epoch [2/100], Step [4/8], Loss: 0.1169\n",
      "Epoch [2/100], Step [8/8], Loss: 0.1320\n",
      "Epoch [3/100], Step [4/8], Loss: 0.0387\n",
      "Epoch [3/100], Step [8/8], Loss: 0.0277\n",
      "Epoch [4/100], Step [4/8], Loss: 0.0213\n",
      "Epoch [4/100], Step [8/8], Loss: 0.0151\n",
      "Epoch [5/100], Step [4/8], Loss: 0.0141\n",
      "Epoch [5/100], Step [8/8], Loss: 0.0060\n",
      "Epoch [6/100], Step [4/8], Loss: 0.0074\n",
      "Epoch [6/100], Step [8/8], Loss: 0.0057\n",
      "Epoch [7/100], Step [4/8], Loss: 0.0198\n",
      "Epoch [7/100], Step [8/8], Loss: 0.0147\n",
      "Epoch [8/100], Step [4/8], Loss: 0.0325\n",
      "Epoch [8/100], Step [8/8], Loss: 0.0140\n",
      "Epoch [9/100], Step [4/8], Loss: 0.0063\n",
      "Epoch [9/100], Step [8/8], Loss: 0.0067\n",
      "Epoch [10/100], Step [4/8], Loss: 0.0051\n",
      "Epoch [10/100], Step [8/8], Loss: 0.0049\n",
      "Epoch [11/100], Step [4/8], Loss: 0.0059\n",
      "Epoch [11/100], Step [8/8], Loss: 0.0027\n",
      "Epoch [12/100], Step [4/8], Loss: 0.0077\n",
      "Epoch [12/100], Step [8/8], Loss: 0.0100\n",
      "Epoch [13/100], Step [4/8], Loss: 0.0366\n",
      "Epoch [13/100], Step [8/8], Loss: 0.0040\n",
      "Epoch [14/100], Step [4/8], Loss: 0.0056\n",
      "Epoch [14/100], Step [8/8], Loss: 0.0024\n",
      "Epoch [15/100], Step [4/8], Loss: 0.0024\n",
      "Epoch [15/100], Step [8/8], Loss: 0.0018\n",
      "Epoch [16/100], Step [4/8], Loss: 0.0094\n",
      "Epoch [16/100], Step [8/8], Loss: 0.0018\n",
      "Epoch [17/100], Step [4/8], Loss: 0.0039\n",
      "Epoch [17/100], Step [8/8], Loss: 0.0023\n",
      "Epoch [18/100], Step [4/8], Loss: 0.0190\n",
      "Epoch [18/100], Step [8/8], Loss: 0.0021\n",
      "Epoch [19/100], Step [4/8], Loss: 0.0637\n",
      "Epoch [19/100], Step [8/8], Loss: 0.0356\n",
      "Epoch [20/100], Step [4/8], Loss: 0.0331\n",
      "Epoch [20/100], Step [8/8], Loss: 0.0356\n",
      "Epoch [21/100], Step [4/8], Loss: 0.0923\n",
      "Epoch [21/100], Step [8/8], Loss: 0.0693\n",
      "Epoch [22/100], Step [4/8], Loss: 0.0753\n",
      "Epoch [22/100], Step [8/8], Loss: 0.0227\n",
      "Epoch [23/100], Step [4/8], Loss: 0.0934\n",
      "Epoch [23/100], Step [8/8], Loss: 0.0758\n",
      "Epoch [24/100], Step [4/8], Loss: 0.0461\n",
      "Epoch [24/100], Step [8/8], Loss: 0.0905\n",
      "Epoch [25/100], Step [4/8], Loss: 0.0539\n",
      "Epoch [25/100], Step [8/8], Loss: 0.0162\n",
      "Epoch [26/100], Step [4/8], Loss: 0.0148\n",
      "Epoch [26/100], Step [8/8], Loss: 0.0032\n",
      "Epoch [27/100], Step [4/8], Loss: 0.1039\n",
      "Epoch [27/100], Step [8/8], Loss: 0.0036\n",
      "Epoch [28/100], Step [4/8], Loss: 0.0278\n",
      "Epoch [28/100], Step [8/8], Loss: 0.0053\n",
      "Epoch [29/100], Step [4/8], Loss: 0.0147\n",
      "Epoch [29/100], Step [8/8], Loss: 0.0399\n",
      "Epoch [30/100], Step [4/8], Loss: 0.0392\n",
      "Epoch [30/100], Step [8/8], Loss: 0.0160\n",
      "Epoch [31/100], Step [4/8], Loss: 0.0111\n",
      "Epoch [31/100], Step [8/8], Loss: 0.0064\n",
      "Epoch [32/100], Step [4/8], Loss: 0.0763\n",
      "Epoch [32/100], Step [8/8], Loss: 0.0038\n",
      "Epoch [33/100], Step [4/8], Loss: 0.0221\n",
      "Epoch [33/100], Step [8/8], Loss: 0.0828\n",
      "Epoch [34/100], Step [4/8], Loss: 0.0052\n",
      "Epoch [34/100], Step [8/8], Loss: 0.0155\n",
      "Epoch [35/100], Step [4/8], Loss: 0.0098\n",
      "Epoch [35/100], Step [8/8], Loss: 0.0023\n",
      "Epoch [36/100], Step [4/8], Loss: 0.0120\n",
      "Epoch [36/100], Step [8/8], Loss: 0.0018\n",
      "Epoch [37/100], Step [4/8], Loss: 0.0043\n",
      "Epoch [37/100], Step [8/8], Loss: 0.0021\n",
      "Epoch [38/100], Step [4/8], Loss: 0.0165\n",
      "Epoch [38/100], Step [8/8], Loss: 0.0032\n",
      "Epoch [39/100], Step [4/8], Loss: 0.0128\n",
      "Epoch [39/100], Step [8/8], Loss: 0.0017\n",
      "Epoch [40/100], Step [4/8], Loss: 0.0029\n",
      "Epoch [40/100], Step [8/8], Loss: 0.0029\n",
      "Epoch [41/100], Step [4/8], Loss: 0.0058\n",
      "Epoch [41/100], Step [8/8], Loss: 0.0016\n",
      "Epoch [42/100], Step [4/8], Loss: 0.0026\n",
      "Epoch [42/100], Step [8/8], Loss: 0.0036\n",
      "Epoch [43/100], Step [4/8], Loss: 0.0065\n",
      "Epoch [43/100], Step [8/8], Loss: 0.0010\n",
      "Epoch [44/100], Step [4/8], Loss: 0.0185\n",
      "Epoch [44/100], Step [8/8], Loss: 0.0011\n",
      "Epoch [45/100], Step [4/8], Loss: 0.0103\n",
      "Epoch [45/100], Step [8/8], Loss: 0.0006\n",
      "Epoch [46/100], Step [4/8], Loss: 0.0046\n",
      "Epoch [46/100], Step [8/8], Loss: 0.0021\n",
      "Epoch [47/100], Step [4/8], Loss: 0.0124\n",
      "Epoch [47/100], Step [8/8], Loss: 0.0011\n",
      "Epoch [48/100], Step [4/8], Loss: 0.0033\n",
      "Epoch [48/100], Step [8/8], Loss: 0.0014\n",
      "Epoch [49/100], Step [4/8], Loss: 0.0027\n",
      "Epoch [49/100], Step [8/8], Loss: 0.0016\n",
      "Epoch [50/100], Step [4/8], Loss: 0.0041\n",
      "Epoch [50/100], Step [8/8], Loss: 0.0064\n",
      "Epoch [51/100], Step [4/8], Loss: 0.0081\n",
      "Epoch [51/100], Step [8/8], Loss: 0.0034\n",
      "Epoch [52/100], Step [4/8], Loss: 0.0045\n",
      "Epoch [52/100], Step [8/8], Loss: 0.0078\n",
      "Epoch [53/100], Step [4/8], Loss: 0.0041\n",
      "Epoch [53/100], Step [8/8], Loss: 0.0010\n",
      "Epoch [54/100], Step [4/8], Loss: 0.0153\n",
      "Epoch [54/100], Step [8/8], Loss: 0.0013\n",
      "Epoch [55/100], Step [4/8], Loss: 0.0182\n",
      "Epoch [55/100], Step [8/8], Loss: 0.0351\n",
      "Epoch [56/100], Step [4/8], Loss: 0.0040\n",
      "Epoch [56/100], Step [8/8], Loss: 0.0012\n",
      "Epoch [57/100], Step [4/8], Loss: 0.0039\n",
      "Epoch [57/100], Step [8/8], Loss: 0.0032\n",
      "Epoch [58/100], Step [4/8], Loss: 0.0018\n",
      "Epoch [58/100], Step [8/8], Loss: 0.0045\n",
      "Epoch [59/100], Step [4/8], Loss: 0.0040\n",
      "Epoch [59/100], Step [8/8], Loss: 0.0082\n",
      "Epoch [60/100], Step [4/8], Loss: 0.0064\n",
      "Epoch [60/100], Step [8/8], Loss: 0.0019\n",
      "Epoch [61/100], Step [4/8], Loss: 0.0012\n",
      "Epoch [61/100], Step [8/8], Loss: 0.0059\n",
      "Epoch [62/100], Step [4/8], Loss: 0.0073\n",
      "Epoch [62/100], Step [8/8], Loss: 0.0015\n",
      "Epoch [63/100], Step [4/8], Loss: 0.0013\n",
      "Epoch [63/100], Step [8/8], Loss: 0.0043\n",
      "Epoch [64/100], Step [4/8], Loss: 0.0020\n",
      "Epoch [64/100], Step [8/8], Loss: 0.0012\n",
      "Epoch [65/100], Step [4/8], Loss: 0.0008\n",
      "Epoch [65/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [66/100], Step [4/8], Loss: 0.0008\n",
      "Epoch [66/100], Step [8/8], Loss: 0.0012\n",
      "Epoch [67/100], Step [4/8], Loss: 0.0006\n",
      "Epoch [67/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [68/100], Step [4/8], Loss: 0.0009\n",
      "Epoch [68/100], Step [8/8], Loss: 0.0006\n",
      "Epoch [69/100], Step [4/8], Loss: 0.0007\n",
      "Epoch [69/100], Step [8/8], Loss: 0.0005\n",
      "Epoch [70/100], Step [4/8], Loss: 0.0027\n",
      "Epoch [70/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [71/100], Step [4/8], Loss: 0.0005\n",
      "Epoch [71/100], Step [8/8], Loss: 0.0002\n",
      "Epoch [72/100], Step [4/8], Loss: 0.0022\n",
      "Epoch [72/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [73/100], Step [4/8], Loss: 0.0006\n",
      "Epoch [73/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [74/100], Step [4/8], Loss: 0.0004\n",
      "Epoch [74/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [75/100], Step [4/8], Loss: 0.0006\n",
      "Epoch [75/100], Step [8/8], Loss: 0.0003\n",
      "Epoch [76/100], Step [4/8], Loss: 0.0022\n",
      "Epoch [76/100], Step [8/8], Loss: 0.0005\n",
      "Epoch [77/100], Step [4/8], Loss: 0.0006\n",
      "Epoch [77/100], Step [8/8], Loss: 0.0003\n",
      "Epoch [78/100], Step [4/8], Loss: 0.0005\n",
      "Epoch [78/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [79/100], Step [4/8], Loss: 0.0007\n",
      "Epoch [79/100], Step [8/8], Loss: 0.0002\n",
      "Epoch [80/100], Step [4/8], Loss: 0.0003\n",
      "Epoch [80/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [81/100], Step [4/8], Loss: 0.0004\n",
      "Epoch [81/100], Step [8/8], Loss: 0.0003\n",
      "Epoch [82/100], Step [4/8], Loss: 0.0038\n",
      "Epoch [82/100], Step [8/8], Loss: 0.0004\n",
      "Epoch [83/100], Step [4/8], Loss: 0.0008\n",
      "Epoch [83/100], Step [8/8], Loss: 0.0003\n",
      "Epoch [84/100], Step [4/8], Loss: 0.0009\n",
      "Epoch [84/100], Step [8/8], Loss: 0.0003\n",
      "Epoch [85/100], Step [4/8], Loss: 0.0013\n",
      "Epoch [85/100], Step [8/8], Loss: 0.0002\n",
      "Epoch [86/100], Step [4/8], Loss: 0.0362\n",
      "Epoch [86/100], Step [8/8], Loss: 0.0115\n",
      "Epoch [87/100], Step [4/8], Loss: 0.0058\n",
      "Epoch [87/100], Step [8/8], Loss: 0.0092\n",
      "Epoch [88/100], Step [4/8], Loss: 0.0036\n",
      "Epoch [88/100], Step [8/8], Loss: 0.0023\n",
      "Epoch [89/100], Step [4/8], Loss: 0.0025\n",
      "Epoch [89/100], Step [8/8], Loss: 0.0078\n",
      "Epoch [90/100], Step [4/8], Loss: 0.0462\n",
      "Epoch [90/100], Step [8/8], Loss: 0.0096\n",
      "Epoch [91/100], Step [4/8], Loss: 0.0115\n",
      "Epoch [91/100], Step [8/8], Loss: 0.0026\n",
      "Epoch [92/100], Step [4/8], Loss: 0.0167\n",
      "Epoch [92/100], Step [8/8], Loss: 0.0106\n",
      "Epoch [93/100], Step [4/8], Loss: 0.0868\n",
      "Epoch [93/100], Step [8/8], Loss: 0.0428\n",
      "Epoch [94/100], Step [4/8], Loss: 0.0861\n",
      "Epoch [94/100], Step [8/8], Loss: 0.2409\n",
      "Epoch [95/100], Step [4/8], Loss: 0.0592\n",
      "Epoch [95/100], Step [8/8], Loss: 0.2358\n",
      "Epoch [96/100], Step [4/8], Loss: 0.1064\n",
      "Epoch [96/100], Step [8/8], Loss: 0.0483\n",
      "Epoch [97/100], Step [4/8], Loss: 0.1448\n",
      "Epoch [97/100], Step [8/8], Loss: 0.1269\n",
      "Epoch [98/100], Step [4/8], Loss: 0.1560\n",
      "Epoch [98/100], Step [8/8], Loss: 0.0425\n",
      "Epoch [99/100], Step [4/8], Loss: 0.2810\n",
      "Epoch [99/100], Step [8/8], Loss: 0.0648\n",
      "Epoch [100/100], Step [4/8], Loss: 0.1198\n",
      "Epoch [100/100], Step [8/8], Loss: 0.0638\n",
      "开始训练时间：1752651723.905466\n",
      "结束训练时间：1752651723.905466\n",
      "训练完成，耗时：32.85186696052551\n"
     ]
    }
   ],
   "execution_count": 36
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-16T07:28:03.785580Z",
     "start_time": "2025-07-16T07:28:02.358967Z"
    }
   },
   "cell_type": "code",
   "source": [
    "# 生成函数\n",
    "def generate(sentence, max_length=500):\n",
    "    input_ids = gpt_tokenizer.encode(sentence, return_tensors='pt', add_special_tokens=False)\n",
    "    input_ids = input_ids.to(device)\n",
    "    output = gpt_model.generate(\n",
    "        input_ids,\n",
    "        max_length=max_length,\n",
    "        pad_token_id=gpt_tokenizer.eos_token_id,\n",
    "        do_sample=True,\n",
    "    )\n",
    "    return gpt_tokenizer.decode(output[0])\n",
    "\n",
    "\n",
    "print(generate(\"PVC片材\"))"
   ],
   "id": "fed9769a193d5a54",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "PVC片材石塑卷板、PVC卷材、PVC防静电地板、PVC锁扣地板、装领�VC地板、箤证�PVC地板、PVC地板、PVC地靦����向，防饲���地板、PVC地板、PVC地板、PVC地板、PVC地靝��高�市�面��，PVC地板、PVC地杆�核�导��司PVC地��PVC地板、PVC地板、PVC地板等，地板�地板�超�PVC地蓁�，地板��厂地靝���地板，超�������以靝��市Ｘ�导各�栆PVC地板、PVC地板、PVC地板�各��PVC地板，时��吷板，服地板，以靝���，������仝�PVC地心�地������学��仂�市�厥��超�地杦��以各�栆PVC地板，�期����PVC地板��发�各��栆PVC�逄�琦�PVC地板�PVC地板�、PVC地板，地板��心�高�実����\n"
     ]
    }
   ],
   "execution_count": 34
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
