{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "3e64f53ad916a82b",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-06T09:55:24.400570Z",
     "start_time": "2025-06-06T09:55:19.733372Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple\n",
      "Collecting modelscope\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/2f/14/027c49872c9f1087a1b9f604a81d5584324d84b1540a88b071e41eaca5b7/modelscope-1.27.1-py3-none-any.whl (5.9 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.9/5.9 MB\u001b[0m \u001b[31m284.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: tqdm>=4.64.0 in ./miniconda3/lib/python3.10/site-packages (from modelscope) (4.64.1)\n",
      "Requirement already satisfied: urllib3>=1.26 in ./miniconda3/lib/python3.10/site-packages (from modelscope) (1.26.13)\n",
      "Requirement already satisfied: requests>=2.25 in ./miniconda3/lib/python3.10/site-packages (from modelscope) (2.32.3)\n",
      "Requirement already satisfied: setuptools in ./miniconda3/lib/python3.10/site-packages (from modelscope) (65.5.0)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in ./miniconda3/lib/python3.10/site-packages (from requests>=2.25->modelscope) (2.0.4)\n",
      "Requirement already satisfied: idna<4,>=2.5 in ./miniconda3/lib/python3.10/site-packages (from requests>=2.25->modelscope) (3.4)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in ./miniconda3/lib/python3.10/site-packages (from requests>=2.25->modelscope) (2022.12.7)\n",
      "Installing collected packages: modelscope\n",
      "Successfully installed modelscope-1.27.1\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0m"
     ]
    }
   ],
   "source": [
    "# %pip (not !pip) ensures the package is installed into this kernel's environment\n",
    "%pip install \"modelscope\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "af6a8a2a36dff37d",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-06T09:56:06.448327Z",
     "start_time": "2025-06-06T09:55:41.434969Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple\n",
      "Requirement already satisfied: modelscope[framework] in ./miniconda3/lib/python3.10/site-packages (1.27.1)\n",
      "Requirement already satisfied: setuptools in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (65.5.0)\n",
      "Requirement already satisfied: tqdm>=4.64.0 in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (4.64.1)\n",
      "Requirement already satisfied: urllib3>=1.26 in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (1.26.13)\n",
      "Requirement already satisfied: requests>=2.25 in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (2.32.3)\n",
      "Collecting transformers\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/5e/0c/68d03a38f6ab2ba2b2829eb11b334610dd236e7926787f7656001b68e1f2/transformers-4.53.0-py3-none-any.whl (10.8 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m10.8/10.8 MB\u001b[0m \u001b[31m234.9 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:02\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: attrs in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (23.2.0)\n",
      "Requirement already satisfied: python-dateutil>=2.1 in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (2.9.0.post0)\n",
      "Collecting simplejson>=3.3.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/bb/9e/da184f0e9bb3a5d7ffcde713bd41b4fe46cca56b6f24d9bd155fac56805a/simplejson-3.20.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (138 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m138.0/138.0 kB\u001b[0m \u001b[31m224.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting datasets<=3.2.0,>=3.0.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/d7/84/0df6c5981f5fc722381662ff8cfbdf8aad64bec875f75d80b55bfef394ce/datasets-3.2.0-py3-none-any.whl (480 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m480.6/480.6 kB\u001b[0m \u001b[31m220.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting scipy\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/8e/6d/41991e503e51fc1134502694c5fa7a1671501a17ffa12716a4a9151af3df/scipy-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (37.7 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m37.7/37.7 MB\u001b[0m \u001b[31m268.1 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:04\u001b[0m\n",
      "\u001b[?25hCollecting einops\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/87/62/9773de14fe6c45c23649e98b83231fffd7b9892b6cf863251dc2afa73643/einops-0.8.1-py3-none-any.whl (64 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m64.4/64.4 kB\u001b[0m \u001b[31m132.1 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting sortedcontainers>=1.5.9\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl (29 kB)\n",
      "Requirement already satisfied: Pillow in ./miniconda3/lib/python3.10/site-packages (from modelscope[framework]) (10.3.0)\n",
      "Collecting addict\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl (3.8 kB)\n",
      "Collecting huggingface-hub>=0.23.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/44/f4/5f3f22e762ad1965f01122b42dae5bf0e009286e2dba601ce1d0dba72424/huggingface_hub-0.33.2-py3-none-any.whl (515 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m515.4/515.4 kB\u001b[0m \u001b[31m277.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: pyyaml>=5.1 in ./miniconda3/lib/python3.10/site-packages (from datasets<=3.2.0,>=3.0.0->modelscope[framework]) (6.0.1)\n",
      "Requirement already satisfied: fsspec[http]<=2024.9.0,>=2023.1.0 in ./miniconda3/lib/python3.10/site-packages (from datasets<=3.2.0,>=3.0.0->modelscope[framework]) (2024.6.0)\n",
      "Collecting tqdm>=4.64.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl (78 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.5/78.5 kB\u001b[0m \u001b[31m272.9 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: packaging in ./miniconda3/lib/python3.10/site-packages (from datasets<=3.2.0,>=3.0.0->modelscope[framework]) (24.1)\n",
      "Collecting multiprocess<0.70.17\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl (134 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m289.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: numpy>=1.17 in ./miniconda3/lib/python3.10/site-packages (from datasets<=3.2.0,>=3.0.0->modelscope[framework]) (1.26.4)\n",
      "Collecting pyarrow>=15.0.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl (42.3 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m42.3/42.3 MB\u001b[0m \u001b[31m292.9 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:04\u001b[0m\n",
      "\u001b[?25hCollecting xxhash\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/f2/07/d9a3059f702dec5b3b703737afb6dda32f304f6e9da181a229dafd052c29/xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m300.3 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting pandas\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/66/f8/5508bc45e994e698dbc93607ee6b9b6eb67df978dc10ee2b09df80103d9e/pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.3 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m12.3/12.3 MB\u001b[0m \u001b[31m296.1 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:02\u001b[0m\n",
      "\u001b[?25hCollecting dill<0.3.9,>=0.3.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl (116 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m296.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting aiohttp\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/e6/7a/d85866a642158e1147c7da5f93ad66b07e5452a84ec4258e5f06b9071e92/aiohttp-3.12.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.6 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m302.2 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: filelock in ./miniconda3/lib/python3.10/site-packages (from datasets<=3.2.0,>=3.0.0->modelscope[framework]) (3.14.0)\n",
      "Requirement already satisfied: six>=1.5 in ./miniconda3/lib/python3.10/site-packages (from python-dateutil>=2.1->modelscope[framework]) (1.16.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in ./miniconda3/lib/python3.10/site-packages (from requests>=2.25->modelscope[framework]) (2022.12.7)\n",
      "Requirement already satisfied: idna<4,>=2.5 in ./miniconda3/lib/python3.10/site-packages (from requests>=2.25->modelscope[framework]) (3.4)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in ./miniconda3/lib/python3.10/site-packages (from requests>=2.25->modelscope[framework]) (2.0.4)\n",
      "Collecting regex!=2019.12.17\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/f2/98/26d3830875b53071f1f0ae6d547f1d98e964dd29ad35cbf94439120bb67a/regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (781 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m781.7/781.7 kB\u001b[0m \u001b[31m278.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting safetensors>=0.4.3\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (471 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m471.6/471.6 kB\u001b[0m \u001b[31m297.1 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting tokenizers<0.22,>=0.21\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m306.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting async-timeout<6.0,>=4.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl (6.2 kB)\n",
      "Collecting propcache>=0.2.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (198 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m198.3/198.3 kB\u001b[0m \u001b[31m314.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting yarl<2.0,>=1.17.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (326 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m326.1/326.1 kB\u001b[0m \u001b[31m321.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting multidict<7.0,>=4.5\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/44/11/780615a98fd3775fc309d0234d563941af69ade2df0bb82c91dda6ddaea1/multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl (241 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m241.6/241.6 kB\u001b[0m \u001b[31m297.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting aiohappyeyeballs>=2.5.0\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl (15 kB)\n",
      "Collecting frozenlist>=1.1.1\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (222 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m222.9/222.9 kB\u001b[0m \u001b[31m311.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting aiosignal>=1.1.2\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl (7.6 kB)\n",
      "Requirement already satisfied: typing-extensions>=3.7.4.3 in ./miniconda3/lib/python3.10/site-packages (from huggingface-hub>=0.23.0->datasets<=3.2.0,>=3.0.0->modelscope[framework]) (4.12.2)\n",
      "Collecting hf-xet<2.0.0,>=1.1.2\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m294.2 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting tzdata>=2022.7\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl (347 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m347.8/347.8 kB\u001b[0m \u001b[31m299.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting pytz>=2020.1\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl (509 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m509.2/509.2 kB\u001b[0m \u001b[31m290.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hInstalling collected packages: sortedcontainers, pytz, addict, xxhash, tzdata, tqdm, simplejson, scipy, safetensors, regex, pyarrow, propcache, multidict, hf-xet, frozenlist, einops, dill, async-timeout, aiohappyeyeballs, yarl, pandas, multiprocess, huggingface-hub, aiosignal, tokenizers, aiohttp, transformers, datasets\n",
      "  Attempting uninstall: tqdm\n",
      "    Found existing installation: tqdm 4.64.1\n",
      "    Uninstalling tqdm-4.64.1:\n",
      "      Successfully uninstalled tqdm-4.64.1\n",
      "Successfully installed addict-2.4.0 aiohappyeyeballs-2.6.1 aiohttp-3.12.13 aiosignal-1.3.2 async-timeout-5.0.1 datasets-3.2.0 dill-0.3.8 einops-0.8.1 frozenlist-1.7.0 hf-xet-1.1.5 huggingface-hub-0.33.2 multidict-6.6.3 multiprocess-0.70.16 pandas-2.3.0 propcache-0.3.2 pyarrow-20.0.0 pytz-2025.2 regex-2024.11.6 safetensors-0.5.3 scipy-1.15.3 simplejson-3.20.1 sortedcontainers-2.4.0 tokenizers-0.21.2 tqdm-4.67.1 transformers-4.53.0 tzdata-2025.2 xxhash-3.5.0 yarl-1.20.1\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0m"
     ]
    }
   ],
   "source": [
    "# %pip (not !pip) ensures the package is installed into this kernel's environment\n",
    "%pip install \"modelscope[framework]\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "initial_id",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:31:12.391419Z",
     "start_time": "2025-07-02T08:31:07.727705Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-07-02 17:00:54,760 - modelscope - WARNING - Use trust_remote_code=True. Will invoke codes from chinese-poetry-collection. Please make sure that you can trust the external codes.\n",
      "2025-07-02 17:00:58,156 - modelscope - WARNING - Reusing dataset dataset_builder (/root/.cache/modelscope/hub/datasets/modelscope/chinese-poetry-collection/master/data_files)\n",
      "2025-07-02 17:00:58,158 - modelscope - INFO - Generating dataset dataset_builder (/root/.cache/modelscope/hub/datasets/modelscope/chinese-poetry-collection/master/data_files)\n",
      "2025-07-02 17:00:58,159 - modelscope - INFO - Loading meta-data file ...\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "113ff3646fe94ac6a73f3504cd8b1d83",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "0it [00:00, ?it/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "{'text1': ['半生长以客为家，罢直初来瀚海槎。始信人间行不尽，天涯更复有天涯。',\n",
       "  '南州未识异州苹，初向沙头问水神。料得行藏无用卜，乘桴人是北来人。']}"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Download the dataset from ModelScope\n",
    "from modelscope.msdatasets import MsDataset\n",
    "\n",
    "ms_dataset = MsDataset.load('modelscope/chinese-poetry-collection', subset_name='default', split='train')\n",
    "\n",
    "# Convert the MsDataset to a HuggingFace Dataset\n",
    "hf_dataset = ms_dataset.to_hf_dataset()\n",
    "\n",
    "# Keep only the first 200 poems\n",
    "hf_dataset = hf_dataset.select(range(200))\n",
    "\n",
    "hf_dataset[:2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "f470a7860fab0370",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:32:24.251214Z",
     "start_time": "2025-07-02T08:32:22.886834Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading Model from https://www.modelscope.cn to directory: /root/.cache/modelscope/hub/models/openai-community/gpt2\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-07-02 17:01:19,481 - modelscope - INFO - Got 2 files, start to download ...\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "31eb658528f04ee59bbe65aabee043a4",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Processing 2 items:   0%|          | 0.00/2.00 [00:00<?, ?it/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "a1f81a6d5f094612a4658760744dd82c",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Downloading [tokenizer.json]:   0%|          | 0.00/1.29M [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "46f69970a5454dbe8c952a7642196951",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Downloading [tokenizer_config.json]:   0%|          | 0.00/26.0 [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-07-02 17:01:20,321 - modelscope - INFO - Download model 'openai-community/gpt2' successfully.\n"
     ]
    }
   ],
   "source": [
    "# Download from HuggingFace (alternative, kept for reference)\n",
    "# from transformers import GPT2Tokenizer\n",
    "# gpt2_tokenizer = GPT2Tokenizer.from_pretrained(\"gpt2\")\n",
    "\n",
    "# Download from ModelScope instead\n",
    "# NOTE(review): AutoModelForCausalLM is imported but not used in this cell —\n",
    "# presumably needed by a later cell; confirm and drop if not.\n",
    "from modelscope import AutoTokenizer, AutoModelForCausalLM\n",
    "gpt2_tokenizer = AutoTokenizer.from_pretrained('openai-community/gpt2')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "3441253d9ef78caa",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:32:26.361499Z",
     "start_time": "2025-07-02T08:32:26.358924Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "50256\n",
      "50256\n",
      "None\n",
      "50256\n"
     ]
    }
   ],
   "source": [
    "# Inspect the tokenizer's special-token ids\n",
    "print(gpt2_tokenizer.eos_token_id)\n",
    "print(gpt2_tokenizer.bos_token_id)\n",
    "print(gpt2_tokenizer.pad_token_id)  # None: GPT uses a decoder-only architecture and defines no PAD token\n",
    "print(gpt2_tokenizer.unk_token_id)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "8157e87cd21e061",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:32:45.864218Z",
     "start_time": "2025-07-02T08:32:45.855340Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'input_ids': [[39355, 232, 37955, 165, 243, 123, 20015, 98, 22522, 95, 10310, 118, 22522, 114, 171, 120, 234, 163, 121, 95, 33566, 112, 26344, 251, 30266, 98, 163, 222, 248, 38184, 115, 162, 100, 236, 16764, 34650, 233, 46479, 94, 21689, 29785, 112, 26193, 234, 38834, 22887, 121, 171, 120, 234, 25465, 162, 114, 107, 162, 249, 112, 13783, 235, 17312, 231, 25465, 162, 114, 107, 16764], [39355, 245, 32432, 252, 17312, 103, 46237, 228, 28156, 224, 32432, 252, 164, 233, 117, 171, 120, 234, 26344, 251, 28938, 239, 162, 110, 247, 13783, 112, 29785, 106, 36365, 112, 15351, 16764, 23877, 247, 36181, 245, 26193, 234, 164, 245, 237, 33768, 254, 18796, 101, 39355, 250, 171, 120, 234, 20046, 246, 162, 94, 112, 21689, 42468, 44293, 245, 30266, 98, 21689, 16764]], 'attention_mask': [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]}"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Tokenize the two sample poems to inspect the resulting input_ids / attention_mask\n",
    "gpt2_tokenizer(['半生长以客为家，罢直初来瀚海槎。始信人间行不尽，天涯更复有天涯。',\n",
    "                '南州未识异州苹，初向沙头问水神。料得行藏无用卜，乘桴人是北来人。'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "1285e91286f1e5cb",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:33:19.767837Z",
     "start_time": "2025-07-02T08:33:19.702254Z"
    }
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "8d7a282d0c5f4ba3b60a4a409cdb363a",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Map:   0%|          | 0/200 [00:00<?, ? examples/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Tokenize every poem: map() adds input_ids and attention_mask columns to the dataset\n",
    "dataset = hf_dataset.map(lambda x: gpt2_tokenizer(x['text1']))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "2277156421de4b60",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:33:25.283132Z",
     "start_time": "2025-07-02T08:33:25.277713Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'text1': ['半生长以客为家，罢直初来瀚海槎。始信人间行不尽，天涯更复有天涯。',\n",
       "  '南州未识异州苹，初向沙头问水神。料得行藏无用卜，乘桴人是北来人。'],\n",
       " 'input_ids': [[39355,\n",
       "   232,\n",
       "   37955,\n",
       "   165,\n",
       "   243,\n",
       "   123,\n",
       "   20015,\n",
       "   98,\n",
       "   22522,\n",
       "   95,\n",
       "   10310,\n",
       "   118,\n",
       "   22522,\n",
       "   114,\n",
       "   171,\n",
       "   120,\n",
       "   234,\n",
       "   163,\n",
       "   121,\n",
       "   95,\n",
       "   33566,\n",
       "   112,\n",
       "   26344,\n",
       "   251,\n",
       "   30266,\n",
       "   98,\n",
       "   163,\n",
       "   222,\n",
       "   248,\n",
       "   38184,\n",
       "   115,\n",
       "   162,\n",
       "   100,\n",
       "   236,\n",
       "   16764,\n",
       "   34650,\n",
       "   233,\n",
       "   46479,\n",
       "   94,\n",
       "   21689,\n",
       "   29785,\n",
       "   112,\n",
       "   26193,\n",
       "   234,\n",
       "   38834,\n",
       "   22887,\n",
       "   121,\n",
       "   171,\n",
       "   120,\n",
       "   234,\n",
       "   25465,\n",
       "   162,\n",
       "   114,\n",
       "   107,\n",
       "   162,\n",
       "   249,\n",
       "   112,\n",
       "   13783,\n",
       "   235,\n",
       "   17312,\n",
       "   231,\n",
       "   25465,\n",
       "   162,\n",
       "   114,\n",
       "   107,\n",
       "   16764],\n",
       "  [39355,\n",
       "   245,\n",
       "   32432,\n",
       "   252,\n",
       "   17312,\n",
       "   103,\n",
       "   46237,\n",
       "   228,\n",
       "   28156,\n",
       "   224,\n",
       "   32432,\n",
       "   252,\n",
       "   164,\n",
       "   233,\n",
       "   117,\n",
       "   171,\n",
       "   120,\n",
       "   234,\n",
       "   26344,\n",
       "   251,\n",
       "   28938,\n",
       "   239,\n",
       "   162,\n",
       "   110,\n",
       "   247,\n",
       "   13783,\n",
       "   112,\n",
       "   29785,\n",
       "   106,\n",
       "   36365,\n",
       "   112,\n",
       "   15351,\n",
       "   16764,\n",
       "   23877,\n",
       "   247,\n",
       "   36181,\n",
       "   245,\n",
       "   26193,\n",
       "   234,\n",
       "   164,\n",
       "   245,\n",
       "   237,\n",
       "   33768,\n",
       "   254,\n",
       "   18796,\n",
       "   101,\n",
       "   39355,\n",
       "   250,\n",
       "   171,\n",
       "   120,\n",
       "   234,\n",
       "   20046,\n",
       "   246,\n",
       "   162,\n",
       "   94,\n",
       "   112,\n",
       "   21689,\n",
       "   42468,\n",
       "   44293,\n",
       "   245,\n",
       "   30266,\n",
       "   98,\n",
       "   21689,\n",
       "   16764]],\n",
       " 'attention_mask': [[1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1],\n",
       "  [1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1,\n",
       "   1]]}"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Inspect the first two tokenized examples\n",
    "dataset[:2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "7c97548ec06f4ec2",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:33:38.028790Z",
     "start_time": "2025-07-02T08:33:38.015056Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[tensor([39355,   232, 37955,   165,   243,   123, 20015,    98, 22522,    95,\n",
       "         10310,   118, 22522,   114,   171,   120,   234,   163,   121,    95,\n",
       "         33566,   112, 26344,   251, 30266,    98,   163,   222,   248, 38184,\n",
       "           115,   162,   100,   236, 16764, 34650,   233, 46479,    94, 21689,\n",
       "         29785,   112, 26193,   234, 38834, 22887,   121,   171,   120,   234,\n",
       "         25465,   162,   114,   107,   162,   249,   112, 13783,   235, 17312,\n",
       "           231, 25465,   162,   114,   107, 16764, 50256])]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from torch.utils.data import Dataset, DataLoader\n",
    "import torch\n",
    "\n",
    "# NOTE(review): WINDOW_SIZE is passed to GPTDataset but currently unused —\n",
    "# each poem is kept as one full sequence. Confirm whether sliding windows were intended.\n",
    "WINDOW_SIZE = 5\n",
    "\n",
    "\n",
    "# Custom Dataset of tokenized poems for causal-LM training.\n",
    "class GPTDataset(Dataset):\n",
    "    \"\"\"Each item is a 1-D LongTensor of token ids with the tokenizer's EOS id\n",
    "    appended, so the model can learn where a poem ends.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, dataset, window_size):\n",
    "        # `window_size` is kept for interface compatibility but not used here.\n",
    "        self.data = []\n",
    "        for tokens in dataset['input_ids']:\n",
    "            # Append EOS at the end of every poem.\n",
    "            self.data.append(torch.tensor(tokens + [gpt2_tokenizer.eos_token_id]))\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.data)\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        return self.data[idx]\n",
    "\n",
    "\n",
    "gpt_dataset = GPTDataset(dataset, window_size=WINDOW_SIZE)\n",
    "gpt_dataset[:1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "6874529499e7772a",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:33:46.850604Z",
     "start_time": "2025-07-02T08:33:42.224804Z"
    }
   },
   "outputs": [],
   "source": [
    "from transformers import GPT2LMHeadModel, GPT2Config\n",
    "import torch\n",
    "\n",
    "# A pretrained checkpoint (GPT2LMHeadModel.from_pretrained(\"gpt2\")) is large and\n",
    "# already capable; here we only borrow the architecture and train from scratch.\n",
    "\n",
    "# Default config — same architecture as the pretrained GPT-2 model.\n",
    "config = GPT2Config()\n",
    "\n",
    "# Build the model from the config: weights are randomly initialized.\n",
    "model = GPT2LMHeadModel(config)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "22ff9dcb8e3e66f0",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:34:22.084230Z",
     "start_time": "2025-07-02T08:34:22.077252Z"
    }
   },
   "outputs": [],
   "source": [
    "from torch.nn.utils.rnn import pad_sequence\n",
    "\n",
    "\n",
    "def collate_fn(batch):\n",
    "    \"\"\"Right-pad the batch so variable-length poems stack into one tensor.\n",
    "\n",
    "    With batch_size > 1, samples of different lengths cannot be stacked\n",
    "    directly. The GPT-2 tokenizer defines no PAD token, so EOS doubles as\n",
    "    padding — it already marks the end of a poem.\n",
    "    NOTE(review): padded positions later serve as labels too, so some loss\n",
    "    is computed on padding EOS tokens — confirm this is intended.\n",
    "    \"\"\"\n",
    "    return pad_sequence(batch, batch_first=True, padding_value=gpt2_tokenizer.eos_token_id)\n",
    "\n",
    "\n",
    "# Hand the custom collate_fn to the DataLoader.\n",
    "data_loader = DataLoader(gpt_dataset, batch_size=2, shuffle=True, collate_fn=collate_fn)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "3cd75fee756f1a15",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:41:59.424483Z",
     "start_time": "2025-07-02T08:35:54.673674Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "`loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Batch [0/100], Loss: 10.9216\n",
      "Batch [50/100], Loss: 7.1645\n",
      "Epoch [1/100], Loss: 7.8226\n",
      "Batch [0/100], Loss: 7.6620\n",
      "Batch [50/100], Loss: 5.5238\n",
      "Epoch [2/100], Loss: 6.4050\n",
      "Batch [0/100], Loss: 6.1059\n",
      "Batch [50/100], Loss: 5.7475\n",
      "Epoch [3/100], Loss: 5.3398\n",
      "Batch [0/100], Loss: 5.1461\n",
      "Batch [50/100], Loss: 4.0009\n",
      "Epoch [4/100], Loss: 4.5343\n",
      "Batch [0/100], Loss: 4.0592\n",
      "Batch [50/100], Loss: 3.7797\n",
      "Epoch [5/100], Loss: 3.9876\n",
      "Batch [0/100], Loss: 3.4827\n",
      "Batch [50/100], Loss: 3.7072\n",
      "Epoch [6/100], Loss: 3.4801\n",
      "Batch [0/100], Loss: 3.0103\n",
      "Batch [50/100], Loss: 3.3443\n",
      "Epoch [7/100], Loss: 3.2735\n",
      "Batch [0/100], Loss: 3.2629\n",
      "Batch [50/100], Loss: 3.1956\n",
      "Epoch [8/100], Loss: 3.0791\n",
      "Batch [0/100], Loss: 2.8521\n",
      "Batch [50/100], Loss: 3.4251\n",
      "Epoch [9/100], Loss: 2.9433\n",
      "Batch [0/100], Loss: 2.9007\n",
      "Batch [50/100], Loss: 2.4809\n",
      "Epoch [10/100], Loss: 2.7159\n",
      "Batch [0/100], Loss: 2.5688\n",
      "Batch [50/100], Loss: 2.4106\n",
      "Epoch [11/100], Loss: 2.5750\n",
      "Batch [0/100], Loss: 2.3379\n",
      "Batch [50/100], Loss: 2.4780\n",
      "Epoch [12/100], Loss: 2.3773\n",
      "Batch [0/100], Loss: 2.0742\n",
      "Batch [50/100], Loss: 2.5011\n",
      "Epoch [13/100], Loss: 2.3059\n",
      "Batch [0/100], Loss: 2.2303\n",
      "Batch [50/100], Loss: 1.8539\n",
      "Epoch [14/100], Loss: 2.1044\n",
      "Batch [0/100], Loss: 1.9737\n",
      "Batch [50/100], Loss: 2.2802\n",
      "Epoch [15/100], Loss: 1.9907\n",
      "Batch [0/100], Loss: 2.0487\n",
      "Batch [50/100], Loss: 1.8271\n",
      "Epoch [16/100], Loss: 1.8352\n",
      "Batch [0/100], Loss: 1.4536\n",
      "Batch [50/100], Loss: 1.5389\n",
      "Epoch [17/100], Loss: 1.6832\n",
      "Batch [0/100], Loss: 1.6208\n",
      "Batch [50/100], Loss: 1.4202\n",
      "Epoch [18/100], Loss: 1.5595\n",
      "Batch [0/100], Loss: 1.3031\n",
      "Batch [50/100], Loss: 1.3315\n",
      "Epoch [19/100], Loss: 1.4105\n",
      "Batch [0/100], Loss: 1.4045\n",
      "Batch [50/100], Loss: 1.1626\n",
      "Epoch [20/100], Loss: 1.3035\n",
      "Batch [0/100], Loss: 1.1038\n",
      "Batch [50/100], Loss: 1.2613\n",
      "Epoch [21/100], Loss: 1.2010\n",
      "Batch [0/100], Loss: 1.0492\n",
      "Batch [50/100], Loss: 1.0226\n",
      "Epoch [22/100], Loss: 1.0768\n",
      "Batch [0/100], Loss: 0.6937\n",
      "Batch [50/100], Loss: 0.8753\n",
      "Epoch [23/100], Loss: 0.9725\n",
      "Batch [0/100], Loss: 0.8023\n",
      "Batch [50/100], Loss: 0.8467\n",
      "Epoch [24/100], Loss: 0.8673\n",
      "Batch [0/100], Loss: 0.8269\n",
      "Batch [50/100], Loss: 0.9186\n",
      "Epoch [25/100], Loss: 0.7849\n",
      "Batch [0/100], Loss: 0.7075\n",
      "Batch [50/100], Loss: 0.5731\n",
      "Epoch [26/100], Loss: 0.6956\n",
      "Batch [0/100], Loss: 0.7165\n",
      "Batch [50/100], Loss: 0.6212\n",
      "Epoch [27/100], Loss: 0.6308\n",
      "Batch [0/100], Loss: 0.5238\n",
      "Batch [50/100], Loss: 0.5437\n",
      "Epoch [28/100], Loss: 0.5644\n",
      "Batch [0/100], Loss: 0.3508\n",
      "Batch [50/100], Loss: 0.5565\n",
      "Epoch [29/100], Loss: 0.4938\n",
      "Batch [0/100], Loss: 0.5157\n",
      "Batch [50/100], Loss: 0.5056\n",
      "Epoch [30/100], Loss: 0.4440\n",
      "Batch [0/100], Loss: 0.2853\n",
      "Batch [50/100], Loss: 0.3925\n",
      "Epoch [31/100], Loss: 0.3896\n",
      "Batch [0/100], Loss: 0.3419\n",
      "Batch [50/100], Loss: 0.3146\n",
      "Epoch [32/100], Loss: 0.3667\n",
      "Batch [0/100], Loss: 0.2580\n",
      "Batch [50/100], Loss: 0.3144\n",
      "Epoch [33/100], Loss: 0.3267\n",
      "Batch [0/100], Loss: 0.3003\n",
      "Batch [50/100], Loss: 0.2908\n",
      "Epoch [34/100], Loss: 0.2968\n",
      "Batch [0/100], Loss: 0.2339\n",
      "Batch [50/100], Loss: 0.4088\n",
      "Epoch [35/100], Loss: 0.2677\n",
      "Batch [0/100], Loss: 0.1934\n",
      "Batch [50/100], Loss: 0.2889\n",
      "Epoch [36/100], Loss: 0.2467\n",
      "Batch [0/100], Loss: 0.2009\n",
      "Batch [50/100], Loss: 0.2117\n",
      "Epoch [37/100], Loss: 0.2202\n",
      "Batch [0/100], Loss: 0.1722\n",
      "Batch [50/100], Loss: 0.2459\n",
      "Epoch [38/100], Loss: 0.1989\n",
      "Batch [0/100], Loss: 0.2047\n",
      "Batch [50/100], Loss: 0.1257\n",
      "Epoch [39/100], Loss: 0.1918\n",
      "Batch [0/100], Loss: 0.1636\n",
      "Batch [50/100], Loss: 0.1205\n",
      "Epoch [40/100], Loss: 0.1749\n",
      "Batch [0/100], Loss: 0.2087\n",
      "Batch [50/100], Loss: 0.1198\n",
      "Epoch [41/100], Loss: 0.1631\n",
      "Batch [0/100], Loss: 0.1579\n",
      "Batch [50/100], Loss: 0.1109\n",
      "Epoch [42/100], Loss: 0.1511\n",
      "Batch [0/100], Loss: 0.1383\n",
      "Batch [50/100], Loss: 0.1237\n",
      "Epoch [43/100], Loss: 0.1505\n",
      "Batch [0/100], Loss: 0.1433\n",
      "Batch [50/100], Loss: 0.1802\n",
      "Epoch [44/100], Loss: 0.1481\n",
      "Batch [0/100], Loss: 0.1172\n",
      "Batch [50/100], Loss: 0.1385\n",
      "Epoch [45/100], Loss: 0.1363\n",
      "Batch [0/100], Loss: 0.0979\n",
      "Batch [50/100], Loss: 0.0923\n",
      "Epoch [46/100], Loss: 0.1302\n",
      "Batch [0/100], Loss: 0.1203\n",
      "Batch [50/100], Loss: 0.1222\n",
      "Epoch [47/100], Loss: 0.1249\n",
      "Batch [0/100], Loss: 0.1181\n",
      "Batch [50/100], Loss: 0.0639\n",
      "Epoch [48/100], Loss: 0.1158\n",
      "Batch [0/100], Loss: 0.0754\n",
      "Batch [50/100], Loss: 0.0947\n",
      "Epoch [49/100], Loss: 0.1144\n",
      "Batch [0/100], Loss: 0.0586\n",
      "Batch [50/100], Loss: 0.1039\n",
      "Epoch [50/100], Loss: 0.1058\n",
      "Batch [0/100], Loss: 0.1017\n",
      "Batch [50/100], Loss: 0.1331\n",
      "Epoch [51/100], Loss: 0.1082\n",
      "Batch [0/100], Loss: 0.1020\n",
      "Batch [50/100], Loss: 0.1372\n",
      "Epoch [52/100], Loss: 0.1004\n",
      "Batch [0/100], Loss: 0.0848\n",
      "Batch [50/100], Loss: 0.0514\n",
      "Epoch [53/100], Loss: 0.0958\n",
      "Batch [0/100], Loss: 0.0587\n",
      "Batch [50/100], Loss: 0.1133\n",
      "Epoch [54/100], Loss: 0.0918\n",
      "Batch [0/100], Loss: 0.0637\n",
      "Batch [50/100], Loss: 0.1337\n",
      "Epoch [55/100], Loss: 0.0917\n",
      "Batch [0/100], Loss: 0.0360\n",
      "Batch [50/100], Loss: 0.0867\n",
      "Epoch [56/100], Loss: 0.0959\n",
      "Batch [0/100], Loss: 0.0599\n",
      "Batch [50/100], Loss: 0.0790\n",
      "Epoch [57/100], Loss: 0.0935\n",
      "Batch [0/100], Loss: 0.0904\n",
      "Batch [50/100], Loss: 0.1185\n",
      "Epoch [58/100], Loss: 0.0958\n",
      "Batch [0/100], Loss: 0.0671\n",
      "Batch [50/100], Loss: 0.0989\n",
      "Epoch [59/100], Loss: 0.0918\n",
      "Batch [0/100], Loss: 0.0741\n",
      "Batch [50/100], Loss: 0.1117\n",
      "Epoch [60/100], Loss: 0.0926\n",
      "Batch [0/100], Loss: 0.0986\n",
      "Batch [50/100], Loss: 0.0361\n",
      "Epoch [61/100], Loss: 0.0863\n",
      "Batch [0/100], Loss: 0.0702\n",
      "Batch [50/100], Loss: 0.1176\n",
      "Epoch [62/100], Loss: 0.0833\n",
      "Batch [0/100], Loss: 0.0820\n",
      "Batch [50/100], Loss: 0.0806\n",
      "Epoch [63/100], Loss: 0.0845\n",
      "Batch [0/100], Loss: 0.0986\n",
      "Batch [50/100], Loss: 0.0654\n",
      "Epoch [64/100], Loss: 0.0857\n",
      "Batch [0/100], Loss: 0.0673\n",
      "Batch [50/100], Loss: 0.0662\n",
      "Epoch [65/100], Loss: 0.0795\n",
      "Batch [0/100], Loss: 0.0438\n",
      "Batch [50/100], Loss: 0.1492\n",
      "Epoch [66/100], Loss: 0.0795\n",
      "Batch [0/100], Loss: 0.0926\n",
      "Batch [50/100], Loss: 0.0846\n",
      "Epoch [67/100], Loss: 0.0753\n",
      "Batch [0/100], Loss: 0.0779\n",
      "Batch [50/100], Loss: 0.0421\n",
      "Epoch [68/100], Loss: 0.0797\n",
      "Batch [0/100], Loss: 0.0734\n",
      "Batch [50/100], Loss: 0.0957\n",
      "Epoch [69/100], Loss: 0.0766\n",
      "Batch [0/100], Loss: 0.0798\n",
      "Batch [50/100], Loss: 0.0586\n",
      "Epoch [70/100], Loss: 0.0712\n",
      "Batch [0/100], Loss: 0.0476\n",
      "Batch [50/100], Loss: 0.0958\n",
      "Epoch [71/100], Loss: 0.0753\n",
      "Batch [0/100], Loss: 0.0650\n",
      "Batch [50/100], Loss: 0.0585\n",
      "Epoch [72/100], Loss: 0.0709\n",
      "Batch [0/100], Loss: 0.0646\n",
      "Batch [50/100], Loss: 0.0798\n",
      "Epoch [73/100], Loss: 0.0697\n",
      "Batch [0/100], Loss: 0.0788\n",
      "Batch [50/100], Loss: 0.0815\n",
      "Epoch [74/100], Loss: 0.0704\n",
      "Batch [0/100], Loss: 0.0639\n",
      "Batch [50/100], Loss: 0.0584\n",
      "Epoch [75/100], Loss: 0.0715\n",
      "Batch [0/100], Loss: 0.0711\n",
      "Batch [50/100], Loss: 0.0467\n",
      "Epoch [76/100], Loss: 0.0651\n",
      "Batch [0/100], Loss: 0.0448\n",
      "Batch [50/100], Loss: 0.0627\n",
      "Epoch [77/100], Loss: 0.0609\n",
      "Batch [0/100], Loss: 0.0629\n",
      "Batch [50/100], Loss: 0.0701\n",
      "Epoch [78/100], Loss: 0.0678\n",
      "Batch [0/100], Loss: 0.0661\n",
      "Batch [50/100], Loss: 0.0664\n",
      "Epoch [79/100], Loss: 0.0743\n",
      "Batch [0/100], Loss: 0.0670\n",
      "Batch [50/100], Loss: 0.0571\n",
      "Epoch [80/100], Loss: 0.0681\n",
      "Batch [0/100], Loss: 0.0734\n",
      "Batch [50/100], Loss: 0.0378\n",
      "Epoch [81/100], Loss: 0.0607\n",
      "Batch [0/100], Loss: 0.0590\n",
      "Batch [50/100], Loss: 0.1115\n",
      "Epoch [82/100], Loss: 0.0702\n",
      "Batch [0/100], Loss: 0.0602\n",
      "Batch [50/100], Loss: 0.1020\n",
      "Epoch [83/100], Loss: 0.0626\n",
      "Batch [0/100], Loss: 0.0464\n",
      "Batch [50/100], Loss: 0.0317\n",
      "Epoch [84/100], Loss: 0.0601\n",
      "Batch [0/100], Loss: 0.0824\n",
      "Batch [50/100], Loss: 0.0526\n",
      "Epoch [85/100], Loss: 0.0563\n",
      "Batch [0/100], Loss: 0.0642\n",
      "Batch [50/100], Loss: 0.0402\n",
      "Epoch [86/100], Loss: 0.0656\n",
      "Batch [0/100], Loss: 0.0657\n",
      "Batch [50/100], Loss: 0.0329\n",
      "Epoch [87/100], Loss: 0.0668\n",
      "Batch [0/100], Loss: 0.0650\n",
      "Batch [50/100], Loss: 0.0758\n",
      "Epoch [88/100], Loss: 0.0702\n",
      "Batch [0/100], Loss: 0.0780\n",
      "Batch [50/100], Loss: 0.0704\n",
      "Epoch [89/100], Loss: 0.0614\n",
      "Batch [0/100], Loss: 0.0360\n",
      "Batch [50/100], Loss: 0.0498\n",
      "Epoch [90/100], Loss: 0.0546\n",
      "Batch [0/100], Loss: 0.0534\n",
      "Batch [50/100], Loss: 0.0170\n",
      "Epoch [91/100], Loss: 0.0565\n",
      "Batch [0/100], Loss: 0.0526\n",
      "Batch [50/100], Loss: 0.0591\n",
      "Epoch [92/100], Loss: 0.0554\n",
      "Batch [0/100], Loss: 0.0291\n",
      "Batch [50/100], Loss: 0.0427\n",
      "Epoch [93/100], Loss: 0.0595\n",
      "Batch [0/100], Loss: 0.0587\n",
      "Batch [50/100], Loss: 0.0630\n",
      "Epoch [94/100], Loss: 0.0662\n",
      "Batch [0/100], Loss: 0.0444\n",
      "Batch [50/100], Loss: 0.0884\n",
      "Epoch [95/100], Loss: 0.0585\n",
      "Batch [0/100], Loss: 0.0458\n",
      "Batch [50/100], Loss: 0.0469\n",
      "Epoch [96/100], Loss: 0.0651\n",
      "Batch [0/100], Loss: 0.0427\n",
      "Batch [50/100], Loss: 0.0697\n",
      "Epoch [97/100], Loss: 0.0570\n",
      "Batch [0/100], Loss: 0.0613\n",
      "Batch [50/100], Loss: 0.0558\n",
      "Epoch [98/100], Loss: 0.0539\n",
      "Batch [0/100], Loss: 0.0255\n",
      "Batch [50/100], Loss: 0.0393\n",
      "Epoch [99/100], Loss: 0.0528\n",
      "Batch [0/100], Loss: 0.0517\n",
      "Batch [50/100], Loss: 0.0709\n",
      "Epoch [100/100], Loss: 0.0551\n"
     ]
    }
   ],
   "source": [
    "NUM_EPOCHS = 100\n",
    "\n",
    "# Pick the best available accelerator; fall back to CPU instead of crashing on\n",
    "# machines with neither CUDA nor Apple-Silicon MPS (original hard-coded \"mps:0\").\n",
    "device = torch.device(\n",
    "    \"cuda\" if torch.cuda.is_available()\n",
    "    else \"mps\" if torch.backends.mps.is_available()\n",
    "    else \"cpu\"\n",
    ")\n",
    "\n",
    "model.to(device)\n",
    "model.train()\n",
    "optimizer = torch.optim.Adam(model.parameters(), lr=1e-5)\n",
    "\n",
    "for epoch in range(NUM_EPOCHS):\n",
    "    total_loss = 0\n",
    "    for step, batch in enumerate(data_loader):\n",
    "        batch = batch.to(device)\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        # labels=batch: the model shifts labels internally and returns the\n",
    "        # causal-LM cross-entropy loss.\n",
    "        outputs = model(batch, labels=batch)\n",
    "        loss = outputs.loss\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        total_loss += loss.item()\n",
    "\n",
    "        # Log the loss every 50 steps.\n",
    "        if step % 50 == 0:\n",
    "            print(f'Batch [{step}/{len(data_loader)}], Loss: {loss.item():.4f}')\n",
    "\n",
    "    avg_loss = total_loss / len(data_loader)\n",
    "    print(f'Epoch [{epoch + 1}/{NUM_EPOCHS}], Loss: {avg_loss:.4f}')"
   ]
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "d22d8f12cc0b23ad",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:34:59.324166Z",
     "start_time": "2025-07-02T08:34:58.196739Z"
    }
   },
   "outputs": [],
   "source": [
    "# Save the trained weights + config so the model can be reloaded via from_pretrained()\n",
    "model.save_pretrained(\"gpt2_poetry_model\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "b4e63cd004a1c36",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-02T08:35:06.741291Z",
     "start_time": "2025-07-02T08:35:01.458069Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "生成结果 1:\n",
      "半生长以客为家，罢直初来瀚海槎。始信人间行不尽，\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Decoding settings passed through to generate().\n",
    "generation_config = {\n",
    "    \"max_length\": 50,  # upper bound on total length, prompt included\n",
    "    \"eos_token_id\": gpt2_tokenizer.eos_token_id,  # stop once EOS is produced\n",
    "    \"pad_token_id\": gpt2_tokenizer.eos_token_id,  # reuse EOS where padding is needed\n",
    "}\n",
    "\n",
    "# Seed the model with the opening half-line of a poem (may also be empty).\n",
    "prompt = \"半生长以客为家，\"\n",
    "input_ids = gpt2_tokenizer.encode(prompt, return_tensors=\"pt\").to(device)\n",
    "\n",
    "# Switch to eval mode, then generate a continuation of the prompt.\n",
    "# No sampling flags are set, so transformers' decoding defaults apply.\n",
    "model.eval()\n",
    "outputs = model.generate(input_ids=input_ids, **generation_config)\n",
    "\n",
    "# Decode each generated sequence and show it.\n",
    "for idx, seq in enumerate(outputs):\n",
    "    poem = gpt2_tokenizer.decode(seq, skip_special_tokens=True)\n",
    "    print(f\"生成结果 {idx+1}:\\n{poem}\\n\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
