{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "d806b2b9",
   "metadata": {},
   "source": [
    "### 模型加载"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "id": "d2e88fe2",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at E:\\HuggFace_model\\rbt3 were not used when initializing BertModel: ['cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.weight']\n",
      "- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
     ]
    }
   ],
   "source": [
    "from transformers import AutoModel, AutoTokenizer\n",
    "import os\n",
    " \n",
    "# 指定下载路径\n",
    "model_path = r\"E:\\HuggFace_model\\rbt3\"\n",
    "os.makedirs(model_path, exist_ok=True)  # 自动创建目录\n",
    " \n",
    "# 加载模型和分词器（自动下载）\n",
    "model = AutoModel.from_pretrained(\n",
    "    model_path,              # 模型名称（官方路径）\n",
    "    # cache_dir=model_path      # 指定缓存目录（实际文件会保存在此路径下）\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3b0b8a48",
   "metadata": {},
   "source": [
    "### 模型加载参数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "id": "75b4727f",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at E:\\HuggFace_model\\rbt3 were not used when initializing BertModel: ['cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.weight']\n",
      "- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
     ]
    }
   ],
   "source": [
    "model=AutoModel.from_pretrained(model_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "id": "3479725a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "BertConfig {\n",
       "  \"_name_or_path\": \"E:\\\\HuggFace_model\\\\rbt3\",\n",
       "  \"architectures\": [\n",
       "    \"BertForMaskedLM\"\n",
       "  ],\n",
       "  \"attention_probs_dropout_prob\": 0.1,\n",
       "  \"classifier_dropout\": null,\n",
       "  \"directionality\": \"bidi\",\n",
       "  \"hidden_act\": \"gelu\",\n",
       "  \"hidden_dropout_prob\": 0.1,\n",
       "  \"hidden_size\": 768,\n",
       "  \"initializer_range\": 0.02,\n",
       "  \"intermediate_size\": 3072,\n",
       "  \"layer_norm_eps\": 1e-12,\n",
       "  \"max_position_embeddings\": 512,\n",
       "  \"model_type\": \"bert\",\n",
       "  \"num_attention_heads\": 12,\n",
       "  \"num_hidden_layers\": 3,\n",
       "  \"output_past\": true,\n",
       "  \"pad_token_id\": 0,\n",
       "  \"pooler_fc_size\": 768,\n",
       "  \"pooler_num_attention_heads\": 12,\n",
       "  \"pooler_num_fc_layers\": 3,\n",
       "  \"pooler_size_per_head\": 128,\n",
       "  \"pooler_type\": \"first_token_transform\",\n",
       "  \"position_embedding_type\": \"absolute\",\n",
       "  \"transformers_version\": \"4.27.1\",\n",
       "  \"type_vocab_size\": 2,\n",
       "  \"use_cache\": true,\n",
       "  \"vocab_size\": 21128\n",
       "}"
      ]
     },
     "execution_count": 53,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.config"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "90812090",
   "metadata": {},
   "source": [
    "加载config文件"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "id": "637a87fa",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "BertConfig {\n",
       "  \"_name_or_path\": \"E:\\\\HuggFace_model\\\\rbt3\",\n",
       "  \"architectures\": [\n",
       "    \"BertForMaskedLM\"\n",
       "  ],\n",
       "  \"attention_probs_dropout_prob\": 0.1,\n",
       "  \"classifier_dropout\": null,\n",
       "  \"directionality\": \"bidi\",\n",
       "  \"hidden_act\": \"gelu\",\n",
       "  \"hidden_dropout_prob\": 0.1,\n",
       "  \"hidden_size\": 768,\n",
       "  \"initializer_range\": 0.02,\n",
       "  \"intermediate_size\": 3072,\n",
       "  \"layer_norm_eps\": 1e-12,\n",
       "  \"max_position_embeddings\": 512,\n",
       "  \"model_type\": \"bert\",\n",
       "  \"num_attention_heads\": 12,\n",
       "  \"num_hidden_layers\": 3,\n",
       "  \"output_past\": true,\n",
       "  \"pad_token_id\": 0,\n",
       "  \"pooler_fc_size\": 768,\n",
       "  \"pooler_num_attention_heads\": 12,\n",
       "  \"pooler_num_fc_layers\": 3,\n",
       "  \"pooler_size_per_head\": 128,\n",
       "  \"pooler_type\": \"first_token_transform\",\n",
       "  \"position_embedding_type\": \"absolute\",\n",
       "  \"transformers_version\": \"4.27.1\",\n",
       "  \"type_vocab_size\": 2,\n",
       "  \"use_cache\": true,\n",
       "  \"vocab_size\": 21128\n",
       "}"
      ]
     },
     "execution_count": 54,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 加载config文件\n",
    "from transformers import AutoConfig\n",
    "config=AutoConfig.from_pretrained(model_path)\n",
    "config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "id": "80fa232a",
   "metadata": {},
   "outputs": [],
   "source": [
    "from transformers import BertConfig"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "id": "610a5745",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "False"
      ]
     },
     "execution_count": 56,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "config.output_attentions#可以在这里修改=XXX"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "660573d4",
   "metadata": {},
   "source": [
    "### 模型调用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "id": "b510016c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'input_ids': tensor([[ 101, 2207, 2207, 4638, 2769,  738, 3300, 1920, 1920, 4638, 3457, 2682,\n",
       "          102]]), 'token_type_ids': tensor([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}"
      ]
     },
     "execution_count": 57,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sen=\"小小的我也有大大的梦想\"\n",
    "tokenizer=AutoTokenizer.from_pretrained(model_path)\n",
    "inputs=tokenizer(sen,return_tensors='pt')\n",
    "inputs"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3d437f90",
   "metadata": {},
   "source": [
    "### 不带ModelHead的模型调用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "id": "94c4fa06",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at E:\\HuggFace_model\\rbt3 were not used when initializing BertModel: ['cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.weight']\n",
      "- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
     ]
    }
   ],
   "source": [
    "model=AutoModel.from_pretrained(model_path)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "id": "93af937a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "BaseModelOutputWithPoolingAndCrossAttentions(last_hidden_state=tensor([[[ 0.1265,  0.5405,  0.3838,  ..., -0.5303,  0.1429, -0.1908],\n",
       "         [-0.1862,  1.1517, -0.4200,  ..., -0.2247, -0.1589, -0.6613],\n",
       "         [ 0.5635,  0.6384, -0.3005,  ..., -0.3440, -0.1391, -0.8560],\n",
       "         ...,\n",
       "         [-0.3734,  0.3011,  0.5510,  ..., -0.1795, -0.1457, -0.5103],\n",
       "         [-0.0045,  0.6324, -0.3027,  ..., -0.2366,  0.3897,  0.1222],\n",
       "         [ 0.1208,  0.5427,  0.3821,  ..., -0.5266,  0.1414, -0.1873]]],\n",
       "       grad_fn=<NativeLayerNormBackward0>), pooler_output=tensor([[-2.9094e-02, -9.9951e-01, -9.9991e-01, -9.1952e-01,  9.0408e-01,\n",
       "         -1.5124e-01,  8.1814e-02, -7.5480e-02,  9.9760e-01,  9.9957e-01,\n",
       "         -5.5056e-02, -1.0000e+00,  9.8080e-02,  9.9970e-01, -9.9999e-01,\n",
       "          9.9981e-01,  9.8097e-01,  9.7028e-01, -9.9938e-01, -1.2410e-01,\n",
       "         -9.9652e-01, -9.7274e-01,  3.2955e-01,  9.4098e-01,  9.7523e-01,\n",
       "         -9.9390e-01, -9.9994e-01,  6.1045e-02, -6.1693e-01, -9.9879e-01,\n",
       "         -9.9444e-01, -9.9990e-01,  2.3229e-01, -1.7623e-01,  9.9810e-01,\n",
       "         -9.4230e-01,  1.9611e-01, -9.8855e-01, -9.9753e-01, -9.9828e-01,\n",
       "         -1.8699e-01,  9.9122e-01, -1.2860e-01,  9.9995e-01, -1.0806e-01,\n",
       "          3.6510e-02,  9.9992e-01,  9.9174e-01, -2.6160e-01, -7.3010e-01,\n",
       "         -3.5882e-01, -3.5616e-01, -9.7910e-01,  9.9900e-01,  3.4435e-01,\n",
       "          1.1815e-01,  9.9975e-01, -9.9999e-01, -9.9868e-01,  9.9257e-01,\n",
       "         -9.9940e-01,  9.9410e-01,  9.9604e-01,  9.8209e-01, -8.6602e-01,\n",
       "          9.9980e-01,  9.9842e-01,  7.7186e-01, -2.8653e-01, -9.9995e-01,\n",
       "          2.9763e-01, -9.8359e-01, -9.9932e-01, -5.9935e-02, -3.1361e-01,\n",
       "         -9.8883e-01,  9.9312e-01, -2.0140e-01,  9.9976e-01,  2.7677e-01,\n",
       "         -9.9835e-01,  4.4382e-01,  9.7058e-02,  2.5459e-02,  9.9886e-01,\n",
       "          9.9996e-01,  3.7915e-02, -9.9190e-01, -8.7562e-02, -9.9728e-01,\n",
       "         -6.4141e-01,  9.9757e-01,  9.9988e-01, -9.9970e-01,  9.9992e-01,\n",
       "         -9.2259e-01, -2.8085e-01,  2.0223e-01, -9.9370e-01,  9.5755e-01,\n",
       "         -1.4151e-01, -7.6620e-02,  9.9999e-01,  9.8321e-01,  1.1266e-01,\n",
       "         -9.9993e-01, -9.4782e-01,  9.9914e-01, -9.9654e-01,  1.3356e-01,\n",
       "          1.0000e+00,  6.5074e-01,  1.0000e+00,  9.9988e-01,  9.9987e-01,\n",
       "         -9.9929e-01, -3.5443e-01,  2.1478e-01, -9.9976e-01,  9.8689e-01,\n",
       "         -9.9686e-01,  6.9243e-01, -4.6540e-01, -9.7248e-02,  1.4399e-01,\n",
       "         -9.9985e-01, -7.5107e-02, -1.1208e-02, -9.8220e-01, -9.9534e-01,\n",
       "         -9.9830e-01, -9.9996e-01,  9.7399e-01,  8.6404e-01, -2.3688e-01,\n",
       "         -3.4004e-01,  2.7930e-01, -1.4633e-01, -9.9998e-01, -9.9965e-01,\n",
       "         -9.9999e-01,  2.3025e-01, -9.3475e-01,  9.9796e-01, -9.9525e-01,\n",
       "          9.9904e-01, -9.9880e-01,  9.9981e-01,  9.6848e-01, -1.3191e-01,\n",
       "         -4.6217e-01, -1.2520e-01, -9.9661e-01,  3.9658e-02, -1.0030e-01,\n",
       "          9.9579e-01,  9.9557e-01,  7.9335e-01, -2.1124e-01,  9.9999e-01,\n",
       "         -9.6627e-01,  9.2938e-01, -1.9586e-01,  9.8913e-01,  1.0000e+00,\n",
       "         -9.9993e-01,  2.8184e-01, -1.0000e+00,  3.4593e-01, -3.5440e-02,\n",
       "          9.9988e-01,  9.9846e-01,  4.9637e-01,  9.9849e-01, -8.3757e-01,\n",
       "         -9.9977e-01,  5.2276e-01, -9.9976e-01,  7.3759e-01,  9.9999e-01,\n",
       "         -7.2120e-02,  5.0912e-01,  9.9999e-01,  4.9808e-01,  9.6481e-01,\n",
       "         -1.1099e-01, -1.0081e-01, -9.9664e-01,  1.6072e-01,  9.1026e-01,\n",
       "          9.7408e-01, -9.8366e-01,  9.6091e-02,  9.8378e-01, -7.7793e-02,\n",
       "          3.8734e-02, -9.9357e-01, -9.9219e-01,  9.9956e-01,  9.9865e-01,\n",
       "          3.4795e-02, -2.9090e-01,  9.9999e-01, -4.9119e-03,  9.9945e-01,\n",
       "          2.7125e-01,  8.1436e-01, -1.8353e-01,  9.9907e-01,  4.4071e-01,\n",
       "          8.0531e-01, -1.0053e-01,  9.9983e-01, -8.4155e-01, -9.9980e-01,\n",
       "          1.7603e-02,  5.6780e-01,  2.7389e-01, -9.8000e-01,  3.0289e-01,\n",
       "         -1.3968e-01,  9.9984e-01, -5.7976e-02, -9.9255e-01,  9.9245e-01,\n",
       "         -9.9497e-01,  6.3511e-01, -9.9999e-01, -9.9784e-01,  9.9957e-01,\n",
       "         -1.0232e-01, -1.0000e+00,  5.6551e-01,  1.0000e+00, -9.8029e-01,\n",
       "          9.9878e-01, -1.4903e-01, -9.9942e-01,  2.1227e-02, -2.4681e-01,\n",
       "         -1.0000e+00, -9.9592e-01, -9.9999e-01, -7.7473e-01, -2.8228e-02,\n",
       "          9.9820e-01, -1.0000e+00,  2.2702e-01, -9.9993e-01,  9.9258e-01,\n",
       "          9.9990e-01,  2.2075e-02, -3.9417e-01,  9.9999e-01,  2.2980e-01,\n",
       "         -4.4352e-02, -2.4510e-01,  1.5053e-01,  3.6953e-03,  9.9171e-01,\n",
       "          5.3051e-01,  9.9959e-01, -9.9942e-01,  1.1029e-01, -2.0716e-01,\n",
       "         -9.9980e-01, -4.2609e-01,  9.0020e-01,  6.7070e-02,  6.7128e-02,\n",
       "         -6.9142e-02,  2.7538e-01,  9.9154e-01,  9.9429e-01,  1.0000e+00,\n",
       "          9.6564e-01,  9.9999e-01,  9.9996e-01, -3.1303e-02, -9.6239e-01,\n",
       "         -6.6139e-01,  1.4474e-02, -9.9991e-01, -9.6977e-01, -9.9816e-01,\n",
       "          8.6780e-01,  1.5467e-01,  1.0000e+00, -9.9996e-01,  9.9999e-01,\n",
       "         -8.2825e-01,  2.1543e-01,  8.5623e-02,  4.4190e-01, -6.4154e-01,\n",
       "         -8.8718e-02,  9.9799e-01,  3.5387e-02,  9.6446e-01,  9.8229e-01,\n",
       "         -8.0878e-02, -1.1245e-01,  1.0527e-01, -2.0045e-01,  9.7696e-01,\n",
       "         -9.6948e-01,  9.6515e-01, -1.1852e-01,  9.9725e-01, -1.4809e-01,\n",
       "         -9.9998e-01,  9.9933e-01, -9.9791e-01, -4.7634e-01, -9.9926e-01,\n",
       "         -9.0770e-01,  1.4134e-01, -9.9700e-01,  9.3253e-01,  9.9114e-01,\n",
       "          4.9088e-01, -3.9774e-01, -9.9998e-01, -7.4246e-01,  9.9270e-01,\n",
       "          9.9967e-01, -9.9996e-01,  9.9962e-01,  9.5330e-01, -8.9292e-01,\n",
       "          3.9113e-01,  9.0346e-01, -9.9827e-01,  9.9977e-01, -9.9998e-01,\n",
       "          1.3274e-01,  9.9438e-01, -4.3007e-02, -9.9978e-01, -9.5942e-01,\n",
       "         -5.2344e-02,  5.8918e-01,  1.1475e-01,  9.9953e-01, -1.9087e-01,\n",
       "          3.9238e-02, -9.9986e-01, -9.7629e-01, -5.0471e-01,  8.6698e-02,\n",
       "          9.9943e-01, -9.9999e-01,  5.3060e-01,  9.9916e-01, -9.9966e-01,\n",
       "         -5.1234e-01, -1.0630e-01, -6.8113e-02,  2.1326e-02, -9.9239e-01,\n",
       "         -9.9994e-01, -9.9884e-01,  9.9940e-01, -4.3828e-02, -3.6806e-01,\n",
       "          9.9480e-01,  9.9976e-01,  9.9970e-01, -9.9446e-01,  5.0919e-01,\n",
       "          9.9935e-01, -1.3105e-01,  2.6032e-01, -7.6205e-01, -5.5247e-01,\n",
       "         -9.5282e-01, -9.7244e-01,  1.9124e-01,  1.2962e-01, -1.7488e-02,\n",
       "         -9.9154e-01,  9.9996e-01,  9.9952e-01,  9.9999e-01,  8.8903e-02,\n",
       "         -8.8882e-01,  1.8167e-01, -6.7778e-01, -9.9854e-01,  1.3466e-01,\n",
       "          9.9930e-01, -9.9933e-01,  2.6653e-02,  5.5429e-01, -9.8507e-01,\n",
       "          9.9832e-01, -9.6721e-01,  4.1320e-01, -1.0000e+00, -9.9978e-01,\n",
       "         -1.0000e+00,  9.9997e-01,  1.9674e-01, -1.6672e-01, -9.6718e-01,\n",
       "          9.9998e-01,  9.9387e-01, -9.1735e-01, -4.7193e-01,  9.9975e-01,\n",
       "         -5.7817e-01, -5.7144e-01, -9.9999e-01, -9.9892e-01,  9.9195e-01,\n",
       "          1.4377e-01,  9.9874e-01,  2.0075e-02, -9.9183e-01,  5.8402e-01,\n",
       "          9.9291e-01,  3.2741e-01, -9.8963e-01, -9.8931e-01,  1.2882e-01,\n",
       "          3.9173e-01,  4.5145e-02,  1.9271e-01,  6.4842e-02,  9.9964e-01,\n",
       "          1.2191e-01, -2.0535e-01, -2.2630e-01,  9.9995e-01,  7.2702e-01,\n",
       "         -9.9564e-01, -8.1579e-01, -2.6391e-02, -9.8403e-01, -9.9974e-01,\n",
       "         -9.9916e-01, -9.4794e-05,  1.4957e-01,  1.9306e-01, -9.7332e-01,\n",
       "         -9.9980e-01, -9.9933e-01, -9.2276e-03, -9.8855e-01, -9.7761e-01,\n",
       "         -7.6015e-02, -9.9963e-01, -9.9301e-01,  9.9805e-01, -9.9983e-01,\n",
       "          9.6171e-02,  9.6892e-01,  9.8420e-01, -9.9993e-01,  2.4473e-02,\n",
       "          9.7227e-01, -9.9465e-01,  2.6383e-01, -9.2860e-01,  1.9504e-02,\n",
       "         -8.9152e-01, -9.9999e-01,  2.5658e-02,  9.9989e-01,  9.9943e-01,\n",
       "          9.9739e-01,  9.3473e-01, -4.1782e-01,  9.5926e-01,  9.9930e-01,\n",
       "          9.9997e-01,  4.4268e-02, -1.0447e-01, -9.9999e-01, -4.2350e-01,\n",
       "          5.0844e-01,  2.1355e-01,  7.4195e-01, -9.9969e-01,  6.0343e-02,\n",
       "         -5.3214e-01,  3.5301e-01,  1.0000e+00,  9.8571e-01, -3.2276e-01,\n",
       "         -1.0000e+00,  1.2722e-01, -4.1201e-01,  4.5704e-02, -9.9053e-01,\n",
       "          2.4799e-01,  9.9999e-01, -9.8644e-01, -2.2987e-01, -9.9801e-01,\n",
       "         -9.9951e-01,  9.9988e-01, -9.9996e-01,  9.9990e-01,  7.4756e-01,\n",
       "         -8.4391e-01, -1.4693e-01, -7.9194e-01,  8.8611e-02,  2.6037e-02,\n",
       "         -1.6280e-01,  9.0116e-02, -3.4240e-02, -9.9998e-01, -7.9399e-02,\n",
       "          9.9669e-01, -1.4876e-01, -8.7017e-01, -9.9867e-01,  1.7052e-01,\n",
       "          9.8400e-01, -9.9928e-01, -9.9975e-01, -2.1707e-02, -3.2120e-01,\n",
       "          3.5809e-01, -4.4313e-01, -1.2111e-01, -6.8106e-02, -9.8377e-01,\n",
       "          8.7137e-02,  9.4949e-01, -6.3541e-01,  8.1962e-01, -9.6095e-01,\n",
       "         -9.6891e-01, -9.7449e-02,  9.9966e-01,  9.9873e-01, -9.9967e-01,\n",
       "         -9.9995e-01, -1.1422e-02, -1.6441e-02, -3.7902e-01,  9.9740e-01,\n",
       "         -8.4094e-02, -9.8938e-01,  1.1875e-01,  8.7534e-02, -3.5972e-01,\n",
       "         -9.6046e-01,  9.9999e-01, -9.9760e-01,  1.0000e+00, -1.0000e+00,\n",
       "         -9.8287e-01, -2.0768e-02,  9.9994e-01, -9.9995e-01, -2.9216e-01,\n",
       "          9.9957e-01, -9.9999e-01, -6.2141e-02, -7.8329e-01,  5.4677e-01,\n",
       "         -1.1496e-01, -6.5187e-02,  6.8955e-01, -9.9936e-01,  2.8135e-02,\n",
       "         -9.9960e-01,  8.7018e-01,  9.6348e-01, -9.9972e-01, -2.3465e-01,\n",
       "         -1.0000e+00,  5.0616e-02,  9.5081e-02, -9.9998e-01,  9.9366e-01,\n",
       "          9.9990e-01, -1.1587e-01,  4.8332e-01, -9.9930e-01,  6.1339e-02,\n",
       "         -2.5975e-01, -9.9837e-01,  8.1738e-03, -9.9978e-01,  3.4635e-01,\n",
       "         -9.9772e-01,  9.6904e-01, -9.9985e-01,  9.9506e-01,  9.9421e-01,\n",
       "         -2.7910e-01, -7.7729e-01, -4.2697e-02, -4.6522e-01, -9.9989e-01,\n",
       "          2.6352e-01, -9.9925e-01, -9.9744e-01,  1.0755e-01,  9.9917e-01,\n",
       "          9.8249e-01,  2.1915e-01,  9.9602e-01, -9.9757e-01, -1.1538e-02,\n",
       "          5.0421e-01,  8.8524e-01,  1.0000e+00, -9.9975e-01, -9.9800e-01,\n",
       "          9.8943e-01, -9.9990e-01, -9.5533e-01,  1.0000e+00, -6.6503e-01,\n",
       "          9.9990e-01,  7.8463e-02, -9.9767e-01, -2.2858e-02,  2.8967e-01,\n",
       "          9.9735e-01, -2.8426e-01, -1.1538e-01,  9.9555e-01, -4.2244e-02,\n",
       "         -7.4108e-02, -6.8273e-01,  9.7689e-01, -1.0551e-01, -1.6000e-01,\n",
       "          9.9085e-01, -9.8683e-01, -9.9957e-01, -9.9759e-01,  6.2463e-02,\n",
       "         -3.0167e-01, -2.7628e-01,  2.5520e-02,  6.7733e-01,  9.9966e-01,\n",
       "         -9.9256e-01,  9.9526e-01, -1.0000e+00, -9.9996e-01,  1.3513e-02,\n",
       "          5.5599e-02,  9.9528e-01,  4.7480e-02, -7.7508e-01,  1.7707e-01,\n",
       "         -9.9295e-01,  9.6266e-01, -9.9716e-01,  9.7839e-01,  1.1253e-01,\n",
       "          1.0181e-01,  9.9992e-01,  9.9952e-01, -8.5731e-02, -9.9334e-01,\n",
       "         -9.8716e-01, -1.1318e-02, -9.9801e-01,  9.9921e-01,  4.8220e-02,\n",
       "          2.0000e-02,  2.2754e-02, -1.6751e-01, -9.9727e-01, -9.9875e-01,\n",
       "          6.7572e-02,  9.8995e-01, -9.9953e-01,  9.7194e-01, -9.9098e-01,\n",
       "          9.9384e-01,  9.9794e-01,  1.0000e+00,  4.7587e-02,  9.8210e-01,\n",
       "         -9.9752e-01, -9.8157e-01,  9.8927e-01,  9.8831e-01,  1.0000e+00,\n",
       "          9.8758e-01,  7.4550e-01,  2.3097e-01, -9.9999e-01,  9.6466e-01,\n",
       "          6.9937e-02, -1.6827e-01,  2.1783e-01, -9.4327e-01, -9.9996e-01,\n",
       "          9.9998e-01, -9.9999e-01, -9.9993e-01, -9.3118e-01, -9.9966e-01,\n",
       "          9.9743e-01,  9.5570e-01,  9.9933e-01,  7.1054e-01, -9.9968e-01,\n",
       "         -9.9342e-01,  5.1537e-02, -9.7247e-01, -9.6316e-01, -4.4939e-02,\n",
       "         -9.9999e-01, -3.5937e-02,  1.2907e-02, -9.4969e-01,  4.4759e-01,\n",
       "         -9.5024e-01,  7.3222e-01,  9.6105e-01, -2.4801e-01,  9.0707e-01,\n",
       "         -9.9283e-01, -9.9579e-01,  1.2176e-01, -1.0000e+00,  9.4894e-01,\n",
       "          9.9989e-01,  3.6741e-02,  6.6101e-01, -8.8345e-01,  6.1203e-02,\n",
       "         -9.9999e-01, -1.0000e+00,  9.8662e-01,  9.9988e-01,  2.3324e-01,\n",
       "         -9.9774e-01,  4.4472e-02, -9.9944e-01, -5.6007e-02,  9.0908e-01,\n",
       "          9.9745e-01, -9.9994e-01,  9.9842e-01, -9.6154e-01,  2.2708e-01,\n",
       "          9.9709e-01, -1.0000e+00,  8.6516e-01, -9.9898e-01,  9.9950e-01,\n",
       "         -1.0000e+00,  9.9981e-01, -3.7142e-01,  6.0262e-02, -1.3808e-01,\n",
       "          9.4821e-01, -9.9981e-01, -2.0339e-01,  9.7496e-01,  9.7356e-01,\n",
       "         -6.0120e-02,  9.9381e-01,  1.4474e-01]], grad_fn=<TanhBackward0>), hidden_states=None, past_key_values=None, attentions=(tensor([[[[4.7339e-01, 2.0157e-04, 1.6024e-04,  ..., 2.2801e-04,\n",
       "           1.6578e-04, 5.2252e-01],\n",
       "          [7.3942e-03, 1.0723e-01, 1.5628e-01,  ..., 8.0811e-02,\n",
       "           9.7105e-02, 2.0707e-03],\n",
       "          [1.5121e-02, 1.4215e-01, 1.5029e-01,  ..., 8.3301e-02,\n",
       "           1.0341e-01, 2.7057e-03],\n",
       "          ...,\n",
       "          [2.9945e-02, 7.0421e-02, 7.2816e-02,  ..., 2.7880e-01,\n",
       "           1.3033e-01, 6.4063e-03],\n",
       "          [2.8656e-02, 5.5870e-02, 4.5547e-02,  ..., 1.6733e-01,\n",
       "           1.1745e-01, 1.1685e-02],\n",
       "          [4.4029e-01, 5.6773e-04, 4.0400e-04,  ..., 4.2358e-04,\n",
       "           2.9588e-04, 5.5453e-01]],\n",
       "\n",
       "         [[9.8947e-01, 2.3263e-04, 2.0225e-04,  ..., 7.0940e-05,\n",
       "           4.2816e-05, 3.0036e-03],\n",
       "          [6.9084e-02, 5.0950e-04, 9.2979e-01,  ..., 1.3500e-06,\n",
       "           2.1109e-10, 1.4403e-04],\n",
       "          [2.9034e-02, 9.6594e-01, 2.0383e-04,  ..., 9.7950e-09,\n",
       "           1.9201e-07, 1.8223e-07],\n",
       "          ...,\n",
       "          [2.3573e-02, 5.8630e-06, 1.0208e-07,  ..., 2.3398e-05,\n",
       "           9.4223e-01, 4.2026e-04],\n",
       "          [1.0768e-02, 5.3642e-10, 2.1844e-07,  ..., 9.7457e-01,\n",
       "           1.5369e-04, 1.4442e-02],\n",
       "          [9.7167e-01, 1.6687e-04, 2.6362e-07,  ..., 1.3797e-04,\n",
       "           9.0269e-03, 4.6818e-03]],\n",
       "\n",
       "         [[1.8457e-01, 3.8332e-02, 2.8800e-02,  ..., 4.0477e-02,\n",
       "           5.1682e-02, 2.3733e-01],\n",
       "          [5.4918e-01, 2.0316e-01, 9.2199e-02,  ..., 3.2089e-03,\n",
       "           5.3291e-03, 6.3769e-02],\n",
       "          [3.4421e-01, 4.1482e-01, 1.3522e-01,  ..., 1.9955e-03,\n",
       "           2.5283e-03, 3.8519e-02],\n",
       "          ...,\n",
       "          [1.5498e-01, 3.4474e-02, 3.1945e-02,  ..., 1.2613e-01,\n",
       "           2.0283e-02, 4.2934e-02],\n",
       "          [1.4582e-01, 3.8880e-02, 5.0443e-02,  ..., 6.3590e-02,\n",
       "           6.8925e-02, 4.9240e-02],\n",
       "          [4.1185e-02, 3.8534e-02, 3.9695e-02,  ..., 1.1075e-01,\n",
       "           4.7893e-02, 1.3656e-01]],\n",
       "\n",
       "         ...,\n",
       "\n",
       "         [[6.6272e-01, 4.2920e-02, 3.8075e-02,  ..., 2.4612e-02,\n",
       "           2.4799e-02, 7.3273e-02],\n",
       "          [5.5611e-01, 5.2675e-03, 6.6442e-03,  ..., 2.5692e-02,\n",
       "           1.6479e-02, 2.4560e-01],\n",
       "          [5.8443e-01, 1.0078e-02, 5.4145e-03,  ..., 2.6422e-02,\n",
       "           2.3033e-02, 2.1771e-01],\n",
       "          ...,\n",
       "          [5.5592e-01, 4.7155e-02, 3.1719e-02,  ..., 2.3203e-01,\n",
       "           2.4290e-02, 1.7455e-02],\n",
       "          [3.4102e-01, 4.8507e-02, 5.2026e-02,  ..., 7.5923e-02,\n",
       "           2.3174e-01, 8.5760e-02],\n",
       "          [3.7989e-01, 6.2278e-02, 6.5617e-02,  ..., 1.4786e-02,\n",
       "           9.8158e-03, 1.3650e-01]],\n",
       "\n",
       "         [[9.7685e-01, 3.9495e-03, 3.0947e-03,  ..., 1.1939e-03,\n",
       "           1.8019e-03, 3.9849e-03],\n",
       "          [4.6951e-03, 1.7083e-02, 4.2804e-01,  ..., 2.8031e-03,\n",
       "           1.3974e-03, 9.0067e-03],\n",
       "          [1.5016e-03, 1.1248e-03, 1.7732e-02,  ..., 4.4981e-03,\n",
       "           2.5857e-04, 3.2184e-03],\n",
       "          ...,\n",
       "          [6.1237e-03, 4.3619e-04, 1.1747e-03,  ..., 2.6967e-02,\n",
       "           3.4746e-01, 5.9659e-01],\n",
       "          [1.2573e-03, 1.7370e-05, 1.7997e-04,  ..., 1.7219e-03,\n",
       "           5.3681e-03, 9.8352e-01],\n",
       "          [9.9457e-01, 1.9066e-05, 8.0891e-06,  ..., 1.1931e-04,\n",
       "           3.5866e-04, 4.7029e-03]],\n",
       "\n",
       "         [[4.3026e-01, 2.6039e-02, 1.9992e-02,  ..., 1.5640e-02,\n",
       "           1.3671e-02, 3.1744e-01],\n",
       "          [7.0061e-01, 6.8495e-02, 1.6126e-02,  ..., 9.1919e-03,\n",
       "           5.2350e-04, 3.7767e-03],\n",
       "          [2.3568e-01, 6.1989e-01, 3.4948e-02,  ..., 2.7839e-03,\n",
       "           1.2899e-03, 8.8733e-04],\n",
       "          ...,\n",
       "          [1.3580e-01, 1.6464e-03, 2.9664e-03,  ..., 1.7722e-02,\n",
       "           8.4160e-03, 2.2945e-02],\n",
       "          [3.8122e-02, 1.0532e-04, 6.6357e-05,  ..., 8.7458e-01,\n",
       "           2.5437e-02, 7.9264e-03],\n",
       "          [1.0136e-01, 3.7974e-03, 1.0306e-03,  ..., 3.8167e-02,\n",
       "           1.4358e-01, 6.8383e-01]]]], grad_fn=<SoftmaxBackward0>), tensor([[[[4.4834e-01, 6.2453e-03, 9.6037e-03,  ..., 1.2230e-02,\n",
       "           1.3305e-02, 4.3331e-01],\n",
       "          [4.9353e-01, 1.7371e-03, 4.6629e-03,  ..., 1.9118e-03,\n",
       "           1.5086e-05, 4.8898e-01],\n",
       "          [2.0275e-01, 5.8239e-01, 5.4162e-04,  ..., 4.6522e-06,\n",
       "           6.0709e-04, 1.8587e-01],\n",
       "          ...,\n",
       "          [3.3765e-01, 9.7052e-05, 7.3041e-05,  ..., 5.2308e-03,\n",
       "           4.2044e-03, 3.2854e-01],\n",
       "          [8.0185e-02, 4.2621e-06, 5.2506e-06,  ..., 8.3455e-01,\n",
       "           8.3153e-05, 8.1654e-02],\n",
       "          [4.4946e-01, 6.1254e-03, 9.0586e-03,  ..., 1.1778e-02,\n",
       "           1.4080e-02, 4.3439e-01]],\n",
       "\n",
       "         [[4.5866e-01, 1.0913e-02, 9.0922e-03,  ..., 8.3537e-03,\n",
       "           6.8308e-03, 4.4644e-01],\n",
       "          [2.1441e-01, 2.1856e-02, 2.2931e-02,  ..., 4.9742e-02,\n",
       "           7.4729e-02, 2.1691e-01],\n",
       "          [1.5582e-01, 3.1951e-02, 2.9726e-02,  ..., 7.8569e-02,\n",
       "           1.0661e-01, 1.5737e-01],\n",
       "          ...,\n",
       "          [4.0520e-01, 1.0686e-02, 1.1761e-02,  ..., 2.3171e-02,\n",
       "           5.1389e-02, 4.0410e-01],\n",
       "          [2.6092e-01, 3.3466e-02, 3.2810e-02,  ..., 5.2695e-02,\n",
       "           1.3674e-01, 2.5969e-01],\n",
       "          [4.6113e-01, 1.0021e-02, 8.3512e-03,  ..., 8.0308e-03,\n",
       "           6.4883e-03, 4.4919e-01]],\n",
       "\n",
       "         [[4.9207e-01, 1.3082e-02, 6.7034e-03,  ..., 5.0877e-03,\n",
       "           2.6920e-03, 4.4407e-01],\n",
       "          [4.6119e-01, 4.5001e-02, 4.7820e-02,  ..., 4.0987e-04,\n",
       "           7.7164e-04, 4.2816e-01],\n",
       "          [3.9862e-01, 1.1313e-01, 7.2937e-02,  ..., 5.8466e-04,\n",
       "           1.1929e-03, 3.7088e-01],\n",
       "          ...,\n",
       "          [3.5864e-01, 3.5251e-04, 1.9934e-04,  ..., 2.8219e-01,\n",
       "           2.2779e-02, 3.3037e-01],\n",
       "          [3.8628e-01, 2.0212e-04, 1.6556e-04,  ..., 2.2012e-01,\n",
       "           2.7915e-02, 3.6070e-01],\n",
       "          [4.9156e-01, 1.3739e-02, 6.8590e-03,  ..., 5.4474e-03,\n",
       "           2.8176e-03, 4.4310e-01]],\n",
       "\n",
       "         ...,\n",
       "\n",
       "         [[4.4705e-01, 1.1853e-02, 1.7489e-02,  ..., 1.2464e-02,\n",
       "           2.0732e-02, 4.2542e-01],\n",
       "          [1.9314e-01, 1.1948e-02, 2.8280e-02,  ..., 5.1179e-02,\n",
       "           2.8573e-02, 1.9882e-01],\n",
       "          [2.2520e-01, 5.3305e-03, 1.3576e-02,  ..., 8.6171e-02,\n",
       "           3.0279e-02, 2.3462e-01],\n",
       "          ...,\n",
       "          [4.1623e-01, 2.0165e-03, 1.9175e-03,  ..., 5.7639e-02,\n",
       "           6.8122e-02, 4.1974e-01],\n",
       "          [4.2768e-01, 2.8992e-02, 1.2727e-02,  ..., 2.0744e-02,\n",
       "           2.4712e-02, 4.1618e-01],\n",
       "          [4.4836e-01, 1.1442e-02, 1.6813e-02,  ..., 1.2114e-02,\n",
       "           2.0362e-02, 4.2705e-01]],\n",
       "\n",
       "         [[4.7792e-01, 3.5186e-03, 4.0918e-03,  ..., 6.6713e-03,\n",
       "           5.2401e-03, 4.5058e-01],\n",
       "          [3.6746e-01, 4.2753e-02, 3.1952e-02,  ..., 2.9704e-02,\n",
       "           3.4063e-02, 3.5978e-01],\n",
       "          [2.4423e-01, 8.6132e-02, 5.1927e-02,  ..., 4.3383e-02,\n",
       "           4.9595e-02, 2.3705e-01],\n",
       "          ...,\n",
       "          [2.0769e-01, 6.9585e-02, 4.1590e-02,  ..., 2.5058e-02,\n",
       "           3.1345e-02, 2.0289e-01],\n",
       "          [2.1574e-01, 5.6030e-02, 3.6902e-02,  ..., 2.2581e-02,\n",
       "           3.3132e-02, 2.1746e-01],\n",
       "          [4.7750e-01, 3.5688e-03, 4.1430e-03,  ..., 6.5471e-03,\n",
       "           5.1201e-03, 4.5068e-01]],\n",
       "\n",
       "         [[4.8036e-01, 1.0769e-02, 1.4052e-02,  ..., 6.7828e-03,\n",
       "           2.1379e-03, 4.6202e-01],\n",
       "          [3.6297e-01, 3.3872e-02, 3.6546e-02,  ..., 1.6188e-02,\n",
       "           3.7629e-02, 3.4824e-01],\n",
       "          [3.3658e-01, 5.2015e-02, 4.1505e-02,  ..., 1.6350e-02,\n",
       "           2.2712e-02, 3.2447e-01],\n",
       "          ...,\n",
       "          [3.5534e-01, 1.4981e-02, 1.2946e-02,  ..., 4.8009e-02,\n",
       "           6.1134e-02, 3.4361e-01],\n",
       "          [3.6191e-01, 2.9100e-02, 2.1762e-02,  ..., 6.1674e-02,\n",
       "           2.9048e-02, 3.5496e-01],\n",
       "          [4.8014e-01, 1.0876e-02, 1.4150e-02,  ..., 6.8309e-03,\n",
       "           2.0997e-03, 4.6207e-01]]]], grad_fn=<SoftmaxBackward0>), tensor([[[[3.9891e-01, 3.2784e-02, 2.4687e-02,  ..., 1.7321e-02,\n",
       "           3.9670e-02, 3.9297e-01],\n",
       "          [4.7971e-01, 1.5037e-02, 8.6546e-03,  ..., 1.0274e-03,\n",
       "           3.5430e-03, 4.7458e-01],\n",
       "          [4.8897e-01, 1.1879e-02, 6.5953e-03,  ..., 3.1353e-04,\n",
       "           1.7954e-03, 4.8185e-01],\n",
       "          ...,\n",
       "          [2.6914e-01, 4.2734e-02, 3.2177e-02,  ..., 1.2982e-02,\n",
       "           9.1105e-03, 2.6440e-01],\n",
       "          [2.3589e-01, 3.3962e-02, 3.2721e-02,  ..., 1.9179e-02,\n",
       "           1.7260e-02, 2.3182e-01],\n",
       "          [3.9961e-01, 3.2576e-02, 2.4565e-02,  ..., 1.7303e-02,\n",
       "           3.9498e-02, 3.9367e-01]],\n",
       "\n",
       "         [[1.6141e-02, 7.8894e-02, 5.8066e-02,  ..., 4.4682e-02,\n",
       "           2.4308e-01, 1.6040e-02],\n",
       "          [7.2478e-02, 5.1906e-02, 7.4902e-01,  ..., 1.0675e-03,\n",
       "           1.8460e-03, 7.1537e-02],\n",
       "          [2.0008e-01, 4.2457e-01, 9.5542e-02,  ..., 1.5464e-03,\n",
       "           2.8781e-03, 1.9680e-01],\n",
       "          ...,\n",
       "          [3.0002e-01, 2.0261e-03, 1.3745e-03,  ..., 2.2949e-02,\n",
       "           3.0709e-01, 2.9671e-01],\n",
       "          [3.6473e-01, 1.1348e-03, 1.7345e-03,  ..., 2.4132e-01,\n",
       "           1.9515e-02, 3.6302e-01],\n",
       "          [1.6240e-02, 7.9222e-02, 5.7985e-02,  ..., 4.4954e-02,\n",
       "           2.4429e-01, 1.6138e-02]],\n",
       "\n",
       "         [[4.8673e-01, 3.0284e-03, 2.1756e-03,  ..., 8.7364e-04,\n",
       "           4.0764e-03, 4.8189e-01],\n",
       "          [4.8429e-01, 4.3746e-03, 1.1648e-03,  ..., 6.4299e-03,\n",
       "           3.6043e-03, 4.7635e-01],\n",
       "          [4.8060e-01, 1.1210e-03, 7.4627e-03,  ..., 3.8803e-03,\n",
       "           6.5564e-03, 4.7239e-01],\n",
       "          ...,\n",
       "          [4.7546e-01, 2.7732e-03, 1.2585e-03,  ..., 2.9364e-02,\n",
       "           1.2574e-02, 4.6843e-01],\n",
       "          [4.3836e-01, 1.2185e-03, 6.5413e-03,  ..., 4.0262e-02,\n",
       "           7.0128e-02, 4.3225e-01],\n",
       "          [4.8666e-01, 3.0616e-03, 2.1799e-03,  ..., 8.8530e-04,\n",
       "           4.1063e-03, 4.8183e-01]],\n",
       "\n",
       "         ...,\n",
       "\n",
       "         [[3.7269e-02, 9.1285e-02, 7.5535e-02,  ..., 1.3422e-01,\n",
       "           1.8385e-01, 3.6876e-02],\n",
       "          [3.0592e-01, 1.5257e-02, 4.9301e-02,  ..., 1.6543e-02,\n",
       "           1.0398e-02, 3.0112e-01],\n",
       "          [3.7352e-01, 9.1805e-03, 1.5990e-02,  ..., 1.6323e-02,\n",
       "           1.0456e-02, 3.6761e-01],\n",
       "          ...,\n",
       "          [4.6736e-01, 5.0917e-04, 1.7224e-03,  ..., 2.1807e-02,\n",
       "           2.8633e-02, 4.6424e-01],\n",
       "          [4.9249e-01, 5.9115e-04, 6.6500e-04,  ..., 7.5498e-03,\n",
       "           4.9124e-03, 4.8815e-01],\n",
       "          [3.7640e-02, 9.1436e-02, 7.5698e-02,  ..., 1.3385e-01,\n",
       "           1.8321e-01, 3.7244e-02]],\n",
       "\n",
       "         [[2.9748e-02, 4.3872e-02, 3.0524e-02,  ..., 1.3963e-01,\n",
       "           5.1429e-01, 2.9596e-02],\n",
       "          [4.9266e-01, 7.6430e-03, 8.5675e-03,  ..., 2.9946e-04,\n",
       "           2.7567e-04, 4.8621e-01],\n",
       "          [4.7265e-01, 2.7260e-02, 2.4605e-02,  ..., 8.2206e-04,\n",
       "           4.1817e-04, 4.6497e-01],\n",
       "          ...,\n",
       "          [2.3097e-01, 6.0428e-03, 1.4093e-02,  ..., 2.2270e-02,\n",
       "           1.1651e-02, 2.2686e-01],\n",
       "          [1.8344e-01, 5.4326e-03, 2.0475e-02,  ..., 5.4883e-02,\n",
       "           2.7051e-02, 1.8030e-01],\n",
       "          [2.9758e-02, 4.3411e-02, 3.0347e-02,  ..., 1.3997e-01,\n",
       "           5.1565e-01, 2.9606e-02]],\n",
       "\n",
       "         [[2.0361e-01, 2.9099e-02, 2.2362e-02,  ..., 1.0769e-01,\n",
       "           1.2781e-01, 1.9970e-01],\n",
       "          [1.4954e-01, 3.7568e-03, 5.0977e-01,  ..., 2.1051e-03,\n",
       "           2.4752e-03, 1.4714e-01],\n",
       "          [2.1207e-01, 2.7262e-03, 1.3863e-02,  ..., 1.7853e-03,\n",
       "           1.4961e-03, 2.0937e-01],\n",
       "          ...,\n",
       "          [4.5067e-01, 6.0439e-04, 1.3214e-03,  ..., 1.3587e-02,\n",
       "           4.3396e-02, 4.4994e-01],\n",
       "          [4.9368e-01, 2.6799e-04, 1.1765e-03,  ..., 2.5516e-03,\n",
       "           3.7241e-03, 4.9480e-01],\n",
       "          [2.0471e-01, 2.9008e-02, 2.2125e-02,  ..., 1.0741e-01,\n",
       "           1.2701e-01, 2.0078e-01]]]], grad_fn=<SoftmaxBackward0>)), cross_attentions=None)"
      ]
     },
     "execution_count": 59,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Forward pass; output_attentions=True additionally returns per-layer attention maps\n",
     "output=model(**inputs,output_attentions=True)\n",
     "output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "id": "be02aa3e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([1, 13, 768])"
      ]
     },
     "execution_count": 60,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "output.last_hidden_state.size()\n",
     "# One sample: ['[CLS]', '小', '小', '的', '我', '也', '有', '大', '大', '的', '梦', '想', '[SEP]'] => sequence length 13\n",
     "# 768: each token is encoded as a 768-dim vector jointly representing its semantic, syntactic and contextual features"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1207f1d1",
   "metadata": {},
   "source": [
    "### 带Model Head的模型"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1ef8ee96",
   "metadata": {},
   "source": [
    "**实际中需要**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "id": "04abc118",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at E:\\HuggFace_model\\rbt3 were not used when initializing BertForSequenceClassification: ['cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.weight']\n",
      "- This IS expected if you are initializing BertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
      "Some weights of BertForSequenceClassification were not initialized from the model checkpoint at E:\\HuggFace_model\\rbt3 and are newly initialized: ['classifier.weight', 'classifier.bias']\n",
      "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "SequenceClassifierOutput(loss=None, logits=tensor([[0.7718, 0.0737]], grad_fn=<AddmmBackward0>), hidden_states=None, attentions=None)"
      ]
     },
     "execution_count": 61,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "from transformers import AutoModelForSequenceClassification\n",
     "# Loads the pretrained backbone plus a randomly initialized classification head\n",
     "# (hence the \"newly initialized\" warning above)\n",
     "clz_model=AutoModelForSequenceClassification.from_pretrained(model_path)\n",
     "clz_model(**inputs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "id": "f4b661d9",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2"
      ]
     },
     "execution_count": 62,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "\"\"\"上面写logits为2个\"\"\"\n",
     "# The head defaults to num_labels=2, which is why the logits above have 2 entries\n",
     "clz_model.config.num_labels"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "556c191f",
   "metadata": {},
   "source": [
    "## 模型微调代码实例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "id": "4cff0f87",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Dataset used: https://github.com/SophonPlus/ChineseNlpCorpus — the hotel-review subset (ChnSentiCorp_htl_all)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "febc7981",
   "metadata": {},
   "source": [
    "### 文本分类实例"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "7ff505c4",
   "metadata": {},
   "source": [
    "#### Step1:导入相关包"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "id": "53335eb7",
   "metadata": {},
   "outputs": [],
   "source": [
    "from transformers import AutoModel,AutoTokenizer,AutoModelForSequenceClassification"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d6f2451f",
   "metadata": {},
   "source": [
    "#### step2:加载数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "id": "d5e6982f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>label</th>\n",
       "      <th>review</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1</td>\n",
       "      <td>距离川沙公路较近,但是公交指示不对,如果是\"蔡陆线\"的话,会非常麻烦.建议用别的路线.房间较...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1</td>\n",
       "      <td>商务大床房，房间很大，床有2M宽，整体感觉经济实惠不错!</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1</td>\n",
       "      <td>早餐太差，无论去多少人，那边也不加食品的。酒店应该重视一下这个问题了。房间本身很好。</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1</td>\n",
       "      <td>宾馆在小街道上，不大好找，但还好北京热心同胞很多~宾馆设施跟介绍的差不多，房间很小，确实挺小...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1</td>\n",
       "      <td>CBD中心,周围没什么店铺,说5星有点勉强.不知道为什么卫生间没有电吹风</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   label                                             review\n",
       "0      1  距离川沙公路较近,但是公交指示不对,如果是\"蔡陆线\"的话,会非常麻烦.建议用别的路线.房间较...\n",
       "1      1                       商务大床房，房间很大，床有2M宽，整体感觉经济实惠不错!\n",
       "2      1         早餐太差，无论去多少人，那边也不加食品的。酒店应该重视一下这个问题了。房间本身很好。\n",
       "3      1  宾馆在小街道上，不大好找，但还好北京热心同胞很多~宾馆设施跟介绍的差不多，房间很小，确实挺小...\n",
       "4      1               CBD中心,周围没什么店铺,说5星有点勉强.不知道为什么卫生间没有电吹风"
      ]
     },
     "execution_count": 65,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import pandas as pd\n",
    "data=pd.read_csv(\"ChineseNlpCorpus-master\\ChnSentiCorp_htl_all.csv\")\n",
    "data.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "id": "daab0c9c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "7766"
      ]
     },
     "execution_count": 66,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "id": "3f05b747",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "7765"
      ]
     },
     "execution_count": 67,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 清理空的数据\n",
    "data=data.dropna()\n",
    "len(data)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a2cfb209",
   "metadata": {},
   "source": [
    "#### Step3：创建Dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "id": "129156f1",
   "metadata": {},
   "outputs": [],
   "source": [
    "from torch.utils.data import Dataset\n",
    "\n",
    "class MyDataSet(Dataset):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.data=pd.read_csv(\"ChineseNlpCorpus-master\\ChnSentiCorp_htl_all.csv\")\n",
    "        self.data=self.data.dropna()\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        \"\"\"分别是x和y\"\"\"\n",
    "        return self.data.iloc[index][\"review\"],self.data.iloc[index][\"label\"]\n",
    "    \n",
    "    def __len__(self):\n",
    "        return len(self.data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "id": "bcc20eaa",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "('距离川沙公路较近,但是公交指示不对,如果是\"蔡陆线\"的话,会非常麻烦.建议用别的路线.房间较为简单.', 1)\n",
      "('商务大床房，房间很大，床有2M宽，整体感觉经济实惠不错!', 1)\n",
      "('早餐太差，无论去多少人，那边也不加食品的。酒店应该重视一下这个问题了。房间本身很好。', 1)\n",
      "('宾馆在小街道上，不大好找，但还好北京热心同胞很多~宾馆设施跟介绍的差不多，房间很小，确实挺小，但加上低价位因素，还是无超所值的；环境不错，就在小胡同内，安静整洁，暖气好足-_-||。。。呵还有一大优势就是从宾馆出发，步行不到十分钟就可以到梅兰芳故居等等，京味小胡同，北海距离好近呢。总之，不错。推荐给节约消费的自助游朋友~比较划算，附近特色小吃很多~', 1)\n",
      "('CBD中心,周围没什么店铺,说5星有点勉强.不知道为什么卫生间没有电吹风', 1)\n"
     ]
    }
   ],
   "source": [
     "dataset=MyDataSet()\n",
     "for i in range(5):\n",
     "    print(dataset[i])# each item is (review text x, label y)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1c051bf1",
   "metadata": {},
   "source": [
    "#### Step4:划分数据集"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "id": "ca5eff72",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(6989, 776)"
      ]
     },
     "execution_count": 70,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "from torch.utils.data import random_split\n",
     "\n",
     "# Random 90/10 train/validation split of the full dataset\n",
     "trainset,valiset=random_split(dataset,lengths=[0.9,0.1])\n",
     "len(trainset),len(valiset)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "id": "164bcd23",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "('新的风格还是蛮精致的，掩盖了房间狭小的天生不足，但是有些细节尚不周到，比如那个烧水的电壶，塑料味道很重，恐怕是不合格产品.', 1)\n",
      "('住了这里一周，感觉去张江这边公司很近且打车方便，房间和环境个人感觉还不错，如果要求不高，路对面的小店吃吃饭也可以，总体感觉不错!', 1)\n",
      "('总体感觉一般，结账的时候等候时间太长，很慢。。', 0)\n",
      "('优点：大堂和客房部的服务非常好，很热情（因为没有冰箱特地帮我们从别出弄来了冰块）特色大床房送茶香枕、一些杂志比较有意思地理位置不错，周围有家乐福等购物广场，交通也算方便送的早餐很棒，自助餐非常丰盛，中西式都有，加菜速度很快缺点：隔音极差，晚上被隔壁的客人说话吵醒最近酒店在装修，早上6:30就开工，完全睡不着了没有小冰箱电视没有卫星频道设施如马桶、毛巾稍有陈旧，不过还行', 1)\n",
      "('太令人失望了。通过携程预定的，应含双早，可是，总台没给餐券，餐厅要收每人10元，才上餐。浴室的龙头坏的，头天入住报修后，到第二天退房也没人修。浴帘脏得看不出本来的颜色。太差劲了。', 0)\n",
      "('除了在湖边上这点好处之外.没有任何可以一提的优点.房间老旧,卧具破烂,脏兮兮的,我真服了!!!', 0)\n",
      "('这个酒店真不错，房间超大，卫浴设施很干净，窗子很大阳光充足。美中不足，餐厅不敢恭维。但服务人员态度超好，本人在住店期间身体不适得到了家人一般的照顾，感激得很！强力推荐！', 1)\n",
      "('今年9月份去的，设施很好。有一张躺椅，很是惬意。服务员服务态度很好，恰逢我过生日，还有电话祝贺及赠送国盘，在边疆由此待遇很是享受。酒店西式早餐38元美味，品种较齐全，性价比高。距离五一夜市咫尺之遥。附近一家早餐点（汉餐）经济实惠。补充点评2007年9月27日：另外，酒店铺的是纯羊毛地毯，走进房间，感觉味道很清新，没有刺鼻的羊膻味。但是千万注意不要弄脏了，赔偿金额很高。', 1)\n",
      "('我本人办的有锦江之星的会员卡，相对来说锦江住的比较多，石家庄的锦江我只住过平安大街店，感觉非常不错，这次是因为平安大街的锦江之星没有商务房了，所以才选择了从未住过的如家。如家，外人的评价一直挺不错的，我就亲身去感受了一下，感觉非常失望。这是我第一次入住如家，不知道是不是这家酒店的使用率太高了，设施都比较陈旧。浴巾的边沿部分都磨烂了，被子也感觉洗了很多次旧旧的。周六外出游玩回来的时候竟然发现浴室的梳子没有更换、香皂没有更换，黄色牙刷的刷头与握柄中间竟然有黑黑的东西，看起来好脏。一般酒店的隔音都不会很好，可是我没想到如家国大店的隔音竟然差到门口塑料袋的摩擦声都可以听得到。而且浴室特别小，唯一比较满意的地方大概就是工作人员的服务了，还不错，总之，我不会再去第二次如家了，太差了，让我很失望，对不起这个价格。补充点评2008年7月28日：以上评价只代表我和我老公的意见，只把我们入住时遇到的情况写了出来，可能我们遇到的只是个案，不过让我感到失望是真的。另外再补充一点，如家的前台工作人员，穿的花衬衫确实有热带风情，只是感觉很花哨，不喜欢。', 0)\n",
      "('整体来说感觉还是非常好的.,,宽带免费...服务还算不错...就是地方可能偏了一点...楼下没有出租车打...要出门自己打车麻烦了点...价格可以接受很合理....', 1)\n"
     ]
    }
   ],
   "source": [
     "# After the shuffled split the samples mix positive and negative reviews\n",
     "# (the raw file lists all positives first, then all negatives)\n",
     "for i in range(10):\n",
     "    print(trainset[i])"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "578dc2bd",
   "metadata": {},
   "source": [
    "#### Step5:创建Dataloader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "id": "04495e84",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 73,
   "id": "c658b046",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Tokenizer must come from the same checkpoint as the model\n",
     "tokenizer=AutoTokenizer.from_pretrained(r\"E:\\HuggFace_model\\rbt3\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 80,
   "id": "2b01ea79",
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"\n",
    "假设你有一个装满水果的篮子（Dataset），每次从篮子里抓一把水果（一个 batch），\n",
    "但抓出来的水果可能有不同大小或形状（例如苹果、香蕉、葡萄混合）。\n",
    "collate_fn 的作用就是告诉程序如何把这一把水果整理成统一格式（例如按大小排序、填充成相同形状、打包成盒子等），\n",
    "以便后续处理（如喂给模型）\n",
    "\"\"\"\n",
    "def collate_func(batch):\n",
    "    texts,labels=[],[]\n",
    "    for item in batch:\n",
    "        texts.append(item[0])\n",
    "        labels.append(item[1])\n",
    "    inputs=tokenizer(texts,max_length=128,padding=\"max_length\",truncation=True,return_tensors=\"pt\")\n",
    "    inputs[\"labels\"]=torch.tensor(labels)#y值设为叫labels\n",
    "\n",
    "    return inputs\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 81,
   "id": "43ea91f3",
   "metadata": {},
   "outputs": [],
   "source": [
     "from torch.utils.data import DataLoader\n",
     "\n",
     "# Shuffle only the training split; collate_func tokenizes and pads each batch\n",
     "train_loader=DataLoader(trainset,batch_size=32,shuffle=True,collate_fn=collate_func)\n",
     "valid_loader=DataLoader(valiset,batch_size=64,shuffle=False,collate_fn=collate_func)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 76,
   "id": "bfc029ee",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'input_ids': tensor([[ 101, 2769,  857,  ...,    0,    0,    0],\n",
       "        [ 101, 6821,  702,  ...,  511, 1184,  102],\n",
       "        [ 101, 1282,  671,  ..., 7506, 1744,  102],\n",
       "        ...,\n",
       "        [ 101, 2791, 7313,  ...,    0,    0,    0],\n",
       "        [ 101, 6133, 1041,  ...,    0,    0,    0],\n",
       "        [ 101, 6820,  679,  ...,    0,    0,    0]]), 'token_type_ids': tensor([[0, 0, 0,  ..., 0, 0, 0],\n",
       "        [0, 0, 0,  ..., 0, 0, 0],\n",
       "        [0, 0, 0,  ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0,  ..., 0, 0, 0],\n",
       "        [0, 0, 0,  ..., 0, 0, 0],\n",
       "        [0, 0, 0,  ..., 0, 0, 0]]), 'attention_mask': tensor([[1, 1, 1,  ..., 0, 0, 0],\n",
       "        [1, 1, 1,  ..., 1, 1, 1],\n",
       "        [1, 1, 1,  ..., 1, 1, 1],\n",
       "        ...,\n",
       "        [1, 1, 1,  ..., 0, 0, 0],\n",
       "        [1, 1, 1,  ..., 0, 0, 0],\n",
       "        [1, 1, 1,  ..., 0, 0, 0]]), 'labels': tensor([1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0,\n",
       "        1, 0, 1, 1, 1, 1, 1, 1])}"
      ]
     },
     "execution_count": 76,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "next(enumerate(train_loader))[1]"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "459fae81",
   "metadata": {},
   "source": [
    "#### Step6:创建模型与优化器"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 85,
   "id": "331fc892",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at E:\\HuggFace_model\\rbt3 were not used when initializing BertForSequenceClassification: ['cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.dense.bias', 'cls.seq_relationship.weight']\n",
      "- This IS expected if you are initializing BertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
      "Some weights of BertForSequenceClassification were not initialized from the model checkpoint at E:\\HuggFace_model\\rbt3 and are newly initialized: ['classifier.weight', 'classifier.bias']\n",
      "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n"
     ]
    }
   ],
   "source": [
     "from torch.optim import Adam\n",
     "model=AutoModelForSequenceClassification.from_pretrained(r\"E:\\HuggFace_model\\rbt3\")# model: rbt3 backbone + fresh classification head\n",
     "\n",
     "if torch.cuda.is_available():\n",
     "    model=model.cuda()\n",
     "\n",
     "\"\"\"因为做的是迁移学习，不需要那么高的学习率\"\"\"\n",
     "# Transfer learning: a small LR (2e-5) avoids wrecking the pretrained weights\n",
     "optimizer=Adam(model.parameters(),lr=2e-5)# optimizer"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "199fcfd5",
   "metadata": {},
   "source": [
    "#### Step7:训练与验证"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 86,
   "id": "1c79083b",
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "def evaluate():\n",
    "    model.eval()\n",
    "    acc_num=0\n",
    "    with torch.inference_mode():#禁止修改张量，禁止梯度计算\n",
    "        for batch in valid_loader:\n",
    "            if torch.cuda.is_available():\n",
    "                batch={k:v.cuda() for k,v in batch.items()}\n",
    "            output=model(**batch)\n",
    "            pred=torch.argmax(output.logits,dim=-1)\n",
    "            acc_num+=(pred.long()==batch[\"labels\"].long()).float().sum()\n",
    "    return acc_num/len(valiset)\n",
    "\n",
    "def train(epoch=3,log_step=100):\n",
    "    global_step=0\n",
    "    for ep in range(epoch):\n",
    "        model.train()\n",
    "        for batch in train_loader:\n",
    "            if torch.cuda.is_available():\n",
    "                \"\"\"\n",
    "                转移后的 batch 中的每个张量（v）都在 GPU 上，而字典结构（k）本身不涉及设备，只是字段名。\n",
    "                \"\"\"\n",
    "                batch={k:v.cuda() for k,v in batch.items()}#k是键，id,labels等等，v就是值，把这些值搬到GPU\n",
    "            optimizer.zero_grad()\n",
    "            output=model(**batch)\n",
    "            output.loss.backward()#model的output有loss，来做反向传播\n",
    "            optimizer.step()\n",
    "            if global_step%100==0:\n",
    "                print(f\"{ep+1}轮，global_step:{global_step},loss:{output.loss.item()}\")\n",
    "            global_step+=1\n",
    "        acc=evaluate()\n",
    "        print(f\"ep:{ep},acc:{acc}\")\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ac48f707",
   "metadata": {},
   "source": [
    "#### Step8:训练"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 88,
   "id": "8c1bb588",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1轮，global_step:0,loss:0.21332047879695892\n",
      "1轮，global_step:100,loss:0.6665377616882324\n",
      "1轮，global_step:200,loss:0.12025110423564911\n",
      "ep:0,acc:0.875\n",
      "2轮，global_step:300,loss:0.024049125611782074\n",
      "2轮，global_step:400,loss:0.18739260733127594\n",
      "ep:1,acc:0.8737112879753113\n",
      "3轮，global_step:500,loss:0.2844948172569275\n",
      "3轮，global_step:600,loss:0.11428964138031006\n",
      "ep:2,acc:0.8891752362251282\n"
     ]
    }
   ],
   "source": [
     "# Kick off fine-tuning with the defaults (3 epochs, log every 100 steps)\n",
     "train()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b72d098a",
   "metadata": {},
   "source": [
    "#### Step9：模型预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 112,
   "id": "0b7fb4a9",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "输入:我觉得这家酒店很好吃,\n",
      "情绪判别:好评\n"
     ]
    }
   ],
   "source": [
    "sen=\"我觉得这家酒店很好吃\"\n",
    "\n",
    "id2label={0:\"差评\",1:\"好评\"}\n",
    "\n",
    "model.eval()\n",
    "with torch.inference_mode():\n",
    "    inputs=tokenizer(sen,return_tensors=\"pt\")\n",
    "    inputs={k:v.cuda() for k,v in inputs.items()}\n",
    "    logits=model(**inputs).logits\n",
    "    # print(logits.reshape(-1))\n",
    "    pred=torch.argmax(logits.reshape(-1),dim=0)\n",
    "    print(f\"输入:{sen},\\n情绪判别:{id2label.get(pred.item())}\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 114,
   "id": "94da8d0a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'label': '好评', 'score': 0.9988840222358704}]"
      ]
     },
     "execution_count": 114,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "\"\"\"快捷版本\"\"\"\n",
    "from transformers import pipeline\n",
    "\n",
    "model.config.id2label=id2label\n",
    "\n",
    "pipe=pipeline(\"text-classification\",model=model,tokenizer=tokenizer,device=0)\n",
    "pipe(sen)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8f5ef28e",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "transformers",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.21"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
