{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "5c3e8d84-8bd1-4908-a349-6462cd8d296c",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T09:45:20.725354Z",
     "iopub.status.busy": "2022-07-01T09:45:20.724781Z",
     "iopub.status.idle": "2022-07-01T09:45:22.261208Z",
     "shell.execute_reply": "2022-07-01T09:45:22.260267Z",
     "shell.execute_reply.started": "2022-07-01T09:45:20.725304Z"
    }
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "\n",
    "# Load the per-language training sets shipped with the competition package.\n",
    "# NOTE: '中文_trian.xlsx' ('trian') reproduces the misspelled filename as shipped in the\n",
    "# dataset — do not 'correct' the spelling here or the read will fail.\n",
    "train_cn = pd.read_excel('汽车领域多语种迁移学习挑战赛初赛公开数据_A榜/汽车领域多语种迁移学习挑战赛初赛训练集/中文_trian.xlsx')\n",
    "train_ja = pd.read_excel('汽车领域多语种迁移学习挑战赛初赛公开数据_A榜/汽车领域多语种迁移学习挑战赛初赛训练集/日语_train.xlsx')\n",
    "train_en = pd.read_excel('汽车领域多语种迁移学习挑战赛初赛公开数据_A榜/汽车领域多语种迁移学习挑战赛初赛训练集/英文_train.xlsx')\n",
    "\n",
    "# Test set A keeps both languages in one workbook, one sheet per language.\n",
    "test_ja = pd.read_excel('汽车领域多语种迁移学习挑战赛初赛公开数据_A榜/testA.xlsx', sheet_name='日语_testA')\n",
    "test_en = pd.read_excel('汽车领域多语种迁移学习挑战赛初赛公开数据_A榜/testA.xlsx', sheet_name='英文_testA')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "a8a9b01c-1b8c-4fdf-bf78-66b2a9b0c8c7",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:29:22.868375Z",
     "iopub.status.busy": "2022-07-01T10:29:22.867775Z",
     "iopub.status.idle": "2022-07-01T10:29:22.881996Z",
     "shell.execute_reply": "2022-07-01T10:29:22.881316Z",
     "shell.execute_reply.started": "2022-07-01T10:29:22.868325Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>原始文本</th>\n",
       "      <th>意图</th>\n",
       "      <th>槽值1</th>\n",
       "      <th>槽值2</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>16.5度</td>\n",
       "      <td>adjust_ac_temperature_to_number</td>\n",
       "      <td>offset:16.5</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>16度</td>\n",
       "      <td>adjust_ac_temperature_to_number</td>\n",
       "      <td>offset:16</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "    原始文本                               意图          槽值1  槽值2\n",
       "0  16.5度  adjust_ac_temperature_to_number  offset:16.5  NaN\n",
       "1    16度  adjust_ac_temperature_to_number    offset:16  NaN"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Show a couple of Chinese rows that actually carry a slot value.\n",
    "train_cn[train_cn['槽值1'].notnull()].head(2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "id": "1eb320fa-d24b-4276-b21e-94bccd85bf8e",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:29:19.868025Z",
     "iopub.status.busy": "2022-07-01T10:29:19.867442Z",
     "iopub.status.idle": "2022-07-01T10:29:19.881444Z",
     "shell.execute_reply": "2022-07-01T10:29:19.880254Z",
     "shell.execute_reply.started": "2022-07-01T10:29:19.867976Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>原始文本</th>\n",
       "      <th>中文翻译</th>\n",
       "      <th>意图</th>\n",
       "      <th>槽值1</th>\n",
       "      <th>槽值2</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>35</th>\n",
       "      <td>エアコンの冷房モードをオンにして</td>\n",
       "      <td>开启空调的制冷模式</td>\n",
       "      <td>open_ac_mode</td>\n",
       "      <td>mode:冷房モード</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>36</th>\n",
       "      <td>エアコンの冷房モードをつけて</td>\n",
       "      <td>开启空调的制冷模式</td>\n",
       "      <td>open_ac_mode</td>\n",
       "      <td>mode:冷房モード</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                原始文本       中文翻译            意图         槽值1  槽值2\n",
       "35  エアコンの冷房モードをオンにして  开启空调的制冷模式  open_ac_mode  mode:冷房モード  NaN\n",
       "36    エアコンの冷房モードをつけて  开启空调的制冷模式  open_ac_mode  mode:冷房モード  NaN"
      ]
     },
     "execution_count": 48,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Show a couple of Japanese rows that actually carry a slot value.\n",
    "train_ja[train_ja['槽值1'].notnull()].head(2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "c021f213-d394-4030-827d-6daad16fd01f",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:07:59.819027Z",
     "iopub.status.busy": "2022-07-01T10:07:59.818434Z",
     "iopub.status.idle": "2022-07-01T10:07:59.833945Z",
     "shell.execute_reply": "2022-07-01T10:07:59.833308Z",
     "shell.execute_reply.started": "2022-07-01T10:07:59.818978Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(36, 33, 71)"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Maximum character length of the raw text in each language (CN, JA, EN).\n",
    "tuple(df['原始文本'].apply(len).max() for df in (train_cn, train_ja, train_en))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "ab0d143f-8bdb-42ba-80d4-5b7289ef200e",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:15:48.817802Z",
     "iopub.status.busy": "2022-07-01T10:15:48.817197Z",
     "iopub.status.idle": "2022-07-01T10:15:48.822597Z",
     "shell.execute_reply": "2022-07-01T10:15:48.821496Z",
     "shell.execute_reply.started": "2022-07-01T10:15:48.817753Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Toy example for span labeling: a Japanese utterance and the slot value it contains.\n",
    "s = '菅田将暉のバースデイイブを再生してくれ'\n",
    "tag1 = '菅田将暉'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "id": "5198591e-95c6-4c20-bb19-a1cb9cc600f9",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:16:33.819916Z",
     "iopub.status.busy": "2022-07-01T10:16:33.819334Z",
     "iopub.status.idle": "2022-07-01T10:16:33.825594Z",
     "shell.execute_reply": "2022-07-01T10:16:33.824627Z",
     "shell.execute_reply.started": "2022-07-01T10:16:33.819868Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Character-level NER labels for s: 0 = outside, 1 = inside the tag1 span.\n",
    "# Fixed: the original also referenced `tag2`, which is never defined in this notebook\n",
    "# (leftover hidden state from a deleted cell) and raises NameError on a fresh\n",
    "# Restart-&-Run-All; the saved output below contains only 0/1 labels, so that line is removed.\n",
    "ner_label = np.zeros(len(s))\n",
    "ner_label[s.index(tag1): s.index(tag1) + len(tag1)] = 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "id": "0c1c8ed1-05ec-4650-b37a-bb21d7f890c8",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:16:36.818161Z",
     "iopub.status.busy": "2022-07-01T10:16:36.817559Z",
     "iopub.status.idle": "2022-07-01T10:16:36.824184Z",
     "shell.execute_reply": "2022-07-01T10:16:36.823557Z",
     "shell.execute_reply.started": "2022-07-01T10:16:36.818113Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "       0., 0.])"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Inspect the label vector: positions 0-3 (the tag1 span) are 1, the rest 0.\n",
    "ner_label"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "7f68dfa5-5062-4d72-879a-a1c8393e7fd8",
   "metadata": {},
   "source": [
    "# transformer基础"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "bf44b1f6-bd7f-4f19-be00-c06ffabf3055",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T09:49:31.772372Z",
     "iopub.status.busy": "2022-07-01T09:49:31.771754Z",
     "iopub.status.idle": "2022-07-01T09:49:43.299333Z",
     "shell.execute_reply": "2022-07-01T09:49:43.298147Z",
     "shell.execute_reply.started": "2022-07-01T09:49:31.772322Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at bert-base-chinese were not used when initializing BertForSequenceClassification: ['cls.seq_relationship.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.seq_relationship.weight', 'cls.predictions.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.dense.bias']\n",
      "- This IS expected if you are initializing BertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
      "Some weights of BertForSequenceClassification were not initialized from the model checkpoint at bert-base-chinese and are newly initialized: ['classifier.weight', 'classifier.bias']\n",
      "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n"
     ]
    }
   ],
   "source": [
    "from transformers import AutoTokenizer, AutoModelForSequenceClassification\n",
    "# AutoTokenizer: the tokenizer matching the checkpoint\n",
    "\n",
    "# Auto*: the concrete class is resolved automatically from the checkpoint name\n",
    "model_name = \"bert-base-chinese\"\n",
    "pt_model = AutoModelForSequenceClassification.from_pretrained(model_name)\n",
    "tokenizer = AutoTokenizer.from_pretrained(model_name)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 144,
   "id": "b4ab6ad7-3e37-44d6-85da-3b211a88929a",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T13:06:47.871741Z",
     "iopub.status.busy": "2022-07-01T13:06:47.871150Z",
     "iopub.status.idle": "2022-07-01T13:06:47.879178Z",
     "shell.execute_reply": "2022-07-01T13:06:47.878622Z",
     "shell.execute_reply.started": "2022-07-01T13:06:47.871691Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'input_ids': [[101, 8204, 4867, 3767, 3602, 10064, 3976, 2135, 7700, 4577, 7162, 3775, 1882, 102], [101, 2080, 3442, 2892, 10064, 2262, 3031, 4778, 8576, 3976, 102, 0, 0, 0]], 'token_type_ids': [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], 'attention_mask': [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0]]}"
      ]
     },
     "execution_count": 144,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "inputs = tokenizer([\"阿水很帅，我也这样觉得。\", \"不对啊，你在欺骗我\"], truncation=True, max_length=20, padding=True)\n",
    "inputs\n",
    "# input_ids: index of each token in the vocabulary\n",
    "# token_type_ids: whether the token belongs to the first or the second sentence\n",
    "# attention_mask: whether the token is real input (1) or padding (0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "d1aef659-2e84-439a-9815-c61e640c1a6f",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T09:54:29.816951Z",
     "iopub.status.busy": "2022-07-01T09:54:29.816431Z",
     "iopub.status.idle": "2022-07-01T09:54:33.087222Z",
     "shell.execute_reply": "2022-07-01T09:54:33.086483Z",
     "shell.execute_reply.started": "2022-07-01T09:54:29.816912Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at hfl/chinese-roberta-wwm-ext were not used when initializing BertModel: ['cls.seq_relationship.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.seq_relationship.weight', 'cls.predictions.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.dense.bias']\n",
      "- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
     ]
    }
   ],
   "source": [
    "from transformers import AutoTokenizer, AutoModelForMaskedLM, AutoConfig, BertModel, AutoModel\n",
    "# Bare encoder (no task head) for Chinese RoBERTa-wwm-ext; used below to inspect hidden states.\n",
    "model = AutoModel.from_pretrained(\"hfl/chinese-roberta-wwm-ext\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "fbc18613-7d55-4b54-8265-9aad1b59efcc",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:04:48.905362Z",
     "iopub.status.busy": "2022-07-01T10:04:48.904762Z",
     "iopub.status.idle": "2022-07-01T10:04:48.974613Z",
     "shell.execute_reply": "2022-07-01T10:04:48.974132Z",
     "shell.execute_reply.started": "2022-07-01T10:04:48.905313Z"
    },
    "scrolled": true,
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "BaseModelOutputWithPoolingAndCrossAttentions(last_hidden_state=tensor([[[ 2.8968e-01,  2.8604e-01,  1.7832e-01,  ..., -3.5887e-01,\n",
       "           1.9458e-02, -3.0741e-02],\n",
       "         [ 5.2763e-02,  4.7332e-01,  6.1362e-01,  ...,  4.0184e-01,\n",
       "           9.5352e-02, -8.2413e-01],\n",
       "         [ 7.3144e-01,  1.6262e-01,  8.1929e-02,  ...,  2.2394e-01,\n",
       "          -4.6239e-01,  1.6442e-04],\n",
       "         ...,\n",
       "         [ 2.5093e+00,  6.9676e-01,  7.8148e-01,  ..., -3.6061e-02,\n",
       "           9.9588e-02, -3.1432e-01],\n",
       "         [ 2.1131e+00,  2.4205e-01, -1.4203e-01,  ..., -1.3002e+00,\n",
       "           4.6712e-01, -2.1039e-01],\n",
       "         [ 2.8968e-01,  2.8604e-01,  1.7832e-01,  ..., -3.5887e-01,\n",
       "           1.9458e-02, -3.0741e-02]]], grad_fn=<NativeLayerNormBackward>), pooler_output=tensor([[ 9.9662e-01,  9.6177e-01,  9.8323e-01,  9.5575e-01,  7.7640e-01,\n",
       "          2.6194e-01, -9.2609e-01, -3.0311e-01,  4.8948e-01, -9.8587e-01,\n",
       "          9.9946e-01,  9.8744e-01, -6.3099e-01, -8.3612e-01,  9.5591e-01,\n",
       "         -9.9262e-01,  6.1859e-01,  6.6866e-01,  1.9476e-01, -6.4648e-01,\n",
       "          9.9280e-01, -9.9070e-01, -8.8831e-01,  2.8478e-01, -6.6962e-02,\n",
       "          7.5795e-01,  7.6470e-01,  4.5130e-01, -9.9861e-01,  9.7443e-01,\n",
       "          7.4833e-01,  9.9371e-01,  4.6914e-01, -9.9648e-01, -9.9934e-01,\n",
       "          5.7210e-01,  3.7124e-01,  9.6331e-01,  6.3858e-01, -9.8107e-01,\n",
       "         -8.9316e-01,  8.3263e-02, -4.6151e-01, -9.4186e-01,  6.9095e-01,\n",
       "          6.8568e-01, -9.9787e-01, -9.9543e-01, -4.0585e-01,  9.6780e-01,\n",
       "         -8.0134e-01, -9.9334e-01,  8.7023e-01, -4.3723e-01, -2.2839e-01,\n",
       "          9.4797e-01, -9.8931e-01,  2.9845e-02,  9.9639e-01, -5.5515e-02,\n",
       "          9.9607e-01, -6.8409e-01,  7.2724e-02, -9.9572e-01,  9.9436e-01,\n",
       "         -9.9646e-01, -9.7150e-01,  7.5425e-01,  9.5027e-01,  9.9921e-01,\n",
       "          1.5703e-01,  9.9396e-01,  9.9892e-01,  7.9675e-02, -3.6200e-01,\n",
       "          9.8880e-01, -4.9195e-01,  9.3279e-01, -9.9976e-01,  3.0524e-01,\n",
       "          9.9903e-01,  9.3540e-01, -9.8996e-01, -1.8243e-01, -9.6692e-01,\n",
       "         -9.8614e-01, -8.9456e-01,  9.7893e-01, -2.1291e-01,  8.7954e-01,\n",
       "          9.8765e-01, -9.8890e-01, -9.9967e-01,  9.5075e-01, -9.8297e-01,\n",
       "         -9.5798e-01, -9.7144e-01,  9.9843e-01, -6.6192e-01, -8.9953e-01,\n",
       "         -7.5912e-01,  2.8442e-01, -9.8924e-01, -9.9449e-01,  4.6580e-01,\n",
       "          9.9273e-01,  5.0176e-01, -9.9553e-01,  9.9865e-01,  7.0036e-01,\n",
       "         -9.9924e-01, -9.4778e-01, -9.9530e-01, -6.7059e-02, -8.7289e-01,\n",
       "          9.9888e-01,  3.8952e-01,  8.3163e-01,  3.4750e-01, -9.9179e-01,\n",
       "          9.6703e-01, -9.7625e-01, -9.7122e-01, -3.2871e-01,  9.5063e-01,\n",
       "          9.9808e-01,  9.9555e-01, -7.7396e-01,  9.8374e-01,  9.9725e-01,\n",
       "         -2.2444e-01,  9.7600e-01, -9.7645e-01,  9.8954e-01,  8.0221e-01,\n",
       "         -9.9019e-01, -4.1962e-01, -7.9233e-01,  9.9954e-01,  9.9527e-01,\n",
       "          2.4879e-01, -8.6868e-02,  9.8841e-01, -9.3322e-01,  9.9773e-01,\n",
       "         -9.9959e-01,  9.8506e-01, -9.9911e-01, -3.1220e-01,  9.2587e-01,\n",
       "         -2.1263e-01,  9.9964e-01, -2.3983e-01,  9.9889e-01, -9.8421e-01,\n",
       "         -9.9544e-01,  6.1965e-01,  4.3926e-01,  9.9542e-01, -9.9600e-01,\n",
       "          8.9019e-01, -4.8843e-01,  1.6582e-01,  8.9265e-01, -9.9719e-01,\n",
       "          9.9635e-01, -8.6030e-01,  9.9809e-01,  9.5871e-01, -4.0589e-01,\n",
       "         -9.4254e-01, -9.3729e-01,  3.5383e-01, -9.8353e-01, -7.4083e-01,\n",
       "          8.9433e-01, -9.4522e-01,  9.9928e-01, -8.2835e-02, -5.7500e-01,\n",
       "          7.2965e-01, -8.9189e-01, -9.7878e-01,  9.4325e-01, -6.9578e-01,\n",
       "          9.5799e-01,  8.3684e-01,  4.0190e-01,  9.6444e-01,  1.8499e-01,\n",
       "         -9.1384e-01,  9.7347e-01,  8.5854e-01,  2.2260e-01,  9.8794e-01,\n",
       "          7.9640e-01, -7.3601e-01, -9.4541e-01, -9.9944e-01, -6.8094e-01,\n",
       "          9.9829e-01, -9.0907e-01, -9.3625e-01,  4.7164e-01, -9.9650e-01,\n",
       "          7.6353e-01, -7.1358e-01, -6.2092e-01, -5.6354e-01, -9.9867e-01,\n",
       "          1.4209e-01, -9.6820e-01, -9.6071e-01,  5.5919e-01,  2.3790e-01,\n",
       "         -1.6888e-01, -9.9432e-01,  4.9599e-01,  9.5755e-01,  3.3774e-01,\n",
       "          9.1718e-01, -6.0351e-01, -9.8299e-01,  6.8086e-01, -6.0843e-01,\n",
       "          8.1916e-01,  9.8872e-01,  9.9748e-01,  9.5784e-01, -7.7471e-01,\n",
       "          2.8085e-01,  9.9100e-01,  8.3012e-01, -9.9926e-01,  4.4278e-01,\n",
       "         -9.7071e-01, -7.0786e-01,  9.9737e-01, -9.8745e-01,  9.6966e-01,\n",
       "          9.9915e-01, -7.7344e-01,  9.9929e-01, -7.7413e-01, -9.9155e-01,\n",
       "         -9.8995e-01,  9.9823e-01,  7.1987e-01,  9.9898e-01, -9.2822e-01,\n",
       "         -9.5216e-01,  2.2958e-01, -8.1463e-01, -9.9843e-01, -9.9830e-01,\n",
       "          5.7264e-01,  9.7394e-01,  9.9816e-01,  7.4313e-01, -9.8148e-01,\n",
       "         -9.5043e-01, -9.9054e-01,  9.9931e-01, -9.5431e-01,  9.7896e-01,\n",
       "          9.8253e-01, -3.7208e-01,  1.8194e-01,  7.6833e-01, -8.7961e-01,\n",
       "         -9.6618e-01, -4.4561e-02, -9.9605e-01, -9.6751e-01, -9.9792e-01,\n",
       "          9.6579e-01, -9.8654e-01, -9.9947e-01,  9.3269e-01,  9.9887e-01,\n",
       "         -3.0529e-02, -9.9746e-01,  9.6286e-01,  9.9148e-01,  1.9521e-02,\n",
       "          5.0697e-01,  9.3269e-01, -9.9957e-01,  9.9925e-01, -9.9349e-01,\n",
       "          9.5777e-01, -9.8076e-01, -9.9609e-01, -3.5720e-01,  9.7934e-01,\n",
       "          9.9540e-01, -9.3737e-01,  6.1172e-01, -9.7988e-01,  8.7739e-02,\n",
       "          3.2861e-01,  9.9119e-01, -9.0030e-01, -9.9521e-02, -9.3327e-01,\n",
       "         -8.0340e-01,  9.3901e-01, -9.5657e-01, -9.6070e-01,  8.0434e-01,\n",
       "          9.9813e-01, -9.1553e-01,  9.9951e-01,  9.9781e-01,  9.9991e-01,\n",
       "         -2.5886e-01, -9.5501e-01,  9.9724e-01, -3.6691e-01,  5.3701e-01,\n",
       "         -6.5171e-01,  1.6319e-01,  9.6006e-01,  1.6775e-03, -2.8698e-01,\n",
       "         -9.9705e-01,  9.9453e-01, -5.3239e-01,  6.9393e-01,  6.9001e-01,\n",
       "         -9.8351e-01, -3.1290e-02,  9.8088e-01, -9.3781e-01,  9.6258e-01,\n",
       "         -9.7893e-01, -4.5946e-01, -4.9536e-01,  9.9820e-01,  9.1258e-01,\n",
       "          9.9264e-02, -8.7818e-01,  9.9715e-01, -9.9660e-01,  9.6659e-01,\n",
       "         -9.9834e-01,  9.9725e-01, -9.4589e-01, -2.8125e-02, -9.3730e-01,\n",
       "         -9.9126e-01,  9.9581e-01,  9.9047e-01,  9.4391e-01,  9.8722e-01,\n",
       "         -8.0295e-01,  9.8362e-01,  7.0818e-01,  8.2152e-01,  9.7774e-01,\n",
       "          3.7711e-01,  9.9789e-01, -9.8869e-01, -8.1014e-01,  1.9954e-01,\n",
       "         -9.9655e-01, -8.9439e-01, -9.9910e-01,  2.2657e-02, -9.8724e-01,\n",
       "         -8.6843e-01, -2.2724e-01, -8.1059e-01,  4.1613e-01, -6.3526e-01,\n",
       "         -7.4024e-01, -6.2247e-01,  1.2896e-01,  6.6272e-01,  5.0016e-01,\n",
       "          8.0643e-01, -9.8723e-01, -5.4740e-01, -9.9913e-01, -9.8623e-01,\n",
       "         -1.1866e-01,  9.9724e-01, -9.9763e-01,  9.3674e-01, -9.9758e-01,\n",
       "         -3.1784e-01, -2.3618e-01, -9.2769e-01, -6.3726e-01,  1.7276e-01,\n",
       "         -9.9315e-01,  9.5259e-01,  9.1085e-01,  9.9923e-01,  9.8708e-01,\n",
       "          9.8206e-01, -2.8523e-01, -9.4534e-01, -9.8950e-01, -9.9668e-01,\n",
       "         -9.9947e-01, -9.9605e-01,  6.4996e-01,  1.5127e-01, -9.9608e-01,\n",
       "         -1.9295e-01,  9.7932e-01,  9.9777e-01,  9.5854e-01, -9.9373e-01,\n",
       "         -6.6273e-01, -9.8458e-01, -2.6582e-01,  9.9154e-01, -5.3754e-01,\n",
       "         -9.4585e-01,  8.2354e-01, -4.0246e-02,  9.9560e-01, -6.5416e-01,\n",
       "          2.8690e-01,  7.2600e-01,  8.1046e-01,  8.2516e-01, -9.9263e-01,\n",
       "          6.0173e-01,  9.9917e-01,  8.7987e-01, -9.9429e-01, -5.0335e-01,\n",
       "         -1.1022e-01, -9.9429e-01, -5.3829e-01,  8.8487e-01,  9.9373e-01,\n",
       "         -9.9818e-01, -1.8580e-01, -8.6768e-01,  9.2838e-01,  8.9045e-01,\n",
       "          9.6988e-01,  9.7706e-01,  7.1352e-01,  9.8528e-01, -2.6251e-01,\n",
       "          4.9104e-01,  9.8970e-01,  5.2726e-01, -9.9555e-01,  9.6371e-01,\n",
       "         -2.4472e-01,  3.6716e-01, -9.8935e-01,  9.9602e-01, -8.8196e-01,\n",
       "          9.9807e-01,  7.6261e-01, -2.4602e-01, -9.5164e-01, -9.8941e-01,\n",
       "          9.8656e-01,  9.9870e-01, -2.6014e-01, -8.5737e-01, -9.8712e-01,\n",
       "         -9.9773e-01, -9.7569e-01, -9.4232e-01,  6.6659e-01, -9.1384e-01,\n",
       "         -9.8447e-01,  5.4985e-01,  2.3634e-01,  9.9983e-01,  9.9690e-01,\n",
       "          9.9847e-01, -9.0701e-01, -8.5440e-01,  9.9606e-01, -5.1507e-01,\n",
       "          6.9772e-01, -1.7164e-01, -9.9709e-01, -9.8446e-01, -9.9655e-01,\n",
       "          9.6401e-01,  6.7544e-01,  4.6296e-01, -8.7468e-01,  7.1318e-01,\n",
       "          8.0878e-01, -9.9844e-01, -8.2683e-01, -9.5416e-01,  9.5676e-01,\n",
       "          9.9937e-01, -9.4323e-01,  8.4261e-01, -9.7109e-01,  8.8490e-02,\n",
       "          8.5638e-01,  9.8090e-01,  9.9485e-01, -9.5588e-01,  3.5888e-01,\n",
       "         -3.5685e-01, -4.4537e-01,  9.4316e-01,  9.5237e-01, -9.2699e-01,\n",
       "         -5.0645e-01,  9.7107e-01, -9.1878e-01,  6.4073e-01, -6.1707e-01,\n",
       "          8.3786e-01,  9.6218e-01,  9.9875e-01,  6.8577e-01,  9.8741e-01,\n",
       "         -1.8369e-01,  8.6876e-01,  9.9853e-01, -2.9611e-01,  9.4470e-01,\n",
       "         -3.2736e-01, -9.4787e-01, -1.9237e-01,  9.7293e-01,  8.5424e-01,\n",
       "          3.9985e-01, -2.3335e-01, -9.8681e-01,  9.9109e-01,  9.7090e-01,\n",
       "          9.9957e-01, -3.9974e-01,  9.9242e-01, -5.0345e-01,  8.6380e-01,\n",
       "          7.3693e-01,  6.8620e-01,  6.2118e-01,  5.9789e-01,  9.9272e-01,\n",
       "          9.9537e-01, -9.9678e-01, -9.3170e-01, -9.9924e-01,  9.9942e-01,\n",
       "          9.4643e-01,  7.1641e-01, -9.9756e-01,  9.9417e-01, -3.3184e-01,\n",
       "          4.7296e-02,  9.8960e-01,  6.3338e-01, -9.9128e-01,  8.1844e-01,\n",
       "         -9.5638e-01,  4.7236e-01, -8.3007e-01,  6.7590e-01, -5.4537e-01,\n",
       "          9.9260e-01, -9.7392e-01,  3.5526e-01,  9.9847e-01,  3.8747e-01,\n",
       "          9.7598e-01,  1.3573e-01, -9.9359e-01,  9.6991e-01, -7.4889e-01,\n",
       "         -9.8761e-01,  5.6887e-01,  9.9496e-01,  9.6144e-01, -6.2367e-03,\n",
       "          3.0459e-04,  9.5794e-01, -9.7873e-01,  9.9554e-01, -9.9737e-01,\n",
       "         -1.8685e-01, -9.6488e-01,  9.9566e-01, -9.7753e-01, -9.9781e-01,\n",
       "         -3.6239e-01,  8.5919e-01,  7.6315e-01,  7.8590e-02,  9.9803e-01,\n",
       "         -7.1844e-01, -9.1208e-01, -7.9298e-01, -5.3986e-01, -9.7685e-01,\n",
       "         -9.7327e-01, -1.2070e-01, -9.6068e-01, -5.1325e-01, -4.7477e-01,\n",
       "         -3.7484e-01, -9.4905e-01, -9.8381e-01,  9.9754e-01, -9.5500e-01,\n",
       "         -9.5332e-01,  9.9891e-01, -5.2662e-01, -9.9814e-01,  6.5782e-01,\n",
       "         -7.8858e-01,  2.9055e-01,  8.8596e-01,  7.3168e-01,  4.5181e-01,\n",
       "         -9.9992e-01,  6.0155e-01,  9.9758e-01, -9.9327e-01, -9.0210e-01,\n",
       "         -8.9391e-01, -1.7055e-01, -6.2452e-01,  5.9038e-01,  9.4364e-01,\n",
       "         -6.1853e-01,  7.9187e-01, -4.7386e-01,  9.5615e-01, -5.9197e-02,\n",
       "          4.7891e-01, -6.2291e-01, -4.7107e-01, -9.2354e-01, -9.6988e-01,\n",
       "         -9.9722e-01, -9.9239e-01,  9.9931e-01,  9.8566e-01,  9.9718e-01,\n",
       "         -8.6140e-01, -4.6449e-01,  7.7008e-01,  9.8902e-01, -9.9819e-01,\n",
       "         -8.4641e-01,  5.0974e-01,  6.7781e-01,  3.3581e-01, -9.6012e-01,\n",
       "         -1.0624e-01, -9.9897e-01, -8.1381e-01,  4.3434e-01, -3.5364e-01,\n",
       "          1.6731e-01,  9.9898e-01,  9.2089e-01, -9.8799e-01, -6.6438e-01,\n",
       "         -9.9134e-01, -9.9020e-01,  9.9761e-01,  9.5245e-01,  9.8107e-01,\n",
       "         -8.6661e-01, -6.6729e-01,  8.0834e-01, -1.9978e-01, -7.2814e-01,\n",
       "         -9.9335e-01, -9.9484e-01, -9.7727e-01,  7.4353e-01, -9.9382e-01,\n",
       "         -9.9338e-01,  9.9308e-01,  9.9803e-01,  8.6301e-01, -9.9731e-01,\n",
       "         -7.1828e-01,  9.9866e-01,  9.5002e-01,  9.9976e-01,  4.2847e-01,\n",
       "          9.9753e-01, -9.5387e-01,  9.8079e-01, -7.9323e-01,  9.9918e-01,\n",
       "         -9.9477e-01,  9.9899e-01,  9.9963e-01,  6.0913e-01,  9.8793e-01,\n",
       "         -9.8892e-01,  8.1100e-01, -3.6801e-01, -6.6456e-01, -5.0661e-01,\n",
       "          7.1336e-02, -9.4614e-01, -8.7603e-01,  9.5308e-01, -9.9160e-01,\n",
       "          9.9835e-01, -1.3687e-01,  3.0258e-01,  9.4953e-01, -1.8563e-01,\n",
       "          8.7507e-01,  9.7353e-01, -9.9741e-01,  5.0375e-01,  9.7328e-01,\n",
       "          9.7243e-01,  9.9875e-01,  9.6014e-01,  9.6691e-01, -8.6245e-01,\n",
       "         -9.9890e-01,  3.9871e-01, -8.1264e-01, -4.2548e-01, -9.8494e-01,\n",
       "          9.9477e-01,  9.9020e-01, -9.9366e-01,  4.5106e-01,  1.8740e-01,\n",
       "          7.7529e-01,  9.6581e-01,  9.2946e-01, -2.8555e-01,  8.2665e-01,\n",
       "          8.1364e-01,  9.6807e-01, -9.8704e-01,  8.1367e-01, -9.8128e-01,\n",
       "          3.4697e-01,  9.8020e-01, -9.6536e-01,  9.9821e-01, -9.9365e-01,\n",
       "          9.9121e-01, -9.0998e-01,  3.6595e-01,  9.8225e-01,  9.7303e-01,\n",
       "         -8.3394e-01,  9.9919e-01,  8.5846e-01, -7.1041e-01, -7.6754e-01,\n",
       "         -9.1742e-01, -8.8104e-01,  3.3957e-04]], grad_fn=<TanhBackward>), hidden_states=None, past_key_values=None, attentions=None, cross_attentions=None)"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import torch\n",
    "# NOTE(review): .view(1, -1) flattens ALL tokenized sequences into a single batch row, so this\n",
    "# only makes sense if `inputs` held one sentence. The batch-of-2 `inputs` built in the later\n",
    "# cell would be silently concatenated — the saved [1, 24, 768] output suggests a different\n",
    "# `inputs` existed in the kernel when this ran; confirm before re-running from scratch.\n",
    "item = {key: torch.tensor(inputs[key]).view(1, -1) for key, val in inputs.items()}\n",
    "\n",
    "# Forward pass through the pretrained encoder.\n",
    "output = model(input_ids = item['input_ids'], attention_mask = item['attention_mask'])\n",
    "output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "278d30f5-104f-4e5b-a136-07321e2e4136",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:05:06.861763Z",
     "iopub.status.busy": "2022-07-01T10:05:06.861131Z",
     "iopub.status.idle": "2022-07-01T10:05:06.868667Z",
     "shell.execute_reply": "2022-07-01T10:05:06.867858Z",
     "shell.execute_reply.started": "2022-07-01T10:05:06.861693Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([1, 24, 768]), torch.Size([1, 768]))"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# last_hidden_state: one 768-d vector per token; pooler_output: one 768-d vector per sequence.\n",
    "output.last_hidden_state.shape, output.pooler_output.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "6b1ba925-7ed4-4601-8744-6a4b2a13fa84",
   "metadata": {},
   "source": [
    "# 意图分类模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "id": "d28567cc-8355-4a9a-8475-b7d7edbb9b68",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T10:34:21.870035Z",
     "iopub.status.busy": "2022-07-01T10:34:21.869428Z",
     "iopub.status.idle": "2022-07-01T10:34:22.045713Z",
     "shell.execute_reply": "2022-07-01T10:34:22.045177Z",
     "shell.execute_reply.started": "2022-07-01T10:34:21.869986Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Strip the 'name:' prefix from every slot value, e.g. 'offset:16' -> '16'.\n",
    "# Refactored: the original repeated the same pair of lines for each of the three frames.\n",
    "# regex=False because the prefixes are literal strings — same result, avoids regex\n",
    "# interpretation and the pandas warning about the changing regex default.\n",
    "SLOT_TAGS = ['intent', 'device', 'mode', 'offset', 'endloc', 'landmark', 'singer', 'song']\n",
    "for df in (train_ja, train_cn, train_en):\n",
    "    for col in ('槽值1', '槽值2'):\n",
    "        for tag in SLOT_TAGS:\n",
    "            df[col] = df[col].str.replace(f'{tag}:', '', regex=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 126,
   "id": "ef1c6097-0737-4805-ac12-ecbc98ceb7a5",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:32:38.867908Z",
     "iopub.status.busy": "2022-07-01T11:32:38.866993Z",
     "iopub.status.idle": "2022-07-01T11:32:38.885068Z",
     "shell.execute_reply": "2022-07-01T11:32:38.884238Z",
     "shell.execute_reply.started": "2022-07-01T11:32:38.867826Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Combine the three language training sets (subsample Chinese to 10k rows so languages are\n",
    "# roughly balanced), shuffle, and integer-encode the intent label.\n",
    "# Fixed: both .sample() calls lacked random_state, so every fresh run produced a different\n",
    "# train_df; seeds are pinned for reproducibility.\n",
    "train_df = pd.concat([\n",
    "    train_ja[['原始文本', '意图', '槽值1', '槽值2']],\n",
    "    train_cn[['原始文本', '意图', '槽值1', '槽值2']].sample(10000, random_state=42),\n",
    "    train_en[['原始文本', '意图', '槽值1', '槽值2']],\n",
    "], axis=0)\n",
    "train_df = train_df.sample(frac=1.0, random_state=42)\n",
    "# lbl_ecode (sic) maps encoded intent ids back to label strings; name kept — later cells use it.\n",
    "train_df['意图_encode'], lbl_ecode = pd.factorize(train_df['意图'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 127,
   "id": "54b5596a-b97e-44c0-97e2-1fc95e1256b3",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:32:39.870145Z",
     "iopub.status.busy": "2022-07-01T11:32:39.869506Z",
     "iopub.status.idle": "2022-07-01T11:32:39.876854Z",
     "shell.execute_reply": "2022-07-01T11:32:39.875938Z",
     "shell.execute_reply.started": "2022-07-01T11:32:39.870095Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "18"
      ]
     },
     "execution_count": 127,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Number of distinct intent classes discovered by factorize (18 here).\n",
    "len(lbl_ecode)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 128,
   "id": "bf264879-9d3f-4480-b91d-9fa04a33a15d",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:32:40.868119Z",
     "iopub.status.busy": "2022-07-01T11:32:40.867527Z",
     "iopub.status.idle": "2022-07-01T11:32:50.195447Z",
     "shell.execute_reply": "2022-07-01T11:32:50.194400Z",
     "shell.execute_reply.started": "2022-07-01T11:32:40.868070Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Make sure the transformers names are in scope on a fresh kernel —\n",
    "# no earlier visible cell imports them (re-importing is harmless).\n",
    "from transformers import AutoConfig, AutoModel, AutoTokenizer\n",
    "\n",
    "tokenizer = AutoTokenizer.from_pretrained(\"bert-base-multilingual-cased\")\n",
    "config = AutoConfig.from_pretrained(\"bert-base-multilingual-cased\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 129,
   "id": "8a3ba464-56f7-440e-9eba-cc1a3999ae16",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:32:50.196661Z",
     "iopub.status.busy": "2022-07-01T11:32:50.196456Z",
     "iopub.status.idle": "2022-07-01T11:32:50.203303Z",
     "shell.execute_reply": "2022-07-01T11:32:50.202697Z",
     "shell.execute_reply.started": "2022-07-01T11:32:50.196643Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "from torch.utils.data import Dataset, DataLoader, TensorDataset\n",
    "import torch\n",
    "from torch import nn\n",
    "\n",
    "# Dataset wrapper: tokenizer encodings + integer intent labels.\n",
    "class XunFeiDataset(Dataset):\n",
    "    def __init__(self, encodings, intent):\n",
    "        self.encodings = encodings\n",
    "        self.intent = intent\n",
    "\n",
    "    # One sample: every encoder tensor plus the intent label.\n",
    "    def __getitem__(self, idx):\n",
    "        item = {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\n",
    "        item['label'] = torch.tensor(int(self.intent[idx]))\n",
    "        return item\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.intent)\n",
    "\n",
    "# Multilingual BERT encoder with a linear intent-classification head.\n",
    "class XunFeiModel(nn.Module):\n",
    "    def __init__(self, num_labels):\n",
    "        super(XunFeiModel, self).__init__()\n",
    "        # Was `self.model = model = ...`, which leaked a `model` global.\n",
    "        self.model = AutoModel.from_pretrained(\"bert-base-multilingual-cased\")\n",
    "        self.dropout = nn.Dropout(0.1)\n",
    "        self.classifier = nn.Linear(768, num_labels)  # 768 = BERT-base hidden size\n",
    "\n",
    "    def forward(self, input_ids=None, attention_mask=None, labels=None):\n",
    "        outputs = self.model(input_ids=input_ids, attention_mask=attention_mask)\n",
    "        sequence_output = self.dropout(outputs[0])  # outputs[0] = last hidden state\n",
    "        # Classify on the [CLS] token representation.\n",
    "        logits = self.classifier(sequence_output[:, 0, :].view(-1, 768))\n",
    "        return logits"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 131,
   "id": "21ed95e5-97a4-4c83-9c36-35db2c542f5b",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:32:57.868261Z",
     "iopub.status.busy": "2022-07-01T11:32:57.867673Z",
     "iopub.status.idle": "2022-07-01T11:32:58.040976Z",
     "shell.execute_reply": "2022-07-01T11:32:58.040471Z",
     "shell.execute_reply.started": "2022-07-01T11:32:57.868211Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Hold out the last 500 rows of the shuffled frame as a validation split.\n",
    "texts = train_df['原始文本'].tolist()\n",
    "intent_codes = train_df['意图_encode'].tolist()\n",
    "\n",
    "train_encoding = tokenizer(texts[:-500], truncation=True, padding=True, max_length=40)\n",
    "val_encoding = tokenizer(texts[-500:], truncation=True, padding=True, max_length=40)\n",
    "\n",
    "train_dataset = XunFeiDataset(train_encoding, intent_codes[:-500])\n",
    "val_dataset = XunFeiDataset(val_encoding, intent_codes[-500:])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 133,
   "id": "ea7c4c5d-90b8-4197-80ba-fbdd7335e2f7",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:33:01.865481Z",
     "iopub.status.busy": "2022-07-01T11:33:01.864896Z",
     "iopub.status.idle": "2022-07-01T11:33:01.869880Z",
     "shell.execute_reply": "2022-07-01T11:33:01.869363Z",
     "shell.execute_reply.started": "2022-07-01T11:33:01.865434Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Batch the datasets: shuffle training batches, keep validation order fixed.\n",
    "train_loader = DataLoader(train_dataset, batch_size=16, shuffle=True)\n",
    "val_dataloader = DataLoader(val_dataset, batch_size=16, shuffle=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 134,
   "id": "e7ab0389-a6bf-4b14-8af9-49bb6c3748f9",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:33:02.868177Z",
     "iopub.status.busy": "2022-07-01T11:33:02.867606Z",
     "iopub.status.idle": "2022-07-01T11:33:06.649094Z",
     "shell.execute_reply": "2022-07-01T11:33:06.648409Z",
     "shell.execute_reply.started": "2022-07-01T11:33:02.868129Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at bert-base-multilingual-cased were not used when initializing BertModel: ['cls.seq_relationship.bias', 'cls.predictions.decoder.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.seq_relationship.weight', 'cls.predictions.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.dense.bias']\n",
      "- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
     ]
    }
   ],
   "source": [
    "# Size the classification head from the factorized labels instead of a\n",
    "# hardcoded 18 (len(lbl_ecode) == 18 for this data).\n",
    "model = XunFeiModel(len(lbl_ecode))\n",
    "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "model = model.to(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 135,
   "id": "630fb829-df67-4d86-b62a-e3ec1d7dc794",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:33:06.650167Z",
     "iopub.status.busy": "2022-07-01T11:33:06.649993Z",
     "iopub.status.idle": "2022-07-01T11:33:06.655412Z",
     "shell.execute_reply": "2022-07-01T11:33:06.654941Z",
     "shell.execute_reply.started": "2022-07-01T11:33:06.650152Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "from torch.nn import CrossEntropyLoss\n",
    "from torch.optim import AdamW\n",
    "\n",
    "loss_fn = CrossEntropyLoss()  # plain cross-entropy; no ignore_index is configured\n",
    "optim = AdamW(model.parameters(), lr=5e-5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 136,
   "id": "d70d5440-d3d3-4ace-8172-cd5684b4145c",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:33:07.863250Z",
     "iopub.status.busy": "2022-07-01T11:33:07.862805Z",
     "iopub.status.idle": "2022-07-01T11:33:07.873248Z",
     "shell.execute_reply": "2022-07-01T11:33:07.872631Z",
     "shell.execute_reply.started": "2022-07-01T11:33:07.863224Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "def train():\n",
    "    \"\"\"Run one epoch over train_loader, logging loss/accuracy every 100 iters.\"\"\"\n",
    "    model.train()\n",
    "    total_train_loss = 0\n",
    "    iter_num = 0\n",
    "    total_iter = len(train_loader)\n",
    "    for batch in train_loader:\n",
    "        # Forward pass\n",
    "        optim.zero_grad()\n",
    "        \n",
    "        input_ids = batch['input_ids'].to(device)\n",
    "        attention_mask = batch['attention_mask'].to(device)\n",
    "        label = batch['label'].to(device)\n",
    "\n",
    "        pred = model(\n",
    "            input_ids, \n",
    "            attention_mask\n",
    "        )\n",
    "        \n",
    "        loss = loss_fn(pred, label)\n",
    "        \n",
    "        # Backward pass; clip gradients to stabilize training\n",
    "        loss.backward()\n",
    "        torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0)\n",
    "        \n",
    "        # Parameter update\n",
    "        optim.step()\n",
    "\n",
    "        iter_num += 1\n",
    "        \n",
    "        # Log iteration, loss, progress %, and this batch's accuracy.\n",
    "        if(iter_num % 100 == 0):\n",
    "            print(\"iter_num: %d, loss: %.4f, %.2f%% %.4f\" % (\n",
    "                iter_num, loss.item(), iter_num/total_iter*100, \n",
    "                (pred.argmax(1) == label).float().data.cpu().numpy().mean(),\n",
    "            ))\n",
    "\n",
    "def validation():\n",
    "    \"\"\"Compute intent accuracy over the validation split and print it.\"\"\"\n",
    "    model.eval()\n",
    "    label_acc = 0\n",
    "    for batch in val_dataloader:\n",
    "        with torch.no_grad():\n",
    "            input_ids = batch['input_ids'].to(device)\n",
    "            attention_mask = batch['attention_mask'].to(device)\n",
    "            label = batch['label'].to(device)\n",
    "\n",
    "            pred = model(\n",
    "                input_ids, \n",
    "                attention_mask\n",
    "            )\n",
    "    \n",
    "            label_acc += (pred.argmax(1) == label).float().sum().item()\n",
    "    \n",
    "    label_acc = label_acc / len(val_dataloader.dataset)\n",
    "\n",
    "    print(\"-------------------------------\")\n",
    "    print(\"Accuracy: %.4f\" % (label_acc))\n",
    "    print(\"-------------------------------\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 137,
   "id": "7a4e2341-1ab9-4139-a1c6-b1025d841c5a",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:33:08.891504Z",
     "iopub.status.busy": "2022-07-01T11:33:08.890921Z",
     "iopub.status.idle": "2022-07-01T11:35:34.832808Z",
     "shell.execute_reply": "2022-07-01T11:35:34.832382Z",
     "shell.execute_reply.started": "2022-07-01T11:33:08.891458Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "iter_num: 100, loss: 0.9366, 13.91% 0.7500\n",
      "iter_num: 200, loss: 0.3068, 27.82% 0.8750\n",
      "iter_num: 300, loss: 0.2977, 41.72% 0.9375\n",
      "iter_num: 400, loss: 0.5186, 55.63% 0.8125\n",
      "iter_num: 500, loss: 0.3027, 69.54% 0.8750\n",
      "iter_num: 600, loss: 0.0159, 83.45% 1.0000\n",
      "iter_num: 700, loss: 0.0124, 97.36% 1.0000\n",
      "-------------------------------\n",
      "Accuracy: 0.9240\n",
      "-------------------------------\n",
      "iter_num: 100, loss: 0.3482, 13.91% 0.8750\n",
      "iter_num: 200, loss: 0.0354, 27.82% 1.0000\n",
      "iter_num: 300, loss: 0.2516, 41.72% 0.9375\n",
      "iter_num: 400, loss: 0.1396, 55.63% 0.9375\n",
      "iter_num: 500, loss: 0.0116, 69.54% 1.0000\n",
      "iter_num: 600, loss: 0.4513, 83.45% 0.9375\n",
      "iter_num: 700, loss: 0.0085, 97.36% 1.0000\n",
      "-------------------------------\n",
      "Accuracy: 0.9200\n",
      "-------------------------------\n"
     ]
    }
   ],
   "source": [
    "# Two epochs: train, then report validation accuracy after each.\n",
    "for epoch in range(2):\n",
    "    train()\n",
    "    validation()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 138,
   "id": "86725181-505a-4f8e-bbcd-4ded2d3639d3",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:35:49.867758Z",
     "iopub.status.busy": "2022-07-01T11:35:49.867284Z",
     "iopub.status.idle": "2022-07-01T11:35:49.874297Z",
     "shell.execute_reply": "2022-07-01T11:35:49.873320Z",
     "shell.execute_reply.started": "2022-07-01T11:35:49.867724Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "def prediction(dataloader=None):\n",
    "    \"\"\"Return predicted intent ids for every batch in `dataloader`.\n",
    "\n",
    "    Falls back to the module-level `test_dataloader` when no loader is\n",
    "    passed, so existing `prediction()` call sites keep working.\n",
    "    \"\"\"\n",
    "    if dataloader is None:\n",
    "        dataloader = test_dataloader\n",
    "    model.eval()\n",
    "    test_label = []\n",
    "    for batch in dataloader:\n",
    "        with torch.no_grad():\n",
    "            input_ids = batch['input_ids'].to(device)\n",
    "            attention_mask = batch['attention_mask'].to(device)\n",
    "\n",
    "            pred = model(input_ids, attention_mask)\n",
    "            test_label += list(pred.argmax(1).data.cpu().numpy())\n",
    "    return test_label"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 139,
   "id": "866ecef8-ee57-44ce-9c30-2a16c0ae6ef1",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:35:53.867719Z",
     "iopub.status.busy": "2022-07-01T11:35:53.867117Z",
     "iopub.status.idle": "2022-07-01T11:35:55.145881Z",
     "shell.execute_reply": "2022-07-01T11:35:55.145323Z",
     "shell.execute_reply.started": "2022-07-01T11:35:53.867669Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "def _predict_frame(frame):\n",
    "    \"\"\"Tokenize one test frame and return its predicted intent ids.\"\"\"\n",
    "    global test_dataloader  # prediction() reads this module-level name\n",
    "    test_encoding = tokenizer(frame['原始文本'].tolist(), truncation=True, padding=True, max_length=40)\n",
    "    test_dataset = XunFeiDataset(test_encoding, [0] * len(frame))  # dummy labels, never used\n",
    "    test_dataloader = DataLoader(test_dataset, batch_size=16, shuffle=False)\n",
    "    return prediction()\n",
    "\n",
    "test_en_intent = _predict_frame(test_en)\n",
    "test_ja_intent = _predict_frame(test_ja)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 140,
   "id": "89058e60-ee0a-40fa-9349-904040206913",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2022-07-01T11:35:57.865778Z",
     "iopub.status.busy": "2022-07-01T11:35:57.865146Z",
     "iopub.status.idle": "2022-07-01T11:35:58.497837Z",
     "shell.execute_reply": "2022-07-01T11:35:58.496692Z",
     "shell.execute_reply.started": "2022-07-01T11:35:57.865708Z"
    },
    "scrolled": true,
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Map predicted intent codes back to their original label strings.\n",
    "test_ja['意图'] = [lbl_ecode[x] for x in test_ja_intent]\n",
    "test_en['意图'] = [lbl_ecode[x] for x in test_en_intent]\n",
    "\n",
    "# Slot values are not predicted by this model; submit them empty.\n",
    "for frame in (test_en, test_ja):\n",
    "    frame['槽值1'] = np.nan\n",
    "    frame['槽值2'] = np.nan\n",
    "\n",
    "# Context manager replaces the deprecated writer.save() (removed in\n",
    "# pandas 2.0) followed by a redundant second close().\n",
    "with pd.ExcelWriter('submit.xlsx') as writer:\n",
    "    test_en[['意图', '槽值1', '槽值2']].to_excel(writer, sheet_name='英文_testA', index=None)\n",
    "    test_ja[['意图', '槽值1', '槽值2']].to_excel(writer, sheet_name='日语_testA', index=None)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ca7bf2c6-2ce3-472b-882a-cf445901309c",
   "metadata": {},
   "source": [
    "# 槽值识别模型 (slot-value recognition model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ca76f5a9-fe9b-4145-8990-6c5e19ce710b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Dataset for the slot-value model: encodings + two integer labels per row.\n",
    "class XunFeiDataset2(Dataset):\n",
    "    def __init__(self, encodings, label_i, label_j):\n",
    "        self.encodings = encodings\n",
    "        # Bug fix: the original assigned from undefined names (intent/tag1/tag2)\n",
    "        # and never set self.label_i / self.label_j, so __getitem__ would raise.\n",
    "        self.label_i = label_i\n",
    "        self.label_j = label_j\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        # Removed dead `tags_single` lines that referenced undefined names.\n",
    "        item = {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\n",
    "        item['label_i'] = torch.tensor(int(self.label_i[idx]))\n",
    "        item['label_j'] = torch.tensor(int(self.label_j[idx]))\n",
    "        return item\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.label_i)\n",
    "\n",
    "# Chinese RoBERTa encoder with two linear heads, one per slot label set.\n",
    "class XunFeiModel2(nn.Module):\n",
    "    def __init__(self, num_labels_i, num_labels_j):\n",
    "        # Bug fix: super() originally named XunFeiModel, not XunFeiModel2.\n",
    "        super(XunFeiModel2, self).__init__()\n",
    "\n",
    "        # Load the pretrained checkpoint and keep only its encoder body.\n",
    "        self.model = AutoModel.from_pretrained(\"hfl/chinese-roberta-wwm-ext\")\n",
    "        self.dropout = nn.Dropout(0.1)\n",
    "        self.classifier_i = nn.Linear(768, num_labels_i)\n",
    "        self.classifier_j = nn.Linear(768, num_labels_j)\n",
    "\n",
    "    def forward(self, input_ids=None, attention_mask=None, labels=None):\n",
    "        outputs = self.model(input_ids=input_ids, attention_mask=attention_mask)\n",
    "        sequence_output = self.dropout(outputs[0])  # outputs[0] = last hidden state\n",
    "\n",
    "        # Both heads classify on the [CLS] token representation.\n",
    "        logits_i = self.classifier_i(sequence_output[:, 0, :].view(-1, 768))\n",
    "        logits_j = self.classifier_j(sequence_output[:, 0, :].view(-1, 768))\n",
    "\n",
    "        return logits_i, logits_j"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.6 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
