{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"bert_softmax.ipynb","provenance":[],"collapsed_sections":[],"authorship_tag":"ABX9TyO2tfkI+03H8HCKdAqMUYAs"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"widgets":{"application/vnd.jupyter.widget-state+json":{"259f728994f44848b5139ab76461bab0":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_view_name":"HBoxView","_dom_classes":[],"_model_name":"HBoxModel","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.5.0","box_style":"","layout":"IPY_MODEL_cab2c7e1181743adb5bed504506f054d","_model_module":"@jupyter-widgets/controls","children":["IPY_MODEL_dfa5111c0f4a4c7da819c67cbeedeff8","IPY_MODEL_ed456021f67549f1bbee77a8d7ec0979","IPY_MODEL_8443489235424da58a2250761773d08a"]}},"cab2c7e1181743adb5bed504506f054d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"dfa5111c0f4a4c7da819c67cbeedeff8":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_650f48df35704899834646bb7a61297f","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":"Downloading: ","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_bd9a6eeb9dec4b1cba6f79981e324668"}},"ed456021f67549f1bbee77a8d7ec0979":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_view_name":"ProgressView","style":"IPY_MODEL_814c210354434bebbf8be814fed57744","_dom_classes":[],"description":"","_model_name":"FloatProgressModel","bar_style":"success","max":2482,"_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":2482,"_view_count":null,"_view_module_version":"1.5.0","orientation":"horizontal","min":0,"description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_6bd5161987bf47878f704b498e118f0d"}},"8443489235424da58a2250761773d08a":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_d2c4caa541de4268b79738eaea2c365a","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":" 6.34k/? [00:00&lt;00:00, 140kB/s]","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_73a1d734b6164fcd92f19fd1382c0b12"}},"650f48df35704899834646bb7a61297f":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"bd9a6eeb9dec4b1cba6f79981e324668":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"814c210354434bebbf8be814fed57744":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"ProgressStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","bar_color":null,"_model_module":"@jupyter-widgets/controls"}},"6bd5161987bf47878f704b498e118f0d":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"d2c4caa541de4268b79738eaea2c365a":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"73a1d734b6164fcd92f19fd1382c0b12":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}}}},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"hw0bsFiqWXy6","executionInfo":{"status":"ok","timestamp":1639988063808,"user_tz":-480,"elapsed":3323,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"6841d6fb-f6cc-4acf-b323-9724f8f10ad9"},"source":["from google.colab import drive\n","drive.mount('/content/drive')"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"]}]},{"cell_type":"code","metadata":{"id":"zalA7BN8XOEo","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1639988082381,"user_tz":-480,"elapsed":18589,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"96aa7720-369b-4338-e991-a0da64a77fdd"},"source":["import os\n","os.chdir('/content/drive/MyDrive/chinese task/CLUENER2020')\n","\n","#安装\n","!pip install transformers datasets seqeval"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting transformers\n","  Downloading transformers-4.14.1-py3-none-any.whl (3.4 MB)\n","\u001b[K     |████████████████████████████████| 3.4 MB 13.0 MB/s \n","\u001b[?25hCollecting datasets\n","  Downloading datasets-1.16.1-py3-none-any.whl (298 kB)\n","\u001b[K     |████████████████████████████████| 298 kB 45.5 MB/s \n","\u001b[?25hCollecting seqeval\n","  Downloading seqeval-1.2.2.tar.gz (43 kB)\n","\u001b[K     |████████████████████████████████| 43 kB 2.4 MB/s \n","\u001b[?25hCollecting pyyaml>=5.1\n","  Downloading PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (596 kB)\n","\u001b[K     |████████████████████████████████| 596 kB 42.4 MB/s \n","\u001b[?25hCollecting sacremoses\n","  Downloading sacremoses-0.0.46-py3-none-any.whl (895 kB)\n","\u001b[K     |████████████████████████████████| 895 kB 31.7 MB/s \n","\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from transformers) (2.23.0)\n","Collecting huggingface-hub<1.0,>=0.1.0\n","  Downloading huggingface_hub-0.2.1-py3-none-any.whl (61 kB)\n","\u001b[K     |████████████████████████████████| 61 kB 377 kB/s \n","\u001b[?25hRequirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (2019.12.20)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from transformers) (3.4.0)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from transformers) (21.3)\n","Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.7/dist-packages (from transformers) (4.62.3)\n","Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from transformers) (4.8.2)\n","Collecting tokenizers<0.11,>=0.10.1\n","  Downloading tokenizers-0.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (3.3 MB)\n","\u001b[K     |████████████████████████████████| 3.3 MB 35.0 MB/s \n","\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (1.19.5)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.7/dist-packages (from huggingface-hub<1.0,>=0.1.0->transformers) (3.10.0.2)\n","Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging>=20.0->transformers) (3.0.6)\n","Collecting xxhash\n","  Downloading xxhash-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl (243 kB)\n","\u001b[K     |████████████████████████████████| 243 kB 52.0 MB/s \n","\u001b[?25hRequirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from datasets) (1.1.5)\n","Requirement already satisfied: dill in /usr/local/lib/python3.7/dist-packages (from datasets) (0.3.4)\n","Requirement already satisfied: multiprocess in /usr/local/lib/python3.7/dist-packages (from datasets) (0.70.12.2)\n","Collecting fsspec[http]>=2021.05.0\n","  Downloading fsspec-2021.11.1-py3-none-any.whl (132 kB)\n","\u001b[K     |████████████████████████████████| 132 kB 53.7 MB/s \n","\u001b[?25hRequirement already satisfied: pyarrow!=4.0.0,>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from datasets) (3.0.0)\n","Collecting aiohttp\n","  Downloading aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.1 MB)\n","\u001b[K     |████████████████████████████████| 1.1 MB 27.5 MB/s \n","\u001b[?25hRequirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (3.0.4)\n","Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2.10)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2021.10.8)\n","Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (1.24.3)\n","Requirement already satisfied: scikit-learn>=0.21.3 in /usr/local/lib/python3.7/dist-packages (from seqeval) (1.0.1)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn>=0.21.3->seqeval) (3.0.0)\n","Requirement already satisfied: scipy>=1.1.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn>=0.21.3->seqeval) (1.4.1)\n","Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.7/dist-packages (from scikit-learn>=0.21.3->seqeval) (1.1.0)\n","Collecting asynctest==0.13.0\n","  Downloading asynctest-0.13.0-py3-none-any.whl (26 kB)\n","Collecting aiosignal>=1.1.2\n","  Downloading aiosignal-1.2.0-py3-none-any.whl (8.2 kB)\n","Collecting yarl<2.0,>=1.0\n","  Downloading yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (271 kB)\n","\u001b[K     |████████████████████████████████| 271 kB 53.5 MB/s \n","\u001b[?25hCollecting multidict<7.0,>=4.5\n","  Downloading multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (160 kB)\n","\u001b[K     |████████████████████████████████| 160 kB 49.4 MB/s \n","\u001b[?25hCollecting async-timeout<5.0,>=4.0.0a3\n","  Downloading async_timeout-4.0.1-py3-none-any.whl (5.7 kB)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (21.2.0)\n","Collecting frozenlist>=1.1.1\n","  Downloading frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (192 kB)\n","\u001b[K     |████████████████████████████████| 192 kB 48.9 MB/s \n","\u001b[?25hRequirement already satisfied: charset-normalizer<3.0,>=2.0 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (2.0.8)\n","Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata->transformers) (3.6.0)\n","Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->datasets) (2018.9)\n","Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas->datasets) (2.8.2)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.7.3->pandas->datasets) (1.15.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (7.1.2)\n","Building wheels for collected packages: seqeval\n","  Building wheel for seqeval (setup.py) ... \u001b[?25l\u001b[?25hdone\n","  Created wheel for seqeval: filename=seqeval-1.2.2-py3-none-any.whl size=16181 sha256=60964d9ca05e717771a87beaac2a661fd6428f46e16861848bf0a0598c212a1e\n","  Stored in directory: /root/.cache/pip/wheels/05/96/ee/7cac4e74f3b19e3158dce26a20a1c86b3533c43ec72a549fd7\n","Successfully built seqeval\n","Installing collected packages: multidict, frozenlist, yarl, asynctest, async-timeout, aiosignal, pyyaml, fsspec, aiohttp, xxhash, tokenizers, sacremoses, huggingface-hub, transformers, seqeval, datasets\n","  Attempting uninstall: pyyaml\n","    Found existing installation: PyYAML 3.13\n","    Uninstalling PyYAML-3.13:\n","      Successfully uninstalled PyYAML-3.13\n","Successfully installed aiohttp-3.8.1 aiosignal-1.2.0 async-timeout-4.0.1 asynctest-0.13.0 datasets-1.16.1 frozenlist-1.2.0 fsspec-2021.11.1 huggingface-hub-0.2.1 multidict-5.2.0 pyyaml-6.0 sacremoses-0.0.46 seqeval-1.2.2 tokenizers-0.10.3 transformers-4.14.1 xxhash-2.0.2 yarl-1.7.2\n"]}]},{"cell_type":"code","metadata":{"id":"0a0Y29QSXciS"},"source":["import os\n","import json\n","import logging\n","import numpy as np\n","import pandas as pd\n","import config"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"9_0f6C8cI-uu","executionInfo":{"status":"ok","timestamp":1639988091599,"user_tz":-480,"elapsed":1954,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"20a8f970-6adf-4a54-9dbc-1531c2520355"},"source":["#加载处理完的npz数据集\n","#不加allow_pickle=True会报错Object arrays cannot be loaded when allow_pickle=False，numpy新版本中默认为False。\n","train_data=np.load('./data/train.npz',allow_pickle=True)\n","val_data=np.load('./data/dev.npz',allow_pickle=True)\n","test_data=np.load('./data/test.npz',allow_pickle=True)\n","\n","test_data.files"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["['words', 'labels']"]},"metadata":{},"execution_count":5}]},{"cell_type":"markdown","source":["数据从npz格式加载到pandas，标签用数字替换，以便输入模型"],"metadata":{"id":"D89Ibexnkqhy"}},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":423},"id":"4DGMH-td8_Sn","executionInfo":{"status":"ok","timestamp":1639988093113,"user_tz":-480,"elapsed":1518,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"01a92a67-d9c1-4bd2-9089-80cb1e11ef54"},"source":["#转换为dataframe格式\n","import pandas as pd\n","#补个随机frac\n","train_df=pd.concat([pd.DataFrame(train_data['words'],columns=['words']),\n","          pd.DataFrame(train_data['labels'],columns=['labels'])],axis=1).sample(frac=1.0).rename(columns={'labels':'labels0'})\n","#测试集和验证集不需要shuffle\n","val_df=pd.concat([pd.DataFrame(val_data['words'],columns=['words']),\n","          pd.DataFrame(val_data['labels'],columns=['labels'])],axis=1).rename(columns={'labels':'labels0'})\n","\n","test_df=pd.concat([pd.DataFrame(test_data['words'],columns=['words']),\n","          pd.DataFrame(test_data['labels'],columns=['labels'])],axis=1).rename(columns={'labels':'labels0'})\n","\n","\n","#将训练验证集的BIOS标签转换为数字索引，此时word和labels已经对齐了\n","def trans(labels):\n","  labels=list(labels)\n","  nums=[]\n","  for label in labels:\n","    nums.append(config.label2id[label])\n","  return nums\n","    \n","train_df['labels0']=train_df['labels0'].map(lambda x: trans(x))\n","val_df['labels0']=val_df['labels0'].map(lambda x: trans(x))\n","\n","test_df['labels0']=test_df['labels0'].map(lambda x: trans(x))\n","val_df"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/html":["\n","  <div id=\"df-5609f3bd-3bca-4575-a382-26b669529129\">\n","    <div class=\"colab-df-container\">\n","      <div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>labels0</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1338</th>\n","      <td>[在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1339</th>\n","      <td>[姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...</td>\n","      <td>[6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1343 rows × 2 columns</p>\n","</div>\n","      <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-5609f3bd-3bca-4575-a382-26b669529129')\"\n","              title=\"Convert this dataframe to an interactive table.\"\n","              style=\"display:none;\">\n","        \n","  <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n","       width=\"24px\">\n","    <path d=\"M0 0h24v24H0V0z\" fill=\"none\"/>\n","    <path d=\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\"/><path d=\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\"/>\n","  </svg>\n","      </button>\n","      \n","  <style>\n","    .colab-df-container {\n","      display:flex;\n","      flex-wrap:wrap;\n","      gap: 12px;\n","    }\n","\n","    .colab-df-convert {\n","      background-color: #E8F0FE;\n","      border: none;\n","      border-radius: 50%;\n","      cursor: pointer;\n","      display: none;\n","      fill: #1967D2;\n","      height: 32px;\n","      padding: 0 0 0 0;\n","      width: 32px;\n","    }\n","\n","    .colab-df-convert:hover {\n","      background-color: #E2EBFA;\n","      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n","      fill: #174EA6;\n","    }\n","\n","    [theme=dark] .colab-df-convert {\n","      background-color: #3B4455;\n","      fill: #D2E3FC;\n","    }\n","\n","    [theme=dark] .colab-df-convert:hover {\n","      background-color: #434B5C;\n","      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n","      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n","      fill: #FFFFFF;\n","    }\n","  </style>\n","\n","      <script>\n","        const buttonEl =\n","          document.querySelector('#df-5609f3bd-3bca-4575-a382-26b669529129 button.colab-df-convert');\n","        buttonEl.style.display =\n","          google.colab.kernel.accessAllowed ? 'block' : 'none';\n","\n","        async function convertToInteractive(key) {\n","          const element = document.querySelector('#df-5609f3bd-3bca-4575-a382-26b669529129');\n","          const dataTable =\n","            await google.colab.kernel.invokeFunction('convertToInteractive',\n","                                                     [key], {});\n","          if (!dataTable) return;\n","\n","          const docLinkHtml = 'Like what you see? Visit the ' +\n","            '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n","            + ' to learn more about interactive tables.';\n","          element.innerHTML = '';\n","          dataTable['output_type'] = 'display_data';\n","          await google.colab.output.renderOutput(dataTable, element);\n","          const docLink = document.createElement('div');\n","          docLink.innerHTML = docLinkHtml;\n","          element.appendChild(docLink);\n","        }\n","      </script>\n","    </div>\n","  </div>\n","  "],"text/plain":["                                                  words                                            labels0\n","0     [彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...  [7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","1     [温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...  [7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...\n","2     [突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...  [4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...\n","3     [郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...  [0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...\n","4     [我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...  [0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","...                                                 ...                                                ...\n","1338  [在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1339  [姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...  [6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...\n","1340  [目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...  [0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...\n","1341  [也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...  [0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...\n","1342  [另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...  [0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...\n","\n","[1343 rows x 2 columns]"]},"metadata":{},"execution_count":6}]},{"cell_type":"markdown","source":["###word_ids可以将每一个subtokens位置对应一个word的下标。并且特殊字符对应了None。有了这个list，我们就能将subtokens和words还有标注的labels对齐啦，并将[cls]和[sep]位置的标签用-100填充。-100经过softmax会被忽略。"],"metadata":{"id":"V5V3sxi-htk4"}},{"cell_type":"code","source":["#这部分是word_ids的测试内容，不在主代码中\n","import transformers\n","assert isinstance(tokenizer, transformers.PreTrainedTokenizerFast)\n","example=['Germany',\"'s\",'representative','to','the','European','Union',\"'s\",'veterinary','committee','Werner','Zwingmann','said','on',\n","    'Wednesday','consumers','should','buy','sheepmeat','from','countries','other','than','Britain','until','the','scientific','advice','was','clearer','.']\n","tokenized_input=tokenizer(example[\"tokens\"],is_split_into_words=True)\n","tokens = tokenizer.convert_ids_to_tokens(tokenized_input[\"input_ids\"])\n","print(tokens)\n","\"\"\"\n","['[CLS]','ge','##rman','##y',\"'\",'s','re','##pr','##ese','##nt','##at','##ive','to','the','eur','##ope','##an','union' \"'\",'s','ve','##ter','##ina',\n","'##ry','com','##mit','##tee','we','##rner','z','##wing','##mann','said','on','wed','##nes','##day','con','##su','##mer','##s','sh','##ould','buy','she',\n","'##ep','##me','##at','from','count','##ries','other','than','br','##ita','##in','unt','##il','the','sci','##ent','##if','##ic','ad','##vice','was','cl','##ear','##er','.','[SEP]']\n","#is_split_into_words=True可以将字符串列表分词\n","\"\"\"\n","print(tokenized_input.word_ids())\n","#[None, 0, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, 3, 4, 5, 5, 5, 6, 7, 7, 8, 8, 8, 8, 9, 9, 9, 10, 10, 11, 11, 11, 12, 13, 14, 14, 14, 15, 15, \n","15, 15, 16, 16, 17, 18, 18, 18, 18, 19, 20, 20, 21, 22, 23, 23, 23, 24, 24, 25, 26, 26, 26, 26, 27, 27, 28, 29, 29, 29, 30, None]"],"metadata":{"id":"P_9382h4hpAO"},"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"vc0BSBLIIrJQ"},"source":["\"\"\"\n","将word_ids值为none的部分,即特殊符号[cls]和[sep]位置的标签转化为-100。\n","我们有两种对齐label的方式：\n","\n","1.label_all_tokens=True，多个subtokens对齐一个word，对齐一个label\n","2.label_all_tokens=False，多个subtokens的第一个subtoken对齐word，对齐一个label，其他subtokens直接赋予-100.\n","\"\"\"\n","label_all_tokens=True\n","def tokenize_and_align_labels(examples):\n","  tokenized_inputs=tokenizer(examples[\"words\"],truncation=True,is_split_into_words=True)#数据分词\n","\n","  pad_labels = []#创建labels列表\n","  for i,label in enumerate(examples['labels0']):\n","    word_ids=tokenized_inputs.word_ids(batch_index=i)#取出索引i的编码数据的word_ids属性\n","    previous_word_idx=None\n","    label_ids=[]\n","    for word_idx in word_ids:\n","      # 特殊标记的单词word_ids为None。将标签设置为-100，以便它们自动在损失函数中被忽略。\n","      if word_idx is None:\n","        label_ids.append(-100)\n","      # 我们为每个单词的第一个标记设置标签。（这里一个单词多个subword的word_idx只有一个数）\n","      elif word_idx != previous_word_idx:\n","        label_ids.append(label[word_idx])\n","      # 对于单词中的其他标记，我们将标签设置为当前标签或-100，具体取决于label_all_tokens标志。\n","      else:\n","        label_ids.append(label[word_idx] if label_all_tokens else -100)\n","      #label_all_tokens = True时，其它子词添加和第一个子词一样的标签，否则全部设为-100\n","      previous_word_idx = word_idx\n","\n","    pad_labels.append(label_ids)\n","\n","  tokenized_inputs[\"labels\"] = pad_labels\n","  return tokenized_inputs\n","\n","#如果是中文分词，只需要使用word_ids去除特殊单词就行，比如对应标签设置为-100。"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"cbs_SgaUP19o"},"source":["from datasets import Dataset\n","from transformers import AutoTokenizer\n","#这里一定要选AutoTokenizer，如果是BertTokenizer，会提示bertbase没有word_ids方法。结果没用到\n","\n","trains_ds=Dataset.from_pandas(train_df)\n","val_ds=Dataset.from_pandas(val_df)\n","test_ds=Dataset.from_pandas(test_df)\n","\n","tokenizer=AutoTokenizer.from_pretrained(config.roberta_model,do_lower_case=True)\n","\n","\n","tokenized_trains_ds=trains_ds.map(tokenize_and_align_labels,batched=True)\n","tokenized_val_ds=val_ds.map(tokenize_and_align_labels,batched=True)\n","tokenized_test_ds=test_ds.map(tokenize_and_align_labels,batched=True)"],"execution_count":null,"outputs":[]},{"cell_type":"code","source":["print(tokenized_trains_ds[0])"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"kOdS_0RCgjWi","executionInfo":{"status":"ok","timestamp":1639988114497,"user_tz":-480,"elapsed":53,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"e7ef78e8-2551-4e37-9afc-c9cfe5efc86c"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["{'__index_level_0__': 856, 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 'input_ids': [101, 3173, 1290, 5381, 3341, 3975, 8038, 5042, 3694, 1062, 2357, 121, 130, 2399, 686, 4518, 1092, 1213, 2961, 1399, 5401, 3791, 915, 704, 5739, 1146, 1154, 1184, 758, 102], 'labels': [-100, 3, 13, 13, 0, 0, 0, 2, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -100], 'labels0': [3, 13, 13, 0, 0, 0, 2, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'token_type_ids': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'words': ['新', '华', '网', '来', '源', '：', '简', '氏', '公', '布', '0', '9', '年', '世', '界', '军', '力', '排', '名', '美', '法', '俄', '中', '英', '分', '列', '前', '五']}\n"]}]},{"cell_type":"code","metadata":{"id":"RcKd7JbCIu-b"},"source":["#加载模型\n","import torch\n","from transformers import AutoModelForTokenClassification\n","\n","model = AutoModelForTokenClassification.from_pretrained('hfl/chinese-roberta-wwm-ext-large',num_labels=31)\n","\n","device=torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n","model.to(device)"],"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["###解读BertForTokenClassification任务头（说明代码，不需要运行）\n","###计算loss时忽略padding部分（即只算attention_mask==1部分）步骤为：\n"],"metadata":{"id":"xz7S6pYrnMhN"}},{"cell_type":"code","source":["sequence_output=outputs[0]#bert输出取第一维，即每个tokens的隐向量   \n","sequence_output=self.dropout(sequence_output)\n","logits=self.classifier(sequence_output)#经过线性变换，从torch.Size([3,52,1024])转为torch.Size([3,52,31])\n","loss_fct = CrossEntropyLoss()#交叉熵损失函数，自带softmax\n","\n","if attention_mask is not None:\n","  #1.先取出mask矩阵压缩为一维，attention_mask==1转为一维真假矩阵。\n","  active_loss=attention_mask.view(-1)==1#torch.Size([156])，只有156个有效tokens。\n","\n","  #2.labels压缩，再创建一个同形状的loss ignore_index矩阵\n","  active_logits=logits.view(-1,self.num_labels)#logits变成一维。即由torch.Size([3,52,31])变成torch.Size([156,31])\n","\n","  #3.torch.where取出labels对应mask==1的部分，其余部分为loss忽略索引。即labels为（mask==1和忽略部分）\n","  active_labels=torch.where(active_loss,labels.view(-1),\n","              torch.tensor(loss_fct.ignore_index).type_as(labels))#torch.Size([156,31])，labels含有ignore_index。\n","  \"\"\"torch.tensor(loss_fct.ignore_index).type_as(labels)就是一个全部为loss忽视索引，形状和labels一样的矩阵。\n","    torch.where的用法就是满足参数1的条件active_loss，就从参数2矩阵取值，否则从参数3矩阵取值\"\"\"\n","  #4.计算logtis和active_labels的loss。\n","  loss=loss_fct(active_logits,active_labels)\n","else:\n","  loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))\n","\n","\"\"\"然而，只有pad部分attention_mask==0,句子首尾部分的cls和sep还是计算loss的。本身这样处理还留有cls和sep。\"\"\""],"metadata":{"id":"VSKKB3SUnD4-"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["label2id = {\n","    \"O\": 0,\n","    \"B-address\": 1,\n","    \"B-book\": 2,\n","    \"B-company\": 3,\n","    'B-game': 4,\n","    'B-government': 5,\n","    'B-movie': 6,\n","    'B-name': 7,\n","    'B-organization': 8,\n","    'B-position': 9,\n","    'B-scene': 10,\n","    \"I-address\": 11,\n","    \"I-book\": 12,\n","    \"I-company\": 13,\n","    'I-game': 14,\n","    'I-government': 15,\n","    'I-movie': 16,\n","    'I-name': 17,\n","    'I-organization': 18,\n","    'I-position': 19,\n","    'I-scene': 20,\n","    \"S-address\": 21,\n","    \"S-book\": 22,\n","    \"S-company\": 23,\n","    'S-game': 24,\n","    'S-government': 25,\n","    'S-movie': 26,\n","    'S-name': 27,\n","    'S-organization': 28,\n","    'S-position': 29,\n","    'S-scene': 30\n","}\n","\n","label_list= [label for label,id in list(label2id.items())]\n","label_list"],"metadata":{"id":"GyccSGUSwvLK"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["## 设置seqeval评测方法，需要注意以下几点\n","\n","- 选择预测分类最大概率的下标\n","- 将数字下标转化为BIOS格式的label，因为seqeval除了总的指标，还可以查看各个类别的指标。如果只是数字，运行时会有异常提示（但正常运行）\n","- 忽略-100所在地方，即特殊tokens的位置\n","- 这步和token分类任务头合起来，就将pad部位和特殊tokens部分都忽略loss计算了。"],"metadata":{"id":"CjWDhXlUx35X"}},{"cell_type":"code","source":["from datasets import load_metric\n","metric=load_metric(\"seqeval\")\n","import numpy as np\n","\n","def compute_metrics(p):\n","  predictions,labels = p\n","  predictions = np.argmax(predictions,axis=2)\n","\n","  # 去掉特殊字符处的值，不作比较。将label由数字转为ner标签31类。\n","  true_predictions = [\n","    [label_list[p] for (p, l) in zip(prediction, label) if l != -100]\n","    for prediction, label in zip(predictions, labels)\n","  ]\n","  true_labels = [\n","    [label_list[l] for (p, l) in zip(prediction, label) if l != -100]\n","    for prediction, label in zip(predictions, labels)\n","  ]\n","\n","  results = metric.compute(predictions=true_predictions,references=true_labels)\n","  return {\n","      \"precision\": results[\"overall_precision\"],\n","      \"recall\": results[\"overall_recall\"],\n","      \"f1\": results[\"overall_f1\"],\n","      \"accuracy\": results[\"overall_accuracy\"],\n","  }"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":49,"referenced_widgets":["259f728994f44848b5139ab76461bab0","cab2c7e1181743adb5bed504506f054d","dfa5111c0f4a4c7da819c67cbeedeff8","ed456021f67549f1bbee77a8d7ec0979","8443489235424da58a2250761773d08a","650f48df35704899834646bb7a61297f","bd9a6eeb9dec4b1cba6f79981e324668","814c210354434bebbf8be814fed57744","6bd5161987bf47878f704b498e118f0d","d2c4caa541de4268b79738eaea2c365a","73a1d734b6164fcd92f19fd1382c0b12"]},"id":"1PAU6Y2IwBvS","executionInfo":{"status":"ok","timestamp":1639988209758,"user_tz":-480,"elapsed":1019,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"1b0b06b7-76f6-4fe6-9a60-9d2bdd3b54fa"},"execution_count":null,"outputs":[{"output_type":"display_data","data":{"application/vnd.jupyter.widget-view+json":{"model_id":"259f728994f44848b5139ab76461bab0","version_minor":0,"version_major":2},"text/plain":["Downloading:   0%|          | 0.00/2.48k [00:00<?, ?B/s]"]},"metadata":{}}]},{"cell_type":"code","source":["batch_size=32\n","metric_name=\"f1\"\n","#数据整理器,将接收到的输入及标签进行动态填充。估计是不填充的话labels不齐，无法输入模型\n","from transformers import DataCollatorForTokenClassification\n","data_collator = DataCollatorForTokenClassification(tokenizer)\n","\n","from transformers import TrainingArguments,Trainer\n","args=TrainingArguments(\n","  \"bert_softmax\",\n","  evaluation_strategy=\"epoch\",\n","  #save_strategy=\"epoch\",\n","  learning_rate=2e-5,\n","  per_device_train_batch_size=batch_size,\n","  per_device_eval_batch_size=batch_size,\n","  num_train_epochs=8,\n","  weight_decay=0.01,\n","  metric_for_best_model=metric_name#只是调用最好的模型,)\n","\n","trainer=Trainer(model,args,\n","  train_dataset=tokenized_trains_ds,\n","  eval_dataset=tokenized_val_ds,\n","  data_collator=data_collator,\n","  tokenizer=tokenizer,\n","  compute_metrics=compute_metrics)"],"metadata":{"id":"jqQIhRuAjBN3"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["#进行训练\n","trainer.train()"],"metadata":{"id":"P8Im2PLNIIcV","colab":{"base_uri":"https://localhost:8080/","height":1000},"executionInfo":{"status":"ok","timestamp":1639993880610,"user_tz":-480,"elapsed":5645764,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"6fce6b97-c8b7-481a-f95d-7a4029f643a4"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["The following columns in the training set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0, __index_level_0__.\n","***** Running training *****\n","  Num examples = 10748\n","  Num Epochs = 8\n","  Instantaneous batch size per device = 32\n","  Total train batch size (w. parallel, distributed & accumulation) = 32\n","  Gradient Accumulation steps = 1\n","  Total optimization steps = 2688\n"]},{"output_type":"display_data","data":{"text/html":["\n","    <div>\n","      \n","      <progress value='2688' max='2688' style='width:300px; height:20px; vertical-align: middle;'></progress>\n","      [2688/2688 1:34:02, Epoch 8/8]\n","    </div>\n","    <table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: left;\">\n","      <th>Epoch</th>\n","      <th>Training Loss</th>\n","      <th>Validation Loss</th>\n","      <th>Precision</th>\n","      <th>Recall</th>\n","      <th>F1</th>\n","      <th>Accuracy</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <td>1</td>\n","      <td>No log</td>\n","      <td>0.205783</td>\n","      <td>0.684743</td>\n","      <td>0.793294</td>\n","      <td>0.735032</td>\n","      <td>0.938082</td>\n","    </tr>\n","    <tr>\n","      <td>2</td>\n","      <td>0.302400</td>\n","      <td>0.205716</td>\n","      <td>0.716806</td>\n","      <td>0.806641</td>\n","      <td>0.759075</td>\n","      <td>0.939276</td>\n","    </tr>\n","    <tr>\n","      <td>3</td>\n","      <td>0.117300</td>\n","      <td>0.213287</td>\n","      <td>0.736779</td>\n","      <td>0.798177</td>\n","      <td>0.766250</td>\n","      <td>0.941265</td>\n","    </tr>\n","    <tr>\n","      <td>4</td>\n","      <td>0.117300</td>\n","      <td>0.244457</td>\n","      <td>0.735330</td>\n","      <td>0.791341</td>\n","      <td>0.762308</td>\n","      <td>0.939952</td>\n","    </tr>\n","    <tr>\n","      <td>5</td>\n","      <td>0.056000</td>\n","      <td>0.275058</td>\n","      <td>0.743161</td>\n","      <td>0.795898</td>\n","      <td>0.768626</td>\n","      <td>0.941146</td>\n","    </tr>\n","    <tr>\n","      <td>6</td>\n","      <td>0.031100</td>\n","      <td>0.302491</td>\n","      <td>0.738582</td>\n","      <td>0.800130</td>\n","      <td>0.768125</td>\n","      <td>0.941663</td>\n","    </tr>\n","    <tr>\n","      <td>7</td>\n","      <td>0.031100</td>\n","      <td>0.326065</td>\n","      <td>0.739182</td>\n","      <td>0.806315</td>\n","      <td>0.771291</td>\n","      <td>0.942957</td>\n","    </tr>\n","    <tr>\n","      <td>8</td>\n","      <td>0.015800</td>\n","      <td>0.336456</td>\n","      <td>0.741374</td>\n","      <td>0.804362</td>\n","      <td>0.771585</td>\n","      <td>0.941882</td>\n","    </tr>\n","  </tbody>\n","</table><p>"],"text/plain":["<IPython.core.display.HTML object>"]},"metadata":{}},{"output_type":"stream","name":"stderr","text":["The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-336\n","Configuration saved in bert_softmax/checkpoint-336/config.json\n","Model weights saved in bert_softmax/checkpoint-336/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-336/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-336/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-672\n","Configuration saved in bert_softmax/checkpoint-672/config.json\n","Model weights saved in bert_softmax/checkpoint-672/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-672/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-672/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-1008\n","Configuration saved in bert_softmax/checkpoint-1008/config.json\n","Model weights saved in bert_softmax/checkpoint-1008/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-1008/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-1008/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-1344\n","Configuration saved in bert_softmax/checkpoint-1344/config.json\n","Model weights saved in bert_softmax/checkpoint-1344/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-1344/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-1344/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-1680\n","Configuration saved in bert_softmax/checkpoint-1680/config.json\n","Model weights saved in bert_softmax/checkpoint-1680/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-1680/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-1680/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-2016\n","Configuration saved in bert_softmax/checkpoint-2016/config.json\n","Model weights saved in bert_softmax/checkpoint-2016/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-2016/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-2016/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-2352\n","Configuration saved in bert_softmax/checkpoint-2352/config.json\n","Model weights saved in bert_softmax/checkpoint-2352/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-2352/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-2352/special_tokens_map.json\n","The following columns in the evaluation set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Evaluation *****\n","  Num examples = 1343\n","  Batch size = 32\n","Saving model checkpoint to bert_softmax/checkpoint-2688\n","Configuration saved in bert_softmax/checkpoint-2688/config.json\n","Model weights saved in bert_softmax/checkpoint-2688/pytorch_model.bin\n","tokenizer config file saved in bert_softmax/checkpoint-2688/tokenizer_config.json\n","Special tokens file saved in bert_softmax/checkpoint-2688/special_tokens_map.json\n","\n","\n","Training completed. Do not forget to share your model on huggingface.co/models =)\n","\n","\n"]},{"output_type":"execute_result","data":{"text/plain":["TrainOutput(global_step=2688, training_loss=0.09796489925966376, metrics={'train_runtime': 5645.1208, 'train_samples_per_second': 15.232, 'train_steps_per_second': 0.476, 'total_flos': 8072824637823936.0, 'train_loss': 0.09796489925966376, 'epoch': 8.0})"]},"metadata":{},"execution_count":16}]},{"cell_type":"markdown","source":["如果想要得到单个类别的precision/recall/f1，我们直接将结果输入相同的评估函数即可："],"metadata":{"id":"CohszA84z0Qj"}},{"cell_type":"code","source":["#进行评估\n","trainer.evaluate()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"LaA39rZh0J2U","executionInfo":{"status":"ok","timestamp":1639994141892,"user_tz":-480,"elapsed":7799,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"4c0f8e0f-01c4-4c60-a183-854f5917255c"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["Saving model checkpoint to ./checkpoint-2688/\n","Configuration saved in ./checkpoint-2688/config.json\n","Model weights saved in ./checkpoint-2688/pytorch_model.bin\n","tokenizer config file saved in ./checkpoint-2688/tokenizer_config.json\n","Special tokens file saved in ./checkpoint-2688/special_tokens_map.json\n"]}]},{"cell_type":"code","source":["import torch\n","torch.save(model.state_dict(),\"./bert_softmax/bert_lstm_softmax_model\")"],"metadata":{"id":"ZgE6ktJURUze"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["predictions,labels,loss=trainer.predict(tokenized_val_ds)\n","predictions=np.argmax(predictions,axis=2)\n","\n","# Remove ignored index (special tokens)\n","true_predictions = [\n","    [label_list[p] for (p,l) in zip(prediction, label) if l != -100]\n","    for prediction, label in zip(predictions, labels)\n","]\n","true_labels = [\n","    [label_list[l] for (p, l) in zip(prediction, label) if l != -100]\n","    for prediction, label in zip(predictions, labels)\n","]\n","\n","results=metric.compute(predictions=true_predictions,references=true_labels)\n","results"],"metadata":{"id":"UIKUR5-dz2Hs"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["#将结果排序查看\n","result_df=pd.DataFrame(results)\n","result_df.stack().unstack(0).sort_values(by=['f1'])"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":488},"id":"mDVlPD2baPXr","executionInfo":{"status":"ok","timestamp":1639996964607,"user_tz":-480,"elapsed":1254,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"12398920-e803-4870-f79f-873d8835f236"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/html":["\n","  <div id=\"df-99b1b09b-aed8-4ffa-8e05-43b4b39704bf\">\n","    <div class=\"colab-df-container\">\n","      <div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>precision</th>\n","      <th>recall</th>\n","      <th>f1</th>\n","      <th>number</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>address</th>\n","      <td>0.556627</td>\n","      <td>0.619303</td>\n","      <td>0.586294</td>\n","      <td>373.000000</td>\n","    </tr>\n","    <tr>\n","      <th>scene</th>\n","      <td>0.684211</td>\n","      <td>0.746411</td>\n","      <td>0.713959</td>\n","      <td>209.000000</td>\n","    </tr>\n","    <tr>\n","      <th>overall_precision</th>\n","      <td>0.741374</td>\n","      <td>0.741374</td>\n","      <td>0.741374</td>\n","      <td>0.741374</td>\n","    </tr>\n","    <tr>\n","      <th>organization</th>\n","      <td>0.713592</td>\n","      <td>0.801090</td>\n","      <td>0.754814</td>\n","      <td>367.000000</td>\n","    </tr>\n","    <tr>\n","      <th>book</th>\n","      <td>0.743902</td>\n","      <td>0.792208</td>\n","      <td>0.767296</td>\n","      <td>154.000000</td>\n","    </tr>\n","    <tr>\n","      <th>overall_f1</th>\n","      <td>0.771585</td>\n","      <td>0.771585</td>\n","      <td>0.771585</td>\n","      <td>0.771585</td>\n","    </tr>\n","    <tr>\n","      <th>position</th>\n","      <td>0.753813</td>\n","      <td>0.799076</td>\n","      <td>0.775785</td>\n","      <td>433.000000</td>\n","    </tr>\n","    <tr>\n","      <th>company</th>\n","      <td>0.752427</td>\n","      <td>0.820106</td>\n","      <td>0.784810</td>\n","      <td>378.000000</td>\n","    </tr>\n","    <tr>\n","      <th>government</th>\n","      <td>0.738516</td>\n","      <td>0.846154</td>\n","      <td>0.788679</td>\n","      <td>247.000000</td>\n","    </tr>\n","    <tr>\n","      <th>overall_recall</th>\n","      <td>0.804362</td>\n","      <td>0.804362</td>\n","      <td>0.804362</td>\n","      <td>0.804362</td>\n","    </tr>\n","    <tr>\n","      <th>game</th>\n","      <td>0.808050</td>\n","      <td>0.884746</td>\n","      <td>0.844660</td>\n","      <td>295.000000</td>\n","    </tr>\n","    <tr>\n","      <th>movie</th>\n","      <td>0.858108</td>\n","      <td>0.841060</td>\n","      <td>0.849498</td>\n","      <td>151.000000</td>\n","    </tr>\n","    <tr>\n","      <th>name</th>\n","      <td>0.848671</td>\n","      <td>0.892473</td>\n","      <td>0.870021</td>\n","      <td>465.000000</td>\n","    </tr>\n","    <tr>\n","      <th>overall_accuracy</th>\n","      <td>0.941882</td>\n","      <td>0.941882</td>\n","      <td>0.941882</td>\n","      <td>0.941882</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>\n","      <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-99b1b09b-aed8-4ffa-8e05-43b4b39704bf')\"\n","              title=\"Convert this dataframe to an interactive table.\"\n","              style=\"display:none;\">\n","        \n","  <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n","       width=\"24px\">\n","    <path d=\"M0 0h24v24H0V0z\" fill=\"none\"/>\n","    <path d=\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\"/><path d=\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\"/>\n","  </svg>\n","      </button>\n","      \n","  <style>\n","    .colab-df-container {\n","      display:flex;\n","      flex-wrap:wrap;\n","      gap: 12px;\n","    }\n","\n","    .colab-df-convert {\n","      background-color: #E8F0FE;\n","      border: none;\n","      border-radius: 50%;\n","      cursor: pointer;\n","      display: none;\n","      fill: #1967D2;\n","      height: 32px;\n","      padding: 0 0 0 0;\n","      width: 32px;\n","    }\n","\n","    .colab-df-convert:hover {\n","      background-color: #E2EBFA;\n","      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n","      fill: #174EA6;\n","    }\n","\n","    [theme=dark] .colab-df-convert {\n","      background-color: #3B4455;\n","      fill: #D2E3FC;\n","    }\n","\n","    [theme=dark] .colab-df-convert:hover {\n","      background-color: #434B5C;\n","      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n","      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n","      fill: #FFFFFF;\n","    }\n","  </style>\n","\n","      <script>\n","        const buttonEl =\n","          document.querySelector('#df-99b1b09b-aed8-4ffa-8e05-43b4b39704bf button.colab-df-convert');\n","        buttonEl.style.display =\n","          google.colab.kernel.accessAllowed ? 'block' : 'none';\n","\n","        async function convertToInteractive(key) {\n","          const element = document.querySelector('#df-99b1b09b-aed8-4ffa-8e05-43b4b39704bf');\n","          const dataTable =\n","            await google.colab.kernel.invokeFunction('convertToInteractive',\n","                                                     [key], {});\n","          if (!dataTable) return;\n","\n","          const docLinkHtml = 'Like what you see? Visit the ' +\n","            '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n","            + ' to learn more about interactive tables.';\n","          element.innerHTML = '';\n","          dataTable['output_type'] = 'display_data';\n","          await google.colab.output.renderOutput(dataTable, element);\n","          const docLink = document.createElement('div');\n","          docLink.innerHTML = docLinkHtml;\n","          element.appendChild(docLink);\n","        }\n","      </script>\n","    </div>\n","  </div>\n","  "],"text/plain":["                   precision    recall        f1      number\n","address             0.556627  0.619303  0.586294  373.000000\n","scene               0.684211  0.746411  0.713959  209.000000\n","overall_precision   0.741374  0.741374  0.741374    0.741374\n","organization        0.713592  0.801090  0.754814  367.000000\n","book                0.743902  0.792208  0.767296  154.000000\n","overall_f1          0.771585  0.771585  0.771585    0.771585\n","position            0.753813  0.799076  0.775785  433.000000\n","company             0.752427  0.820106  0.784810  378.000000\n","government          0.738516  0.846154  0.788679  247.000000\n","overall_recall      0.804362  0.804362  0.804362    0.804362\n","game                0.808050  0.884746  0.844660  295.000000\n","movie               0.858108  0.841060  0.849498  151.000000\n","name                0.848671  0.892473  0.870021  465.000000\n","overall_accuracy    0.941882  0.941882  0.941882    0.941882"]},"metadata":{},"execution_count":61}]},{"cell_type":"code","source":["#预测验证集结果并对比标签\n","predictions,metrics,Loss=trainer.predict(tokenized_val_ds,metric_key_prefix=\"test\")\n","pred=np.argmax(predictions,axis=2)#生成的结果是二维数组，所以需要用下一行进行转换。\n","preds=[x for x in pred]\n","val_df['preds']=pd.Series(preds)\n","val_df.to_csv('./bert_softmax/val_1220.csv')\n","val_df"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":513},"id":"FnOFgbj81SPl","executionInfo":{"status":"ok","timestamp":1639995952462,"user_tz":-480,"elapsed":27279,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"db73b399-efb1-4192-e2c4-13e2af482d00"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["The following columns in the test set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Prediction *****\n","  Num examples = 1343\n","  Batch size = 32\n"]},{"output_type":"display_data","data":{"text/html":["\n","    <div>\n","      \n","      <progress value='212' max='42' style='width:300px; height:20px; vertical-align: middle;'></progress>\n","      [42/42 31:38]\n","    </div>\n","    "],"text/plain":["<IPython.core.display.HTML object>"]},"metadata":{}},{"output_type":"execute_result","data":{"text/html":["\n","  <div id=\"df-27a50086-ffda-40dc-a8a2-68519d2488e3\">\n","    <div class=\"colab-df-container\">\n","      <div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>labels0</th>\n","      <th>preds</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","      <td>[0, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","      <td>[0, 4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 1...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1338</th>\n","      <td>[在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1339</th>\n","      <td>[姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...</td>\n","      <td>[6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...</td>\n","      <td>[0, 7, 17, 17, 16, 16, 16, 16, 16, 16, 16, 16,...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","      <td>[0, 0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 18, 18, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 1...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1343 rows × 3 columns</p>\n","</div>\n","      <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-27a50086-ffda-40dc-a8a2-68519d2488e3')\"\n","              title=\"Convert this dataframe to an interactive table.\"\n","              style=\"display:none;\">\n","        \n","  <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n","       width=\"24px\">\n","    <path d=\"M0 0h24v24H0V0z\" fill=\"none\"/>\n","    <path d=\"M18.56 5.44l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94zm-11 1L8.5 8.5l.94-2.06 2.06-.94-2.06-.94L8.5 2.5l-.94 2.06-2.06.94zm10 10l.94 2.06.94-2.06 2.06-.94-2.06-.94-.94-2.06-.94 2.06-2.06.94z\"/><path d=\"M17.41 7.96l-1.37-1.37c-.4-.4-.92-.59-1.43-.59-.52 0-1.04.2-1.43.59L10.3 9.45l-7.72 7.72c-.78.78-.78 2.05 0 2.83L4 21.41c.39.39.9.59 1.41.59.51 0 1.02-.2 1.41-.59l7.78-7.78 2.81-2.81c.8-.78.8-2.07 0-2.86zM5.41 20L4 18.59l7.72-7.72 1.47 1.35L5.41 20z\"/>\n","  </svg>\n","      </button>\n","      \n","  <style>\n","    .colab-df-container {\n","      display:flex;\n","      flex-wrap:wrap;\n","      gap: 12px;\n","    }\n","\n","    .colab-df-convert {\n","      background-color: #E8F0FE;\n","      border: none;\n","      border-radius: 50%;\n","      cursor: pointer;\n","      display: none;\n","      fill: #1967D2;\n","      height: 32px;\n","      padding: 0 0 0 0;\n","      width: 32px;\n","    }\n","\n","    .colab-df-convert:hover {\n","      background-color: #E2EBFA;\n","      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n","      fill: #174EA6;\n","    }\n","\n","    [theme=dark] .colab-df-convert {\n","      background-color: #3B4455;\n","      fill: #D2E3FC;\n","    }\n","\n","    [theme=dark] .colab-df-convert:hover {\n","      background-color: #434B5C;\n","      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n","      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n","      fill: #FFFFFF;\n","    }\n","  </style>\n","\n","      <script>\n","        const buttonEl =\n","          document.querySelector('#df-27a50086-ffda-40dc-a8a2-68519d2488e3 button.colab-df-convert');\n","        buttonEl.style.display =\n","          google.colab.kernel.accessAllowed ? 'block' : 'none';\n","\n","        async function convertToInteractive(key) {\n","          const element = document.querySelector('#df-27a50086-ffda-40dc-a8a2-68519d2488e3');\n","          const dataTable =\n","            await google.colab.kernel.invokeFunction('convertToInteractive',\n","                                                     [key], {});\n","          if (!dataTable) return;\n","\n","          const docLinkHtml = 'Like what you see? Visit the ' +\n","            '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n","            + ' to learn more about interactive tables.';\n","          element.innerHTML = '';\n","          dataTable['output_type'] = 'display_data';\n","          await google.colab.output.renderOutput(dataTable, element);\n","          const docLink = document.createElement('div');\n","          docLink.innerHTML = docLinkHtml;\n","          element.appendChild(docLink);\n","        }\n","      </script>\n","    </div>\n","  </div>\n","  "],"text/plain":["                                                  words  ...                                              preds\n","0     [彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...  ...  [0, 7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","1     [温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...  ...  [0, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...\n","2     [突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...  ...  [0, 4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 1...\n","3     [郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...  ...  [0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0...\n","4     [我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...  ...  [0, 0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0...\n","...                                                 ...  ...                                                ...\n","1338  [在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...  ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1339  [姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...  ...  [0, 7, 17, 17, 16, 16, 16, 16, 16, 16, 16, 16,...\n","1340  [目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...  ...  [0, 0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15...\n","1341  [也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...  ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 18, 18, 0, 0, 0...\n","1342  [另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...  ...  [0, 0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 1...\n","\n","[1343 rows x 3 columns]"]},"metadata":{},"execution_count":47}]},{"cell_type":"code","source":["#测试预测的结果，这部分不需要运行。\n","#预测出来的结果二维数组，不能直接转为Series。如果直接装进DataFrame，每个词是一列，一共52列\n","import pandas as pd\n","df=pd.Series(a)\n","print(pred)\n","print(df)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"OBWDZH6XVrjh","executionInfo":{"status":"ok","timestamp":1639995748716,"user_tz":-480,"elapsed":469,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"102026e1-c933-4949-cf39-1748e3cbeaa7"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["[[ 0  8  0 ...  0  0  0]\n"," [ 0  5 15 ... 15 15  0]\n"," [ 0  0  0 ...  0 14  0]\n"," ...\n"," [ 0  0  0 ... 12  0 12]\n"," [ 0  0  0 ... 16  0  0]\n"," [ 0  8 18 ...  0  0  0]]\n","0       [29, 12, 1, 1, 42, 1, 1, 42, 1, 1, 23, 14, 4, ...\n","1       [28, 13, 17, 17, 1, 1, 17, 13, 17, 7, 13, 14, ...\n","2       [3, 3, 18, 6, 6, 6, 6, 5, 6, 6, 6, 4, 16, 16, ...\n","3       [5, 13, 22, 45, 39, 45, 10, 10, 24, 40, 10, 14...\n","4       [32, 20, 14, 20, 14, 20, 14, 14, 41, 33, 20, 2...\n","                              ...                        \n","1340    [28, 43, 12, 24, 3, 31, 4, 31, 31, 24, 43, 32,...\n","1341    [22, 7, 33, 3, 10, 10, 46, 33, 10, 10, 23, 8, ...\n","1342    [26, 39, 18, 18, 45, 40, 18, 14, 18, 3, 3, 44,...\n","1343    [2, 23, 46, 46, 46, 40, 46, 40, 40, 10, 46, 24...\n","1344    [32, 10, 41, 33, 41, 34, 41, 41, 1, 41, 33, 13...\n","Length: 1345, dtype: object\n"]}]},{"cell_type":"code","source":["#用trainer预测结果并保存\n","predictions,metrics,Loss=trainer.predict(tokenized_test_ds,metric_key_prefix=\"test\")\n","pred=np.argmax(predictions,axis=1)\n","preds=preds=[x for x in pred]\n","pd.DataFrame({'label':preds}).to_csv('./bert_softmax/submit1220.csv',index=None)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":196},"id":"aZdKm7Ci1D1G","executionInfo":{"status":"ok","timestamp":1639996101930,"user_tz":-480,"elapsed":26515,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"e56c4749-71d1-47e2-8353-33fe777a9362"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["The following columns in the test set  don't have a corresponding argument in `BertForTokenClassification.forward` and have been ignored: words, labels0.\n","***** Running Prediction *****\n","  Num examples = 1345\n","  Batch size = 32\n"]},{"output_type":"display_data","data":{"text/html":["\n","    <div>\n","      \n","      <progress value='255' max='42' style='width:300px; height:20px; vertical-align: middle;'></progress>\n","      [42/42 34:08]\n","    </div>\n","    "],"text/plain":["<IPython.core.display.HTML object>"]},"metadata":{}},{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.7/dist-packages/seqeval/metrics/v1.py:57: UndefinedMetricWarning: Recall and F-score are ill-defined and being set to 0.0 in labels with no true samples. Use `zero_division` parameter to control this behavior.\n","  _warn_prf(average, modifier, msg_start, len(result))\n","/usr/local/lib/python3.7/dist-packages/seqeval/metrics/v1.py:57: UndefinedMetricWarning: Recall and F-score are ill-defined and being set to 0.0 due to no true samples. Use `zero_division` parameter to control this behavior.\n","  _warn_prf(average, modifier, msg_start, len(result))\n"]}]}]}