{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"bert_lstm_ner.ipynb（softmax跑完后验证集标签对比）","provenance":[],"collapsed_sections":[],"authorship_tag":"ABX9TyM6jHkVSUSgquRfMd4qPPzk"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"widgets":{"application/vnd.jupyter.widget-state+json":{"060477f278d14559ac73192c13d94830":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_view_name":"HBoxView","_dom_classes":[],"_model_name":"HBoxModel","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.5.0","box_style":"","layout":"IPY_MODEL_789995afc14841f08eb9becc7c8d50c0","_model_module":"@jupyter-widgets/controls","children":["IPY_MODEL_61aaeee6b77047838bb3447cd44a4551","IPY_MODEL_2b31e8d8a1a044e099a14d3bd7c878dc","IPY_MODEL_d83ad38b5bcf4b188cf1d2a39dc90646"]}},"789995afc14841f08eb9becc7c8d50c0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"61aaeee6b77047838bb344
7cd44a4551":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_e5a40b88103b46f19167c32a836ef8e6","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":"100%","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_fdaa9b00882e44888b918d99247d2d09"}},"2b31e8d8a1a044e099a14d3bd7c878dc":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_view_name":"ProgressView","style":"IPY_MODEL_8896700b212141598b98acc9945d75b0","_dom_classes":[],"description":"","_model_name":"FloatProgressModel","bar_style":"success","max":11,"_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":11,"_view_count":null,"_view_module_version":"1.5.0","orientation":"horizontal","min":0,"description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_34882e1484424b3692487aa7e5f371ca"}},"d83ad38b5bcf4b188cf1d2a39dc90646":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_c74381917cb144ac8592f4b8550e636c","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":" 11/11 [00:03&lt;00:00,  
3.26ba/s]","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_26fca9c8d5d14debbe0096662ab84895"}},"e5a40b88103b46f19167c32a836ef8e6":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"fdaa9b00882e44888b918d99247d2d09":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"8896700b212141598b98acc9945d75b0":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"ProgressStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","bar_col
or":null,"_model_module":"@jupyter-widgets/controls"}},"34882e1484424b3692487aa7e5f371ca":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"c74381917cb144ac8592f4b8550e636c":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"26fca9c8d5d14debbe0096662ab84895":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":nul
l,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"f97f4cfd83174b67bd3f99068712e3b1":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_view_name":"HBoxView","_dom_classes":[],"_model_name":"HBoxModel","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.5.0","box_style":"","layout":"IPY_MODEL_28a1d3a622d642479e2720913ba00324","_model_module":"@jupyter-widgets/controls","children":["IPY_MODEL_46b6af4256894c04b88f0ee7fe5c3409","IPY_MODEL_f49391466310463bac5f40450a694229","IPY_MODEL_58076410d6ba46bfb1aa59ef5de48e05"]}},"28a1d3a622d642479e2720913ba00324":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"o
rder":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"46b6af4256894c04b88f0ee7fe5c3409":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_a87ca4328d754b3593d369b457f5b191","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":"100%","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_f46c845d738d4e1295d4ea931fa0f940"}},"f49391466310463bac5f40450a694229":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_view_name":"ProgressView","style":"IPY_MODEL_e07dc3b23894474687cfa1a498c390a3","_dom_classes":[],"description":"","_model_name":"FloatProgressModel","bar_style":"success","max":2,"_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":2,"_view_count":null,"_view_module_version":"1.5.0","orientation":"horizontal","min":0,"description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_b682b302c8234735b5a8092628d1b4f0"}},"58076410d6ba46bfb1aa59ef5de48e05":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_434d34fee29f44ada791898ef454c3cc","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":" 2/2 [00:00&lt;00:00,  
2.94ba/s]","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_29934ff8c9274f15868f56c5dd02d784"}},"a87ca4328d754b3593d369b457f5b191":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"f46c845d738d4e1295d4ea931fa0f940":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"e07dc3b23894474687cfa1a498c390a3":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"ProgressStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","bar_col
or":null,"_model_module":"@jupyter-widgets/controls"}},"b682b302c8234735b5a8092628d1b4f0":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"434d34fee29f44ada791898ef454c3cc":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"29934ff8c9274f15868f56c5dd02d784":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":nul
l,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"0a7ca74c031741b899da9a55a24d898c":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_view_name":"HBoxView","_dom_classes":[],"_model_name":"HBoxModel","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.5.0","box_style":"","layout":"IPY_MODEL_cde89f5db7964500937217315cc4a91b","_model_module":"@jupyter-widgets/controls","children":["IPY_MODEL_7a4f40583aa64ca9a4be79ccf8486c78","IPY_MODEL_70b80e9daf344cde872765523478cdf8","IPY_MODEL_c2c2354f1c7c4fcca46d141bbab675de"]}},"cde89f5db7964500937217315cc4a91b":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"o
rder":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"7a4f40583aa64ca9a4be79ccf8486c78":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_07cc5ee285ca42a5b03ea8adcf39ce36","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":"100%","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_bc66539544e743d6987abd5992092fe4"}},"70b80e9daf344cde872765523478cdf8":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_view_name":"ProgressView","style":"IPY_MODEL_2339e85eda6b41d99b5fb56e35e6b457","_dom_classes":[],"description":"","_model_name":"FloatProgressModel","bar_style":"success","max":2,"_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":2,"_view_count":null,"_view_module_version":"1.5.0","orientation":"horizontal","min":0,"description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_583893be1c834d888e0cdc6c5c921797"}},"c2c2354f1c7c4fcca46d141bbab675de":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_8c631d8c333345759dfba55e906218b4","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":" 2/2 [00:00&lt;00:00,  
4.79ba/s]","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_aa1dfac4ff7c48eb83642732a21d0236"}},"07cc5ee285ca42a5b03ea8adcf39ce36":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"bc66539544e743d6987abd5992092fe4":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"2339e85eda6b41d99b5fb56e35e6b457":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"ProgressStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","bar_col
or":null,"_model_module":"@jupyter-widgets/controls"}},"583893be1c834d888e0cdc6c5c921797":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"8c631d8c333345759dfba55e906218b4":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"aa1dfac4ff7c48eb83642732a21d0236":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":nul
l,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"90bca0d5a1984507ae466555009cd9cd":{"model_module":"@jupyter-widgets/controls","model_name":"HBoxModel","model_module_version":"1.5.0","state":{"_view_name":"HBoxView","_dom_classes":[],"_model_name":"HBoxModel","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.5.0","box_style":"","layout":"IPY_MODEL_1aa5f030462444aaa0e069e147082896","_model_module":"@jupyter-widgets/controls","children":["IPY_MODEL_b14facba21e8409eb825c39c7a4eb748","IPY_MODEL_b13fe845c361424cb43923d96d61d2a0","IPY_MODEL_45c1030f722d49fea94a144bdff4fed2"]}},"1aa5f030462444aaa0e069e147082896":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"o
rder":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"b14facba21e8409eb825c39c7a4eb748":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_9262f7ca5d4f416d9270a0c9684ca626","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":" 36%","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_b5c02ff4852c424e848506318cbd6650"}},"b13fe845c361424cb43923d96d61d2a0":{"model_module":"@jupyter-widgets/controls","model_name":"FloatProgressModel","model_module_version":"1.5.0","state":{"_view_name":"ProgressView","style":"IPY_MODEL_beb4ecef7828437a90aef25c1faeac07","_dom_classes":[],"description":"","_model_name":"FloatProgressModel","bar_style":"","max":2860,"_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":1025,"_view_count":null,"_view_module_version":"1.5.0","orientation":"horizontal","min":0,"description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_14ed44a3ab154c85b2ae7f29c394caca"}},"45c1030f722d49fea94a144bdff4fed2":{"model_module":"@jupyter-widgets/controls","model_name":"HTMLModel","model_module_version":"1.5.0","state":{"_view_name":"HTMLView","style":"IPY_MODEL_4804cb5834824d19840398b4e3cb38bf","_dom_classes":[],"description":"","_model_name":"HTMLModel","placeholder":"​","_view_module":"@jupyter-widgets/controls","_model_module_version":"1.5.0","value":" 1025/2860 [45:24&lt;1:16:55,  
2.52s/it]","_view_count":null,"_view_module_version":"1.5.0","description_tooltip":null,"_model_module":"@jupyter-widgets/controls","layout":"IPY_MODEL_06fb9cce312948169ffaa37a92ff3a43"}},"9262f7ca5d4f416d9270a0c9684ca626":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"b5c02ff4852c424e848506318cbd6650":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"beb4ecef7828437a90aef25c1faeac07":{"model_module":"@jupyter-widgets/controls","model_name":"ProgressStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"ProgressStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","bar_col
or":null,"_model_module":"@jupyter-widgets/controls"}},"14ed44a3ab154c85b2ae7f29c394caca":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":null,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}},"4804cb5834824d19840398b4e3cb38bf":{"model_module":"@jupyter-widgets/controls","model_name":"DescriptionStyleModel","model_module_version":"1.5.0","state":{"_view_name":"StyleView","_model_name":"DescriptionStyleModel","description_width":"","_view_module":"@jupyter-widgets/base","_model_module_version":"1.5.0","_view_count":null,"_view_module_version":"1.2.0","_model_module":"@jupyter-widgets/controls"}},"06fb9cce312948169ffaa37a92ff3a43":{"model_module":"@jupyter-widgets/base","model_name":"LayoutModel","model_module_version":"1.2.0","state":{"_view_name":"LayoutView","grid_template_rows":null,"right":null,"justify_content":null,"_view_module":"@jupyter-widgets/base","overflow":null,"_model_module_version":"1.2.0","_view_count":null,"flex_flow":null,"width":null,"min_width":null,"border":null,"align_items":null,"bottom":null,"_model_module":"@jupyter-widgets/base","top":null,"grid_column":nul
l,"overflow_y":null,"overflow_x":null,"grid_auto_flow":null,"grid_area":null,"grid_template_columns":null,"flex":null,"_model_name":"LayoutModel","justify_items":null,"grid_row":null,"max_height":null,"align_content":null,"visibility":null,"align_self":null,"height":null,"min_height":null,"padding":null,"grid_auto_rows":null,"grid_gap":null,"max_width":null,"order":null,"_view_module_version":"1.2.0","grid_template_areas":null,"object_position":null,"object_fit":null,"grid_auto_columns":null,"margin":null,"display":null,"left":null}}}}},"cells":[{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"hw0bsFiqWXy6","executionInfo":{"status":"ok","timestamp":1639998615053,"user_tz":-480,"elapsed":17038,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"e819afac-28e8-4da5-cf78-7163e9e9ac06"},"source":["from google.colab import drive\n","drive.mount('/content/drive')"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Mounted at /content/drive\n"]}]},{"cell_type":"code","metadata":{"id":"e01TmJXPXERL"},"source":["import os\n","os.chdir('/content/drive/MyDrive/chinese task/CLUENER2020')"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"zalA7BN8XOEo","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1639372398877,"user_tz":-480,"elapsed":4862,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"67b84f38-3220-4e54-949e-4503d1cc9374"},"source":["#安装\n","!pip install transformers datasets"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: transformers in /usr/local/lib/python3.7/dist-packages (4.13.0)\n","Requirement already satisfied: datasets in /usr/local/lib/python3.7/dist-packages 
(1.16.1)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.7/dist-packages (from transformers) (21.3)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from transformers) (3.4.0)\n","Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (1.19.5)\n","Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.7/dist-packages (from transformers) (4.62.3)\n","Requirement already satisfied: sacremoses in /usr/local/lib/python3.7/dist-packages (from transformers) (0.0.46)\n","Requirement already satisfied: huggingface-hub<1.0,>=0.1.0 in /usr/local/lib/python3.7/dist-packages (from transformers) (0.2.1)\n","Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (2019.12.20)\n","Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from transformers) (2.23.0)\n","Requirement already satisfied: tokenizers<0.11,>=0.10.1 in /usr/local/lib/python3.7/dist-packages (from transformers) (0.10.3)\n","Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from transformers) (4.8.2)\n","Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.7/dist-packages (from transformers) (6.0)\n","Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.7/dist-packages (from huggingface-hub<1.0,>=0.1.0->transformers) (3.10.0.2)\n","Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging>=20.0->transformers) (3.0.6)\n","Requirement already satisfied: fsspec[http]>=2021.05.0 in /usr/local/lib/python3.7/dist-packages (from datasets) (2021.11.1)\n","Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from datasets) (1.1.5)\n","Requirement already satisfied: pyarrow!=4.0.0,>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from 
datasets) (3.0.0)\n","Requirement already satisfied: xxhash in /usr/local/lib/python3.7/dist-packages (from datasets) (2.0.2)\n","Requirement already satisfied: multiprocess in /usr/local/lib/python3.7/dist-packages (from datasets) (0.70.12.2)\n","Requirement already satisfied: aiohttp in /usr/local/lib/python3.7/dist-packages (from datasets) (3.8.1)\n","Requirement already satisfied: dill in /usr/local/lib/python3.7/dist-packages (from datasets) (0.3.4)\n","Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (1.24.3)\n","Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2.10)\n","Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (3.0.4)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2021.10.8)\n","Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (1.2.0)\n","Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (4.0.1)\n","Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (5.2.0)\n","Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (1.2.0)\n","Requirement already satisfied: asynctest==0.13.0 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (0.13.0)\n","Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (1.7.2)\n","Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (21.2.0)\n","Requirement already satisfied: charset-normalizer<3.0,>=2.0 in 
/usr/local/lib/python3.7/dist-packages (from aiohttp->datasets) (2.0.8)\n","Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata->transformers) (3.6.0)\n","Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas->datasets) (2.8.2)\n","Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->datasets) (2018.9)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.7.3->pandas->datasets) (1.15.0)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (1.1.0)\n","Requirement already satisfied: click in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (7.1.2)\n"]}]},{"cell_type":"code","metadata":{"id":"0a0Y29QSXciS"},"source":["import os\n","import json\n","import logging\n","import numpy as np\n","import pandas as pd\n","import config\n","\n","from sklearn.model_selection import train_test_split\n","from torch.utils.data import DataLoader"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"9_0f6C8cI-uu","executionInfo":{"status":"ok","timestamp":1639372400603,"user_tz":-480,"elapsed":22,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"c3104b6d-1199-40b9-a292-5a5e47eebaea"},"source":["#加载处理完的npz数据集\n","#不加allow_pickle=True会报错Object arrays cannot be loaded when allow_pickle=False，numpy新版本中默认为False。\n","train_data=np.load('./data/train.npz',allow_pickle=True)\n","val_data=np.load('./data/dev.npz',allow_pickle=True)\n","test_data=np.load('./data/test.npz',allow_pickle=True)\n","\n","test_data.files"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["['words', 
def trans(labels):
  """Convert a sequence of BIOS tag strings into integer ids.

  Looks each tag up in config.label2id; word/label alignment is assumed
  to already hold, so the output has one id per input tag.
  """
  return [config.label2id[tag] for tag in labels]
<th>0</th>\n","      <td>[彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1338</th>\n","      <td>[在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1339</th>\n","      <td>[姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...</td>\n","      <td>[6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1343 rows × 2 
columns</p>\n","</div>"],"text/plain":["                                                  words                                             labels\n","0     [彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...  [7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","1     [温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...  [7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...\n","2     [突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...  [4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...\n","3     [郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...  [0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...\n","4     [我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...  [0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","...                                                 ...                                                ...\n","1338  [在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1339  [姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...  [6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...\n","1340  [目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...  [0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...\n","1341  [也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...  [0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...\n","1342  [另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...  
[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...\n","\n","[1343 rows x 2 columns]"]},"metadata":{},"execution_count":6}]},{"cell_type":"markdown","metadata":{"id":"KzH2PsAaQY_s"},"source":["## pandas数据装入datasets进行解码，之后方便直接pad labels。\n","## labels在后面装入dataloader的时候，不处理的话长度不一致，处理的话整理函数太麻烦。"]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":130,"referenced_widgets":["060477f278d14559ac73192c13d94830","789995afc14841f08eb9becc7c8d50c0","61aaeee6b77047838bb3447cd44a4551","2b31e8d8a1a044e099a14d3bd7c878dc","d83ad38b5bcf4b188cf1d2a39dc90646","e5a40b88103b46f19167c32a836ef8e6","fdaa9b00882e44888b918d99247d2d09","8896700b212141598b98acc9945d75b0","34882e1484424b3692487aa7e5f371ca","c74381917cb144ac8592f4b8550e636c","26fca9c8d5d14debbe0096662ab84895","f97f4cfd83174b67bd3f99068712e3b1","28a1d3a622d642479e2720913ba00324","46b6af4256894c04b88f0ee7fe5c3409","f49391466310463bac5f40450a694229","58076410d6ba46bfb1aa59ef5de48e05","a87ca4328d754b3593d369b457f5b191","f46c845d738d4e1295d4ea931fa0f940","e07dc3b23894474687cfa1a498c390a3","b682b302c8234735b5a8092628d1b4f0","434d34fee29f44ada791898ef454c3cc","29934ff8c9274f15868f56c5dd02d784","0a7ca74c031741b899da9a55a24d898c","cde89f5db7964500937217315cc4a91b","7a4f40583aa64ca9a4be79ccf8486c78","70b80e9daf344cde872765523478cdf8","c2c2354f1c7c4fcca46d141bbab675de","07cc5ee285ca42a5b03ea8adcf39ce36","bc66539544e743d6987abd5992092fe4","2339e85eda6b41d99b5fb56e35e6b457","583893be1c834d888e0cdc6c5c921797","8c631d8c333345759dfba55e906218b4","aa1dfac4ff7c48eb83642732a21d0236"]},"id":"cbs_SgaUP19o","executionInfo":{"status":"ok","timestamp":1639372412373,"user_tz":-480,"elapsed":11290,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"756cc149-015f-4e18-ba9a-18f213a64f91"},"source":["from datasets import Dataset\n","from transformers import 
AutoTokenizer\n","#这里一定要选AutoTokenizer，如果是BertTokenizer，会提示bertbase没有word_ids方法。结果没用到\n","trains_ds=Dataset.from_pandas(train_df)\n","val_ds=Dataset.from_pandas(val_df)\n","test_ds=Dataset.from_pandas(test_df)\n","\n","tokenizer=AutoTokenizer.from_pretrained(config.roberta_model,do_lower_case=True)\n","\n","#tokenized_inputs=tokenizer(trains_ds[\"words\"],padding=True,truncation=True,is_split_into_words=True)为啥这种是错的\n","tokenized_trains_ds=trains_ds.map(lambda examples:tokenizer(examples['words'],is_split_into_words=True,truncation=True,padding=True),batched=True)\n","tokenized_val_ds=val_ds.map(lambda examples:tokenizer(examples['words'],is_split_into_words=True,truncation=True,padding=True),batched=True)\n","tokenized_test_ds=test_ds.map(lambda examples:tokenizer(examples['words'],is_split_into_words=True,truncation=True,padding=True),batched=True)"],"execution_count":null,"outputs":[{"output_type":"display_data","data":{"application/vnd.jupyter.widget-view+json":{"model_id":"060477f278d14559ac73192c13d94830","version_minor":0,"version_major":2},"text/plain":["  0%|          | 0/11 [00:00<?, ?ba/s]"]},"metadata":{}},{"output_type":"stream","name":"stderr","text":["Asking to truncate to max_length but no maximum length is provided and the model has no predefined maximum length. 
def padding(data):
  """Pad each example's label ids out to the tokenized sequence length.

  For every example, prepends one -100 (the [CLS] position) and appends
  -100 for each remaining slot so the result is exactly as long as the
  example's attention_mask. -100 is the index CrossEntropyLoss ignores.

  Returns a list of padded label-id lists, one per example.
  """
  padded = []
  for example in data:
    labels = example['labels']
    target_len = len(example['attention_mask'])
    # one slot is consumed by the leading -100, hence the extra -1
    tail = target_len - len(labels) - 1
    padded.append([-100] + list(labels) + [-100] * tail)
  return padded
}\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>labels</th>\n","      <th>pad_labels</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[四, 川, 敦, 煌, 学, ”, 。, 近, 年, 来, ，, 丹, 棱, 县, 等, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[尼, 日, 利, 亚, 海, 军, 发, 言, 人, 当, 天, 在, 阿, 布, 贾, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[销, 售, 冠, 军, ：, 辐, 射, 3, -, B, e, t, h, e, s, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[所, 以, 大, 多, 数, 人, 都, 是, 从, 巴, 厘, 岛, 南, 部, 开, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[备, 受, 瞩, 目, 的, 动, 作, 及, 冒, 险, 类, 大, 作, 《, 迷, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[貌, 似, 主, 队, 优, 势, 不, 小, 。, 不, 过, 欧, 平, 客, 胜, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[当, 年, 赵, 国, 都, 城, 邯, 郸, 被, 秦, 军, 包, 
围, ，, 赵, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[关, 于, 张, 衡, 地, 动, 仪, 的, 历, 史, 证, 据, ，, 冯, 锐, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1343</th>\n","      <td>[一, 直, 面, 带, 微, 笑, ，, 任, 由, 化, 妆, 师, 为, 她, 化, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1344</th>\n","      <td>[阿, 森, 纳, 将, 在, 主, 场, 对, 阵, 基, 伏, 迪, 纳, 摩, ，, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1345 rows × 3 columns</p>\n","</div>"],"text/plain":["                                                  words  ...                                         pad_labels\n","0     [四, 川, 敦, 煌, 学, ”, 。, 近, 年, 来, ，, 丹, 棱, 县, 等, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1     [尼, 日, 利, 亚, 海, 军, 发, 言, 人, 当, 天, 在, 阿, 布, 贾, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","2     [销, 售, 冠, 军, ：, 辐, 射, 3, -, B, e, t, h, e, s, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","3     [所, 以, 大, 多, 数, 人, 都, 是, 从, 巴, 厘, 岛, 南, 部, 开, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","4     [备, 受, 瞩, 目, 的, 动, 作, 及, 冒, 险, 类, 大, 作, 《, 迷, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","...                                                 ...  ...                                                ...\n","1340  [貌, 似, 主, 队, 优, 势, 不, 小, 。, 不, 过, 欧, 平, 客, 胜, ...  ...  
[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1341  [当, 年, 赵, 国, 都, 城, 邯, 郸, 被, 秦, 军, 包, 围, ，, 赵, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1342  [关, 于, 张, 衡, 地, 动, 仪, 的, 历, 史, 证, 据, ，, 冯, 锐, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1343  [一, 直, 面, 带, 微, 笑, ，, 任, 由, 化, 妆, 师, 为, 她, 化, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1344  [阿, 森, 纳, 将, 在, 主, 场, 对, 阵, 基, 伏, 迪, 纳, 摩, ，, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","\n","[1345 rows x 3 columns]"]},"metadata":{},"execution_count":8}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"eNRmBnegDmqa","executionInfo":{"status":"ok","timestamp":1639372416560,"user_tz":-480,"elapsed":36,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"be8ac989-3840-4bfc-a771-8ba60ef44527"},"source":["%pylab inline\n","#最大句子长度50\n","train_df['text_len'] = train_df['words'].apply(lambda x: len(x))\n","print(train_df['text_len'].describe())"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Populating the interactive namespace from numpy and matplotlib\n","count    10748.000000\n","mean        37.380350\n","std         10.709827\n","min          2.000000\n","25%         32.000000\n","50%         41.000000\n","75%         46.000000\n","max         50.000000\n","Name: text_len, dtype: float64\n"]}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"0dlnGtnxFHWU","executionInfo":{"status":"ok","timestamp":1639372416562,"user_tz":-480,"elapsed":28,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"4836a50e-5937-4c77-c3c2-354b1e798056"},"source":["#每个句子都被pad到52的长度\n","train_df['label_len'] = train_df['pad_labels'].apply(lambda x: 
len(x))\n","print(train_df['label_len'].describe())"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["count    10748.0\n","mean        52.0\n","std          0.0\n","min         52.0\n","25%         52.0\n","50%         52.0\n","75%         52.0\n","max         52.0\n","Name: label_len, dtype: float64\n"]}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":423},"id":"6p2wmsmrM_x1","executionInfo":{"status":"ok","timestamp":1639372416563,"user_tz":-480,"elapsed":25,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"75f49e8e-e9e8-480d-b91f-720a53b98132"},"source":["val_df"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/html":["<div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>labels</th>\n","      <th>pad_labels</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[-100, 7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","      <td>[-100, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...</td>\n","      <td>[4, 14, 
14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","      <td>[-100, 4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[-100, 0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1338</th>\n","      <td>[在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1339</th>\n","      <td>[姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...</td>\n","      <td>[6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...</td>\n","      <td>[-100, 6, 16, 16, 16, 16, 16, 16, 16, 16, 16, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","      <td>[-100, 0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15,...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 
0, 0, 2, 12, 12, 12, 12, 12...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1343 rows × 3 columns</p>\n","</div>"],"text/plain":["                                                  words  ...                                         pad_labels\n","0     [彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...  ...  [-100, 7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","1     [温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...  ...  [-100, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...\n","2     [突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...  ...  [-100, 4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17...\n","3     [郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0...\n","4     [我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...  ...  [-100, 0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0...\n","...                                                 ...  ...                                                ...\n","1338  [在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1339  [姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...  ...  [-100, 6, 16, 16, 16, 16, 16, 16, 16, 16, 16, ...\n","1340  [目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...  ...  [-100, 0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15,...\n","1341  [也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...  ...  [-100, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, ...\n","1342  [另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...  ...  
[-100, 0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12...\n","\n","[1343 rows x 3 columns]"]},"metadata":{},"execution_count":11}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"WuPK09A2f1xb","executionInfo":{"status":"ok","timestamp":1639372416564,"user_tz":-480,"elapsed":23,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"e14c7dfd-114b-4072-e441-f94eea90d8dd"},"source":["print(type(test_df['labels'][0][0]))"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["<class 'int'>\n"]}]},{"cell_type":"code","metadata":{"id":"eUjuf-chiW8I"},"source":["batch_size=32\n","\n","#划分训练验证集\n","from sklearn.model_selection import train_test_split\n","from datasets import Dataset\n","from torch.nn.utils.rnn import pad_sequence\n","\n","\n","train_data,val_data,train_label,val_label=train_test_split(\n","    train_df['words'].iloc[:], \n","    train_df['pad_labels'].iloc[:],\n","    test_size=0.15,shuffle=True)\n","\n","test_data,test_label=(test_df['words'].iloc[:],test_df['pad_labels'].iloc[:])\n","validation_data,validation_label=(val_df['words'].iloc[:],val_df['pad_labels'].iloc[:])\n","#stratify=train_df['label'].iloc[:]报错:The least populated class in y has only 1 member,which is too few.\n","#The minimum number of groups for any class cannot be less than 
class XFeiDataset(Dataset):
  """Pairs tokenizer encodings with padded label ids for DataLoader use.

  encodings: mapping of field name (input_ids, attention_mask, ...) to a
  per-example sequence of values; labels: per-example padded label ids.
  """

  def __init__(self, encodings, labels):
    self.encodings = encodings
    self.labels = labels

  def __getitem__(self, idx):
    # Materialize every encoding field for this example as a tensor.
    item = {}
    for key, values in self.encodings.items():
      item[key] = torch.tensor(values[idx])
    pad_labels = torch.tensor(self.labels[idx])
    item['pad_labels'] = pad_labels
    # Positions holding -100 (CLS / padding) are excluded from the loss.
    item['mask'] = pad_labels != -100
    return item

  def __len__(self):
    return len(self.labels)
Dataset,DataLoader,TensorDataset\n","\n","train_loader=DataLoader(train_dataset,batch_size=batch_size,shuffle=True)\n","val_loader=DataLoader(val_dataset,batch_size=batch_size,shuffle=True)\n","test_loader=DataLoader(test_dataset,batch_size=batch_size,shuffle=False)#test数据不能shuffle啊，真坑死我了\n","validation_loader=DataLoader(validation_dataset,batch_size=batch_size,shuffle=False)#test数据不能shuffle啊，真坑死我了"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"vZh3myklQsHo"},"source":["from transformers import BertModel\n","from torch.nn.utils.rnn import pad_sequence\n","#初始化bert模型\n","from transformers import BertConfig\n","import torch.nn as nn\n","from torch.nn import LSTM\n","\n","\n","\n","num_labels=31\n","dropout=0.1\n","\n","class Bert_LSTM(nn.Module):\n","  def __init__(self):\n","    super(Bert_LSTM,self).__init__()\n","    self.num_labels=num_labels\n","    self.dropout=nn.Dropout(dropout)\n","    self.bert=BertModel.from_pretrained(config.roberta_model)\n","    for param in self.bert.parameters():\n","      param.requires_grad=True\n","    self.classifier=nn.Linear(1024,self.num_labels)\n","    #self.crf=CRF(num_labels,batch_first=True)\n","    \n","    self.bilstm=nn.LSTM(\n","        input_size=1024, \n","        hidden_size=512, \n","        batch_first=True,\n","        num_layers=2,\n","        dropout=0.5,  \n","        bidirectional=True)\n","\n","  def forward(self,batch_seqs,batch_seq_masks,batch_seq_segments):\n","\n","    output=self.bert(input_ids=batch_seqs,attention_mask=batch_seq_masks,token_type_ids=batch_seq_segments)\n","    #pooler_output=output.pooler_output\n","    last_hidden_state=output.last_hidden_state\n","\n","    if model.train():\n","      last_hidden_state=self.dropout(last_hidden_state)\n","    #只有这种写法不会报错，如果是sequence_output,pooler_output=self.bert(**kwags)这种，sequence_output会报错str没有xxx属性。\n","    #貌似是bert输出有很多，直接用output.last_hidden_state来调用结果（估计是版本问题，坑），关键是输出要打印出来\n","    \n","\n","    
lstm_output,(hn,cn)=self.bilstm(last_hidden_state)\n","    #output为输出序列的隐藏层，hn为最后一个时刻的隐藏层，cn为最后一个时刻的隐藏细胞\n","    if self.training:#BUGFIX: was `if model.train():` -- train() returns self (truthy) and forces train mode as a side effect; the Module flag is the correct check\n","      lstm_output=self.dropout(lstm_output)\n","\n","    # 得到判别值\n","    logits=self.classifier(lstm_output)\n","    \n","    \n","    return logits   "],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"RcKd7JbCIu-b"},"source":["#加载模型\n","model=Bert_LSTM()\n","#model.load_state_dict(torch.load(\"best_bert_model_3epoch\"))\n","device=torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n","model.to(device)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"yf0ux4z6V7vk"},"source":["#定义优化器\n","epoch=10\n","lr=3e-5\n","\n","from transformers import AdamW,get_scheduler\n","\n","train_steps_per_epoch=len(train_loader)\n","num_training_steps=train_steps_per_epoch*epoch\n","\n","#定义各模块参数\n","bert_parameters=list(model.bert.named_parameters())\n","lstm_parameters=list(model.bilstm.named_parameters())\n","classifier_parameters=list(model.classifier.named_parameters())\n","no_decay=['bias','LayerNorm.weight']\n","\n","#bert模型、lstm模型、nn.linear的学习率分离，后两个是bert的3倍\n","optimizer_grouped_parameters=[\n","    {'params':[p for n,p in bert_parameters if not any(nd in n for nd in no_decay)],\n","      'lr':lr,'weight_decay':0.01},\n","    {'params':[p for n,p in bert_parameters if any(nd in n for nd in no_decay)],\n","      'lr':lr,'weight_decay':0.0},\n","    {'params':[p for n,p in lstm_parameters if not any(nd in n for nd in no_decay)],\n","      'lr':lr*3,'weight_decay':0.01},\n","    {'params':[p for n,p in lstm_parameters if any(nd in n for nd in no_decay)],\n","      'lr':lr*3,'weight_decay': 0.0},\n","    {'params':[p for n,p in classifier_parameters if not any(nd in n for nd in no_decay)],\n","      'lr':lr*3,'weight_decay':0.01},\n","    {'params':[p for n,p in classifier_parameters if any(nd in n for nd in no_decay)],\n","      
'lr':lr*3,'weight_decay':0.0}]\n","\n","optimizer=AdamW(optimizer_grouped_parameters,lr=lr,eps=1e-8)\n","\n","lr_scheduler=get_scheduler(\n","    \"linear\",\n","    optimizer=optimizer,\n","    num_warmup_steps=0,\n","    num_training_steps=num_training_steps)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"LFe1R8aMcCL7","colab":{"base_uri":"https://localhost:8080/","height":49,"referenced_widgets":["90bca0d5a1984507ae466555009cd9cd","1aa5f030462444aaa0e069e147082896","b14facba21e8409eb825c39c7a4eb748","b13fe845c361424cb43923d96d61d2a0","45c1030f722d49fea94a144bdff4fed2","9262f7ca5d4f416d9270a0c9684ca626","b5c02ff4852c424e848506318cbd6650","beb4ecef7828437a90aef25c1faeac07","14ed44a3ab154c85b2ae7f29c394caca","4804cb5834824d19840398b4e3cb38bf","06fb9cce312948169ffaa37a92ff3a43"]},"executionInfo":{"status":"ok","timestamp":1639372490991,"user_tz":-480,"elapsed":355,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"7ddb8572-edfa-47db-db6f-7944672cb995"},"source":["#编写训练和验证循环\n","import time\n","import numpy as np\n","from sklearn.metrics import f1_score,precision_score,recall_score,accuracy_score\n","from torch.nn import functional as F\n","from torchcrf import CRF\n","#加载进度条\n","from tqdm.auto import tqdm\n","\n","num_training_steps=train_steps_per_epoch*epoch\n","\n","progress_bar=tqdm(range(num_training_steps))\n","\n","def train_and_eval(epoch):\n","  best_acc=0.0\n","  num_labels=31\n","  criterion=nn.CrossEntropyLoss()\n","  for i in range(epoch):\n","    \"\"\"训练模型\"\"\"\n","    start=time.time()\n","    model.train()\n","    print(\"***** Running training epoch {} *****\".format(i+1))\n","    train_loss_sum=0.0\n","    for idx,batch in enumerate(train_loader):\n","      input_ids=batch['input_ids'].to(device)\n","      attention_mask=batch['attention_mask'].to(device)\n","      token_type_ids=batch['token_type_ids'].to(device)\n","      
pad_labels=batch['pad_labels'].to(device)\n","      mask=batch['mask'].to(device)\n","\n","\n","      #计算输出和loss\n","      logits=model(input_ids,attention_mask,token_type_ids)\n","\n","      loss=criterion(logits[mask],pad_labels[mask])\n","      loss.backward()\n","\n","      optimizer.step()\n","      lr_scheduler.step()\n","      optimizer.zero_grad()  \n","      progress_bar.update(1)\n","\n","      train_loss_sum+=loss.item()\n","      if (idx+1)%(len(train_loader)//5)==0: # 只打印五次结果\n","        print(\"Epoch {:03d} | Step {:04d}/{:04d} | Loss {:.4f} | Time {:.4f} | Learning rate = {} \\n\".format(\n","                  i+1,idx+1,len(train_loader),train_loss_sum/(idx+1),time.time()-start,optimizer.state_dict()['param_groups'][0]['lr']))\n","      \n","      #验证模型\n","    model.eval()\n","    total=0#每个batch要预测的token总数\n","    acc=0#每个batch的acc\n","    total_eval_accuracy=0\n","    total_eval_loss=0\n","    \n","    for batch in val_loader:\n","      with torch.no_grad():#只有这一块是不需要求导的\n","      \n","        input_ids=batch['input_ids'].to(device)\n","        attention_mask=batch['attention_mask'].to(device)\n","        token_type_ids=batch['token_type_ids'].to(device)\n","        pad_labels=batch['pad_labels'].to(device)\n","        mask=batch['mask'].to(device)\n","        logits=model(input_ids,attention_mask,token_type_ids)\n","\n","\n","      #logits[mask]从句子矩阵变被拉平，且只含有真实token的logtis。和bertfortoken分类任务头的view效果是一样的。\n","      loss=criterion(logits[mask],pad_labels[mask])#只计算没有mask的部分单词的loss和准确率\n","      \n","      total_eval_loss+=loss.item()\n","\n","      acc+=(logits.argmax(dim=-1)[mask]==pad_labels[mask]).sum().item()#只计算没有mask的单词的准确率,mask在外面似乎accs0.93不准。\n","      total+=mask.sum().item()\n","    total_eval_accuracy=acc/total\n","\n","    #avg_val_accuracy=total_eval_accuracy/len(val_loader)\n","    if total_eval_accuracy>best_acc:\n","      best_acc=total_eval_accuracy\n","      torch.save(model.state_dict(),\"bert_lstm_softmax_model\")\n","    \n","   
 print(\"val_accuracy:%.4f\" % (total_eval_accuracy))\n","    print(\"Average val loss: %.4f\"%(total_eval_loss))\n","    print(\"time costed={}s \\n\".format(round(time.time()-start,5)))\n","    print(\"-------------------------------\")"],"execution_count":null,"outputs":[{"output_type":"display_data","data":{"application/vnd.jupyter.widget-view+json":{"model_id":"90bca0d5a1984507ae466555009cd9cd","version_minor":0,"version_major":2},"text/plain":["  0%|          | 0/2860 [00:00<?, ?it/s]"]},"metadata":{}}]},{"cell_type":"markdown","metadata":{"id":"Y_Ahn2d251Tt"},"source":["## 对比bert的token分类任务头"]},{"cell_type":"code","metadata":{"id":"sORb6WSDVejg","colab":{"base_uri":"https://localhost:8080/","height":1000},"executionInfo":{"status":"error","timestamp":1639375216817,"user_tz":-480,"elapsed":2720094,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"08e04c1a-0e68-4780-c135-a2a85f51bb3d"},"source":["train_and_eval(epoch)"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["***** Running training epoch 1 *****\n"]},{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:13: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n","  del sys.path[0]\n"]},{"output_type":"stream","name":"stdout","text":["Epoch 001 | Step 0057/0286 | Loss 1.5436 | Time 143.0378 | Learning rate = 2.94020979020979e-05 \n","\n","Epoch 001 | Step 0114/0286 | Loss 1.4414 | Time 285.9572 | Learning rate = 2.8804195804195805e-05 \n","\n","Epoch 001 | Step 0171/0286 | Loss 1.2982 | Time 428.8080 | Learning rate = 2.820629370629371e-05 \n","\n","Epoch 001 | Step 0228/0286 | Loss 1.1858 | Time 571.7140 | Learning rate = 2.760839160839161e-05 \n","\n","Epoch 001 | Step 0285/0286 | 
Loss 1.0738 | Time 714.6863 | Learning rate = 2.701048951048951e-05 \n","\n","val_accuracy:0.8461\n","Average val loss: 26.1596\n","time costed=765.61637s \n","\n","-------------------------------\n","***** Running training epoch 2 *****\n","Epoch 002 | Step 0057/0286 | Loss 0.4251 | Time 142.9354 | Learning rate = 2.6402097902097903e-05 \n","\n","Epoch 002 | Step 0114/0286 | Loss 0.3791 | Time 285.8664 | Learning rate = 2.5804195804195803e-05 \n","\n","Epoch 002 | Step 0171/0286 | Loss 0.3487 | Time 428.8010 | Learning rate = 2.5206293706293707e-05 \n","\n","Epoch 002 | Step 0228/0286 | Loss 0.3313 | Time 571.7066 | Learning rate = 2.460839160839161e-05 \n","\n","Epoch 002 | Step 0285/0286 | Loss 0.3173 | Time 714.9066 | Learning rate = 2.401048951048951e-05 \n","\n","val_accuracy:0.9344\n","Average val loss: 11.7958\n","time costed=766.02415s \n","\n","-------------------------------\n","***** Running training epoch 3 *****\n","Epoch 003 | Step 0057/0286 | Loss 0.1949 | Time 143.9009 | Learning rate = 2.3402097902097904e-05 \n","\n","Epoch 003 | Step 0114/0286 | Loss 0.1873 | Time 287.7282 | Learning rate = 2.2804195804195805e-05 \n","\n","Epoch 003 | Step 0171/0286 | Loss 0.1851 | Time 430.7669 | Learning rate = 2.2206293706293705e-05 \n","\n","Epoch 003 | Step 0228/0286 | Loss 0.1814 | Time 573.7254 | Learning rate = 2.160839160839161e-05 \n","\n","Epoch 003 | Step 0285/0286 | Loss 0.1755 | Time 716.5703 | Learning rate = 2.101048951048951e-05 \n","\n","val_accuracy:0.9370\n","Average val loss: 11.1384\n","time costed=767.47082s \n","\n","-------------------------------\n","***** Running training epoch 4 *****\n","Epoch 004 | Step 0057/0286 | Loss 0.1212 | Time 143.1384 | Learning rate = 2.0402097902097903e-05 \n","\n","Epoch 004 | Step 0114/0286 | Loss 0.1244 | Time 286.1948 | Learning rate = 1.9804195804195807e-05 
\n","\n"]},{"output_type":"error","ename":"KeyboardInterrupt","evalue":"ignored","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)","\u001b[0;32m<ipython-input-20-6a051a1c9714>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mtrain_and_eval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mepoch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m","\u001b[0;32m<ipython-input-19-1cad63d1a704>\u001b[0m in \u001b[0;36mtrain_and_eval\u001b[0;34m(epoch)\u001b[0m\n\u001b[1;32m     33\u001b[0m       \u001b[0mlogits\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mattention_mask\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtoken_type_ids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     34\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 35\u001b[0;31m       \u001b[0mloss\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcriterion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlogits\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mpad_labels\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmask\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     36\u001b[0m       \u001b[0mloss\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     37\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mKeyboardInterrupt\u001b[0m: 
"]}]},{"cell_type":"code","metadata":{"id":"EvTMpEjLKpZx"},"source":["#用trainer预测验证集结果并保存\n","#torch.save(model.state_dict(),\"best_lstm_whole_4epoch\")\n","model.load_state_dict(torch.load(\"after_test_bert_lstm_softmax_model\"))\n","model.to(device)"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"id":"l2gwqTW_BHyw","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1636888687581,"user_tz":-480,"elapsed":8214,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"183e1d67-6400-4329-89f3-ecc7b48fe8f0"},"source":["model.eval()\n","criterion=nn.CrossEntropyLoss()#BUGFIX: was nn.NLLLoss() on raw logits; NLLLoss expects log-probabilities, CrossEntropyLoss applies log-softmax internally\n","total=0#每个batch要预测的token总数\n","acc=0#每个batch的acc\n","total_eval_accuracy=0\n","total_eval_loss=0\n","val_pred = []\n","\n","for batch in validation_loader:  \n","  with torch.no_grad():#只有这一块是不需要求导的\n","  \n","    input_ids=batch['input_ids'].to(device)\n","    attention_mask=batch['attention_mask'].to(device)\n","    token_type_ids=batch['token_type_ids'].to(device)\n","    pad_labels=batch['pad_labels'].to(device)\n","    mask=batch['mask'].to(device)\n","    logits=model(input_ids,attention_mask,token_type_ids)\n","\n","  #logits[mask]从句子矩阵变被拉平，且只含有真实token的logtis。和bertfortoken分类任务头的view效果是一样的。\n","  loss=criterion(logits[mask],pad_labels[mask])#只计算没有mask的部分单词的loss和准确率\n","  total_eval_loss+=loss.item()\n","\n","  acc+=(logits.argmax(dim=-1)==pad_labels)[mask].sum().item()#只计算没有mask的单词的准确率\n","  total+=mask.sum().item()\n","  total_eval_accuracy=acc/total\n","print(\"val_accuracy:%.4f\" % (total_eval_accuracy))\n","print(\"Average val loss: %.4f\"%(total_eval_loss/len(validation_loader)))#BUGFIX: divide by batch count so the printed value is actually the average, matching the label\n"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:13: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or 
sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n","  del sys.path[0]\n"]},{"output_type":"stream","name":"stdout","text":["val_accuracy:0.9341\n","Average val loss: 11.3264\n"]}]},{"cell_type":"code","metadata":{"id":"t3jFldUuVhBI"},"source":["#编写predict函数\n","def predict(model,data_loader):#参数名为data时加载训练好的模型来预测报错，原模型不报错\n","  model.eval()\n","  test_pred = []\n","  for batch in data_loader:\n","    with torch.no_grad():\n","      input_ids=batch['input_ids'].to(device)\n","      attention_mask=batch['attention_mask'].to(device)\n","      token_type_ids=batch['token_type_ids'].to(device)\n","      mask=batch['mask'].to(device)\n","      logits=model(input_ids,attention_mask,token_type_ids)\n","      pad_logits=logits[mask]\n","\n","      y_pred=torch.argmax(logits,dim=-1).detach().cpu().numpy()#为啥最后拉平的又变回矩阵了，看不懂啊\n","      test_pred.extend(y_pred)\n","      \n","  return test_pred"],"execution_count":null,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"dUAE3Oo2ajVj"},"source":["## 输入一个validation_loader的数据，打印出来看看为啥变回52长度的矩阵（应该是模型只能输入输出等长的矩阵）"]},{"cell_type":"code","metadata":{"id":"Lae22D-fVr6B","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1638475049861,"user_tz":-480,"elapsed":10379,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"d1782d67-8e38-4a5f-9572-fe4c3c68e41c"},"source":["ls={}\n","for data in train_loader:\n","  ls={key:val for key,val in data.items()}\n","  break\n","ls\n","\n","from torchcrf import 
CRF\n","crf=CRF(31,batch_first=True)\n","input_ids=ls['input_ids'].to(device)\n","attention_mask=ls['attention_mask'].to(device)\n","token_type_ids=ls['token_type_ids'].to(device)\n","pad_labels=ls['pad_labels'].to(device)\n","mask=ls['mask'].to(device)\n","logits=model(input_ids,attention_mask,token_type_ids)\n","print(mask.shape,logits.shape,pad_labels.shape)"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:13: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n","  del sys.path[0]\n"]},{"output_type":"stream","name":"stdout","text":["torch.Size([16, 51]) torch.Size([16, 52, 31]) torch.Size([16, 51])\n"]}]},{"cell_type":"code","metadata":{"id":"X-gKlzackr8G","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1638475066163,"user_tz":-480,"elapsed":486,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"855db59d-6aca-4ae7-d078-d06e0a3d5506"},"source":["crf.to(device)\n","logits=logits[:,1:,:].to(device)\n","\n","\n","#loss=crf(logits,pad_labels,mask)\n","#pad_logits=logits[mask]\n","\n","#y_pred=torch.argmax(logits,dim=-1).detach().cpu().numpy()\n","#test_pred.extend(y_pred)\n","print(logits.shape)\n","#print(logits,loss)"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["torch.Size([16, 51, 31])\n"]}]},{"cell_type":"markdown","metadata":{"id":"5zPtppBH8nJs"},"source":["## 
mask真假矩阵来过滤，得到的真实token的结果矩阵是每行长度不一致，所以被拉平了。loss计算的是拉平的有效词的loss。\n","\n","##预测时不能mask过滤，否则报错。"]},{"cell_type":"code","metadata":{"id":"pk0BS3Ygfvsp"},"source":["loss=crf(logits,pad_labels,mask)\n","loss"],"execution_count":null,"outputs":[]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"zZgC2HYmX6gZ","executionInfo":{"status":"ok","timestamp":1636818751904,"user_tz":-480,"elapsed":469,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"48538d3f-0ae4-443f-c49d-19da917aa867"},"source":["pad_logits=logits[mask]#多个句子的token矩阵，经过mask矩阵被拉平。\n","y_pred=torch.argmax(pad_logits,dim=-1)\n","print(pad_logits,pad_logits.shape)\n","print('--------------------------------------------------------------------------------------------------------')\n","print(y_pred,y_pred.shape)"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["tensor([[-6.8415e+00, -7.9797e+00, -6.9759e+00,  ..., -1.2822e+01,\n","         -1.2866e+01, -1.2334e+01],\n","        [-6.4157e+00, -1.2603e+01, -1.1605e+01,  ..., -1.2643e+01,\n","         -1.2956e+01, -1.2452e+01],\n","        [-5.5986e+00, -1.2277e+01, -1.3045e+01,  ..., -1.3453e+01,\n","         -1.3545e+01, -1.3535e+01],\n","        ...,\n","        [-6.8185e-05, -1.1929e+01, -1.4428e+01,  ..., -1.7257e+01,\n","         -1.6416e+01, -1.7380e+01],\n","        [-8.2966e-05, -1.1916e+01, -1.4230e+01,  ..., -1.6780e+01,\n","         -1.6141e+01, -1.7019e+01],\n","        [-6.4609e-05, -1.2297e+01, -1.4409e+01,  ..., -1.7191e+01,\n","         -1.6353e+01, -1.7541e+01]], device='cuda:0', grad_fn=<IndexBackward0>) torch.Size([1219, 31])\n","--------------------------------------------------------------------------------------------------------\n","tensor([ 7, 17, 17,  ...,  0,  0,  0], device='cuda:0') 
torch.Size([1219])\n"]}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"LoKpYR2S8TEA","executionInfo":{"status":"ok","timestamp":1636818553794,"user_tz":-480,"elapsed":456,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"52646c47-1b49-4552-f8c2-c33691e678c1"},"source":["import torch\n","from torch import tensor\n","a=torch.randn(2,4)\n","b=tensor([[False,True,True,False],\n"," [True,False,True,True]])\n","c=a[b]\n","\n","print(a,a.shape)\n","print(c,c.shape)\n","#mask矩阵可以晒选需要的位置,但是确实被拉平了"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["tensor([[-0.2196,  0.1262, -0.6929, -1.7824],\n","        [-0.4014, -0.5301, -0.6155,  0.6116]]) torch.Size([2, 4])\n","tensor([ 0.1262, -0.6929, -0.4014, -0.6155,  0.6116]) torch.Size([5])\n"]}]},{"cell_type":"markdown","metadata":{"id":"pP1vNsUHaalV"},"source":["## 用模型预测验证集结果，与原标签对比"]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"YMQp6C-rZ0S3","executionInfo":{"status":"ok","timestamp":1636888796850,"user_tz":-480,"elapsed":8267,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"dd068e68-e72d-4dee-fcc8-8e8e0779dcf2"},"source":["#用trainer预测验证集结果并保存\n","#torch.save(model.state_dict(),\"best_lstm_whole_4epoch\")\n","#model.load_state_dict(torch.load(\"after_test_bert_lstm_softmax_model\"))\n","#model.to(device)\n","\n","predictions=predict(model,validation_loader)\n","val_df['pre_labels']=pd.Series(predictions)\n","from datasets import Dataset\n","val_datasets=Dataset.from_pandas(val_df)\n","#将预测的结果直接加到val_df。如果存入csv读取出来再加入，读取的labels数据就是文本数据，坑了好久才发现。而且还有换行符，醉了。"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:13: UserWarning: To copy 
construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n","  del sys.path[0]\n"]}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":585},"id":"-dS9vYbRZlj8","executionInfo":{"status":"ok","timestamp":1636888811696,"user_tz":-480,"elapsed":618,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"55695dad-7059-4308-cba5-6a878349254e"},"source":["def unpadding(data):\n","  unpad_labels=[]\n","  for ds in data:#直接这样迭代读取报错。pandas数据不能这样读取每一行。datasets是dict格式，可以用datasets['train'][0]这样的方式读取\n","    pad_labels=ds['pre_labels'] #这里是pre_labels，又他妈写错了\n","    words=ds['words']\n","\n","    length=len(words)\n","    label_ids=pad_labels[1:(length+1)]\n","\n","    unpad_labels.append(label_ids)\n","  return unpad_labels\n","#tokenized_trains_ds[\"pad_labels\"]=pad_labels# Column 2 named labels expected length 10748 but got length 1000\n","val_df['unpad_labels']=unpadding(val_datasets)\n","val_df"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/html":["<div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>labels</th>\n","      <th>pad_labels</th>\n","      <th>pre_labels</th>\n","      <th>unpad_labels</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0...</td>\n","      <td>[-100, 7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","      <td>[-100, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","      <td>[0, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","      <td>[-100, 4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17...</td>\n","      <td>[0, 4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 1...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[-100, 0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      
<th>1338</th>\n","      <td>[在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1339</th>\n","      <td>[姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...</td>\n","      <td>[6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...</td>\n","      <td>[-100, 6, 16, 16, 16, 16, 16, 16, 16, 16, 16, ...</td>\n","      <td>[0, 7, 17, 17, 0, 6, 16, 16, 16, 16, 16, 16, 1...</td>\n","      <td>[7, 17, 17, 0, 6, 16, 16, 16, 16, 16, 16, 16, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","      <td>[-100, 0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15,...</td>\n","      <td>[0, 0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10,...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","      <td>[-100, 0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12...</td>\n","      <td>[0, 0, 0, 1, 0, 0, 0, 2, 12, 12, 12, 12, 12, 1...</td>\n","      <td>[0, 0, 1, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1343 
rows × 5 columns</p>\n","</div>"],"text/plain":["                                                  words  ...                                       unpad_labels\n","0     [彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...  ...  [7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","1     [温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...  ...  [7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...\n","2     [突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...  ...  [4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...\n","3     [郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...  ...  [0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...\n","4     [我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...  ...  [0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","...                                                 ...  ...                                                ...\n","1338  [在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...  ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1339  [姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...  ...  [7, 17, 17, 0, 6, 16, 16, 16, 16, 16, 16, 16, ...\n","1340  [目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...  ...  [0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...\n","1341  [也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...  ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20...\n","1342  [另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...  ...  
[0, 0, 1, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...\n","\n","[1343 rows x 5 columns]"]},"metadata":{},"execution_count":22}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/","height":417},"id":"w0m045tzn2Ur","executionInfo":{"status":"ok","timestamp":1636888859623,"user_tz":-480,"elapsed":1089,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"747abbb5-d3fd-4782-9687-67e2f5075995"},"source":["val_df.drop(columns=(['pad_labels','pre_labels']),inplace=True)\n","val_df.to_csv('bert_lstm_validation_1113.csv')\n","val_df"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/html":["<div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>labels</th>\n","      <th>unpad_labels</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>[温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","      <td>[7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...</td>\n","      <td>[4, 14, 14, 14, 14, 14, 14, 14, 0, 
7, 17, 17, ...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","      <td>[0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1338</th>\n","      <td>[在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1339</th>\n","      <td>[姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...</td>\n","      <td>[6, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16...</td>\n","      <td>[7, 17, 17, 0, 6, 16, 16, 16, 16, 16, 16, 16, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","      <td>[0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, 0, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","      <td>[0, 0, 1, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1343 rows × 
3 columns</p>\n","</div>"],"text/plain":["                                                  words  ...                                       unpad_labels\n","0     [彭, 小, 军, 认, 为, ，, 国, 内, 银, 行, 现, 在, 走, 的, 是, ...  ...  [7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","1     [温, 格, 的, 球, 队, 终, 于, 又, 踢, 了, 一, 场, 经, 典, 的, ...  ...  [7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...\n","2     [突, 袭, 黑, 暗, 雅, 典, 娜, 》, 中, R, i, d, d, i, c, ...  ...  [4, 14, 14, 14, 14, 14, 14, 14, 0, 7, 17, 17, ...\n","3     [郑, 阿, 姨, 就, 赶, 到, 文, 汇, 路, 排, 队, 拿, 钱, ，, 希, ...  ...  [0, 0, 0, 0, 0, 0, 1, 11, 11, 0, 0, 0, 0, 0, 0...\n","4     [我, 想, 站, 在, 雪, 山, 脚, 下, 你, 会, 被, 那, 巍, 峨, 的, ...  ...  [0, 0, 0, 0, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0...\n","...                                                 ...  ...                                                ...\n","1338  [在, 这, 个, 非, 常, 喜, 庆, 的, 日, 子, 里, ，, 我, 们, 首, ...  ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1339  [姜, 哲, 中, ：, 公, 共, 之, 敌, 1, -, 1, 》, 、, 《, 神, ...  ...  [7, 17, 17, 0, 6, 16, 16, 16, 16, 16, 16, 16, ...\n","1340  [目, 前, ，, 日, 本, 松, 山, 海, 上, 保, 安, 部, 正, 在, 就, ...  ...  [0, 0, 0, 5, 15, 15, 15, 15, 15, 15, 15, 15, 0...\n","1341  [也, 就, 是, 说, 英, 国, 人, 在, 世, 博, 会, 上, 的, 英, 国, ...  ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20...\n","1342  [另, 外, 意, 大, 利, 的, P, l, a, y, G, e, n, e, r, ...  ...  
[0, 0, 1, 0, 0, 0, 2, 12, 12, 12, 12, 12, 12, ...\n","\n","[1343 rows x 3 columns]"]},"metadata":{},"execution_count":24}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"I14HxCEVv8KM","executionInfo":{"status":"ok","timestamp":1636814554835,"user_tz":-480,"elapsed":8,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"f70c20f2-9244-4422-b288-24eb6a629765"},"source":["val_df.columns"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["Index(['words', 'labels', 'unpad_labels'], dtype='object')"]},"metadata":{},"execution_count":34}]},{"cell_type":"code","metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Bc8c307LlJSm","executionInfo":{"status":"ok","timestamp":1636811951745,"user_tz":-480,"elapsed":484,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"12b75846-8c2b-4275-880b-ad592ff22c24"},"source":["for i in tokenized_val_ds:#这里写val_ds读出来的就是'word'。pandas列间运算要另外写。\n","  print(i)\n","  break"],"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["{'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0], 'input_ids': [101, 2510, 2207, 1092, 6371, 711, 8024, 1744, 1079, 7213, 6121, 4385, 1762, 6624, 4638, 3221, 1378, 3968, 4638, 1355, 1305, 3563, 2466, 8024, 1044, 6858, 6814, 6651, 7716, 1750, 1765, 1086, 1762, 1750, 4638, 1765, 7027, 7481, 6848, 2885, 2145, 2787, 8024, 102, 0, 0, 0, 0, 0, 0, 0, 0], 'labels': [7, 17, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'token_type_ids': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'words': ['彭', '小', '军', '认', '为', '，', '国', '内', '银', '行', '现', '在', '走', '的', '是', '台', '湾', '的', '发', '卡', '模', '式', '，', '先', '通', '过', '跑', '马', '圈', '地', '再', '在', '圈', '的', '地', '里', '面', '选', '择', '客', '户', '，']}\n"]}]},{"cell_type":"code","metadata":{"id":"HyNVlmij7_rk","colab":{"base_uri":"https://localhost:8080/","height":417},"executionInfo":{"status":"ok","timestamp":1636815804514,"user_tz":-480,"elapsed":492,"user":{"displayName":"张hongxu","photoUrl":"https://lh3.googleusercontent.com/a/default-user=s64","userId":"01344108933923387301"}},"outputId":"ba4c2d27-7013-4a92-f244-8a9f3d4135f3"},"source":["#预测测试集结果并保存\n","#torch.save(model.state_dict(),\"best_lstm_whole_4epoch\")\n","#model.load_state_dict(torch.load(\"after_test_bert_lstm_softmax_model\"))\n","#model.to(device)\n","\"\"\"\n","test_predictions=predict(model,test_loader)\n","test_df['pre_labels']=pd.Series(test_predictions)\n","test_datasets=Dataset.from_pandas(test_df)\n","test_df['unpad_labels']=unpadding(test_datasets)\n","\"\"\"\n","test_df.drop(columns=(['labels','pad_labels','pre_labels']),inplace=True)\n","test_df"],"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/html":["<div>\n","<style scoped>\n","    .dataframe tbody tr th:only-of-type {\n","        vertical-align: middle;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: right;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>words</th>\n","      <th>unpad_labels</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>[四, 川, 敦, 煌, 学, ”, 。, 近, 年, 来, ，, 丹, 棱, 县, 等, ...</td>\n","      <td>[1, 11, 11, 18, 0, 0, 0, 0, 0, 0, 0, 1, 11, 11...</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      
<td>[尼, 日, 利, 亚, 海, 军, 发, 言, 人, 当, 天, 在, 阿, 布, 贾, ...</td>\n","      <td>[5, 15, 15, 15, 15, 15, 0, 0, 0, 0, 0, 0, 1, 1...</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>[销, 售, 冠, 军, ：, 辐, 射, 3, -, B, e, t, h, e, s, ...</td>\n","      <td>[0, 0, 0, 0, 0, 4, 14, 14, 0, 0, 0, 14, 14, 14...</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>[所, 以, 大, 多, 数, 人, 都, 是, 从, 巴, 厘, 岛, 南, 部, 开, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>[备, 受, 瞩, 目, 的, 动, 作, 及, 冒, 险, 类, 大, 作, 《, 迷, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 14,...</td>\n","    </tr>\n","    <tr>\n","      <th>...</th>\n","      <td>...</td>\n","      <td>...</td>\n","    </tr>\n","    <tr>\n","      <th>1340</th>\n","      <td>[貌, 似, 主, 队, 优, 势, 不, 小, 。, 不, 过, 欧, 平, 客, 胜, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...</td>\n","    </tr>\n","    <tr>\n","      <th>1341</th>\n","      <td>[当, 年, 赵, 国, 都, 城, 邯, 郸, 被, 秦, 军, 包, 围, ，, 赵, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0,...</td>\n","    </tr>\n","    <tr>\n","      <th>1342</th>\n","      <td>[关, 于, 张, 衡, 地, 动, 仪, 的, 历, 史, 证, 据, ，, 冯, 锐, ...</td>\n","      <td>[0, 0, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 17...</td>\n","    </tr>\n","    <tr>\n","      <th>1343</th>\n","      <td>[一, 直, 面, 带, 微, 笑, ，, 任, 由, 化, 妆, 师, 为, 她, 化, ...</td>\n","      <td>[0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 19, 19, 0, 0, 0...</td>\n","    </tr>\n","    <tr>\n","      <th>1344</th>\n","      <td>[阿, 森, 纳, 将, 在, 主, 场, 对, 阵, 基, 伏, 迪, 纳, 摩, ，, ...</td>\n","      <td>[8, 18, 18, 0, 0, 0, 0, 0, 0, 8, 18, 18, 18, 1...</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>1345 rows × 2 columns</p>\n","</div>"],"text/plain":["                                                  words                                       unpad_labels\n","0     [四, 川, 敦, 煌, 学, ”, 。, 近, 年, 来, ，, 丹, 棱, 县, 等, ...  
[1, 11, 11, 18, 0, 0, 0, 0, 0, 0, 0, 1, 11, 11...\n","1     [尼, 日, 利, 亚, 海, 军, 发, 言, 人, 当, 天, 在, 阿, 布, 贾, ...  [5, 15, 15, 15, 15, 15, 0, 0, 0, 0, 0, 0, 1, 1...\n","2     [销, 售, 冠, 军, ：, 辐, 射, 3, -, B, e, t, h, e, s, ...  [0, 0, 0, 0, 0, 4, 14, 14, 0, 0, 0, 14, 14, 14...\n","3     [所, 以, 大, 多, 数, 人, 都, 是, 从, 巴, 厘, 岛, 南, 部, 开, ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 20, 20, 0, 0, ...\n","4     [备, 受, 瞩, 目, 的, 动, 作, 及, 冒, 险, 类, 大, 作, 《, 迷, ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 14,...\n","...                                                 ...                                                ...\n","1340  [貌, 似, 主, 队, 优, 势, 不, 小, 。, 不, 过, 欧, 平, 客, 胜, ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...\n","1341  [当, 年, 赵, 国, 都, 城, 邯, 郸, 被, 秦, 军, 包, 围, ，, 赵, ...  [0, 0, 0, 0, 0, 0, 1, 11, 0, 0, 0, 0, 0, 0, 0,...\n","1342  [关, 于, 张, 衡, 地, 动, 仪, 的, 历, 史, 证, 据, ，, 冯, 锐, ...  [0, 0, 7, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 17...\n","1343  [一, 直, 面, 带, 微, 笑, ，, 任, 由, 化, 妆, 师, 为, 她, 化, ...  [0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 19, 19, 0, 0, 0...\n","1344  [阿, 森, 纳, 将, 在, 主, 场, 对, 阵, 基, 伏, 迪, 纳, 摩, ，, ...  [8, 18, 18, 0, 0, 0, 0, 0, 0, 8, 18, 18, 18, 1...\n","\n","[1345 rows x 2 columns]"]},"metadata":{},"execution_count":40}]},{"cell_type":"markdown","metadata":{"id":"jOAbzhYSPU06"},"source":["## 想写个返回元组的dataloader，有时间在慢慢试把。还有整理函数里面pad labels数据。"]},{"cell_type":"markdown","metadata":{"id":"kUC6YsAVPA8x"},"source":[""]}]}