{
"_name_or_path": "cner_output/model/baseline/bertweet_large/best_model",
"adapters": {
"adapters": {},
"config_map": {},
"fusion_config_map": {},
"fusions": {}
},
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
0.13981296122074127,
0.7566394805908203,
-0.7220990061759949,
1.166231632232666,
-0.6779972314834595,
1.1762301921844482,
-0.5866987705230713,
-1.099051594734192,
0.9499191045761108,
0.6545137166976929,
0.17080585658550262,
2.566377639770508,
-0.7701754570007324,
-1.9436296224594116,
0.12138371169567108
],
"start_transitions": [
-0.5090795159339905,
-0.8464639186859131,
0.2335522323846817,
-1.0348395109176636,
-0.8655450940132141,
-0.29520362615585327,
-0.25719234347343445,
-0.26060569286346436,
-1.949448585510254,
0.6188551783561707,
0.5877351760864258,
1.1786189079284668,
1.5304737091064453,
-1.323814034461975,
-0.8883035182952881
],
"transitions": [
[
0.04974466934800148,
-0.03809132054448128,
0.028723005205392838,
0.14809350669384003,
-0.18124741315841675,
-0.30009377002716064,
0.4024972915649414,
0.12427700310945511,
-0.02090192399919033,
-0.01924043521285057,
0.02045069821178913,
-0.026875942945480347,
-0.2256879210472107,
-0.28042808175086975,
-0.38050928711891174
],
[
0.14292222261428833,
-0.15924188494682312,
0.29936087131500244,
0.21903842687606812,
-0.24696919322013855,
0.2514601945877075,
-0.13819704949855804,
-0.02829776518046856,
0.41824162006378174,
-0.03291238844394684,
-0.09794175624847412,
0.26777705550193787,
-0.2604112923145294,
-0.5017340779304504,
-0.33826756477355957
],
[
0.20634552836418152,
0.2884766161441803,
0.050690244883298874,
-0.2542170286178589,
-0.10609420388936996,
-0.05259394273161888,
0.005932319909334183,
0.13752709329128265,
-0.359562486410141,
-0.20218300819396973,
0.11926814913749695,
0.0770644024014473,
-0.2104598432779312,
0.22928859293460846,
-0.11474431306123734
],
[
-0.2518812417984009,
-0.2623654305934906,
0.0030688962433487177,
-0.22886236011981964,
-0.29026105999946594,
0.03264757990837097,
-0.4936668872833252,
0.04080783948302269,
-0.009684090502560139,
0.05848511680960655,
-0.08215039223432541,
-0.1444261223077774,
-0.23482805490493774,
-0.3303450047969818,
0.27681994438171387
],
[
-0.11496470868587494,
-0.2690128982067108,
0.16720564663410187,
0.30854731798171997,
-0.15550227463245392,
-0.1797359734773636,
-0.009729867801070213,
-0.43127304315567017,
-0.22200368344783783,
0.28994038701057434,
0.47457680106163025,
-0.17821747064590454,
0.13429678976535797,
-0.602329671382904,
-0.4876270890235901
],
[
0.098031185567379,
0.35223954916000366,
-0.29000213742256165,
-0.3664630651473999,
0.2946051359176636,
-0.19097892940044403,
0.5786229372024536,
0.05523525923490524,
0.20716753602027893,
0.08401688188314438,
-0.19978824257850647,
-0.27228856086730957,
-0.5584190487861633,
0.501918613910675,
0.18864977359771729
],
[
-0.25449416041374207,
0.11471042782068253,
0.0708269327878952,
0.48177415132522583,
-0.13733172416687012,
-0.24508923292160034,
0.24965311586856842,
0.017624789848923683,
0.21790564060211182,
0.15200982987880707,
0.1941514015197754,
-0.009082012809813023,
-0.05904566869139671,
0.13588111102581024,
-0.19730260968208313
],
[
-0.368647962808609,
-0.02364405244588852,
-0.1750616878271103,
0.014001518487930298,
0.5926456451416016,
0.09254804253578186,
0.026694776490330696,
0.200929194688797,
-0.045042138546705246,
-0.08849305659532547,
-0.2987114191055298,
0.1489189863204956,
0.017428912222385406,
-0.313556969165802,
0.10209201276302338
],
[
-0.08893606811761856,
0.1083703562617302,
0.2809942960739136,
0.21534833312034607,
-0.10164651274681091,
0.3729429543018341,
-0.020973650738596916,
-0.20382806658744812,
-0.3456699252128601,
-0.048593081533908844,
0.017275281250476837,
-0.11940392106771469,
-0.2559013366699219,
-0.21001744270324707,
-0.4107760488986969
],
[
0.27411866188049316,
-0.11049547046422958,
0.04318324476480484,
0.49271389842033386,
0.12284833937883377,
0.38686099648475647,
-0.2757198214530945,
-0.05987190827727318,
-0.11121044307947159,
0.09883908182382584,
-0.03714452683925629,
0.18949896097183228,
0.3318972587585449,
0.11847714334726334,
-0.24845154583454132
],
[
0.371123343706131,
0.2063368558883667,
0.3805433511734009,
-0.5009337663650513,
0.129607692360878,
0.1506439447402954,
0.5653514862060547,
-0.37773358821868896,
-0.08297053724527359,
-0.3832688629627228,
0.30121174454689026,
-0.059700917452573776,
0.08907622843980789,
0.11086258292198181,
-0.19111648201942444
],
[
-0.15240901708602905,
0.07657630741596222,
0.0011312044225633144,
-0.23733586072921753,
0.2698361873626709,
0.07116317749023438,
0.21674710512161255,
-0.6951579451560974,
-0.3042435348033905,
0.20563803613185883,
-0.2922617495059967,
-0.25907737016677856,
0.021585553884506226,
-0.16183961927890778,
-0.5643469095230103
],
[
0.1865845024585724,
-0.1970795840024948,
0.2911040186882019,
-0.11214333027601242,
0.2651805281639099,
-0.21113859117031097,
-0.0573832169175148,
0.3723134696483612,
0.08252034336328506,
-0.387037992477417,
-0.5451337695121765,
-0.14733290672302246,
-0.16811956465244293,
-0.10859085619449615,
0.2981266379356384
],
[
-0.13301323354244232,
0.1941656619310379,
-0.08048874884843826,
0.0028258326929062605,
0.26474693417549133,
-0.2700863182544708,
0.16385707259178162,
0.05855187401175499,
0.23214198648929596,
-0.18021482229232788,
-0.22293873131275177,
-0.027526002377271652,
-0.028176482766866684,
0.11903589963912964,
-0.002579913940280676
],
[
0.3391837775707245,
-0.2745877206325531,
-0.07065751403570175,
-0.06504078209400177,
-0.13832345604896545,
-0.08049823343753815,
0.14161577820777893,
-0.23011572659015656,
-0.21264490485191345,
-0.0559820719063282,
-0.5045429468154907,
-0.2468603104352951,
0.1422368884086609,
0.27310627698898315,
-0.36274927854537964
]
]
},
"eos_token_id": 2,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "B-corporation",
"1": "B-creative_work",
"2": "B-event",
"3": "B-group",
"4": "B-location",
"5": "B-person",
"6": "B-product",
"7": "I-corporation",
"8": "I-creative_work",
"9": "I-event",
"10": "I-group",
"11": "I-location",
"12": "I-person",
"13": "I-product",
"14": "O"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"B-corporation": 0,
"B-creative_work": 1,
"B-event": 2,
"B-group": 3,
"B-location": 4,
"B-person": 5,
"B-product": 6,
"I-corporation": 7,
"I-creative_work": 8,
"I-event": 9,
"I-group": 10,
"I-location": 11,
"I-person": 12,
"I-product": 13,
"O": 14
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.11.3",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}
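
Note: the "crf_state_dict" and "adapters" blocks above are not part of the standard RoBERTa configuration; they carry CRF transition weights and adapter metadata written by the training code that produced this checkpoint. Below is a minimal sketch of how a config like this can be consumed with Hugging Face transformers. The repository id "your-org/your-ner-model" is a placeholder (the actual hub name is not given in this file), and plain transformers ignores the stored CRF weights, so the example decodes labels by greedy argmax rather than CRF decoding.

# Minimal sketch: loading a token-classification checkpoint whose config.json
# looks like the one above. "your-org/your-ner-model" is a placeholder id.
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer
import torch

model_id = "your-org/your-ner-model"  # placeholder repository id (assumption)

# Extra keys such as "crf_state_dict" are kept as plain config attributes and
# are not used by AutoModelForTokenClassification itself.
config = AutoConfig.from_pretrained(model_id)
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id, config=config)

text = "Apple is opening a new store in Cardiff next week."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, seq_len, num_labels)

# Greedy argmax over the 15 labels, mapped back through id2label to BIO tags.
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0].tolist())
for token, label_id in zip(tokens, pred_ids):
    print(token, config.id2label[label_id])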