asahi417 committed
Commit 5b27c45 (1 parent: 94905b3)

model update

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "cner_output/model/baseline_2021/roberta_large_concat/best_model",
+  "_name_or_path": "roberta-large",
   "architectures": [
     "RobertaForTokenClassification"
   ],
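The only substantive change here is that "_name_or_path" now records the public "roberta-large" base checkpoint rather than a local training directory; the architecture is unchanged. Below is a minimal loading sketch, assuming the transformers library and using a placeholder repo id (the actual model id hosting this checkpoint is not shown in this diff):

```python
from transformers import AutoTokenizer, AutoModelForTokenClassification

# Placeholder repo id -- substitute the actual Hugging Face model id for this checkpoint.
model_id = "<namespace>/<model-name>"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The config records the fine-tuned head's architecture.
print(model.config.architectures)  # ['RobertaForTokenClassification']
```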
eval/metric.json ADDED
@@ -0,0 +1 @@
+ {"2020.dev": {"micro/f1": 0.6569543705332601, "micro/f1_ci": {"90": [0.6347708061707447, 0.675787762538431], "95": [0.6315458913405132, 0.6789961088599261]}, "micro/recall": 0.6243469174503657, "micro/precision": 0.6931554524361949, "macro/f1": 0.6071703884347733, "macro/f1_ci": {"90": [0.5842888341668635, 0.6272437871418639], "95": [0.5798781678355553, 0.6309167018785864]}, "macro/recall": 0.5768996197534351, "macro/precision": 0.6505290877604671, "per_entity_metric": {"corporation": {"f1": 0.5219941348973607, "f1_ci": {"90": [0.4574780058651027, 0.5763688760806917], "95": [0.44520339124624125, 0.5885562475671468]}, "precision": 0.644927536231884, "recall": 0.43842364532019706}, "creative_work": {"f1": 0.5555555555555556, "f1_ci": {"90": [0.4925531612328286, 0.6081887366818874], "95": [0.477124183006536, 0.6200120481927712]}, "precision": 0.6578947368421053, "recall": 0.4807692307692308}, "event": {"f1": 0.3737166324435318, "f1_ci": {"90": [0.32148941298339, 0.4247155049786628], "95": [0.31484302510465717, 0.4322250767382891]}, "precision": 0.3939393939393939, "recall": 0.35546875}, "group": {"f1": 0.5807770961145193, "f1_ci": {"90": [0.5275139764908258, 0.6275909121462661], "95": [0.5189204545454545, 0.6361928193310445]}, "precision": 0.5419847328244275, "recall": 0.6255506607929515}, "location": {"f1": 0.6666666666666666, "f1_ci": {"90": [0.6095185331750428, 0.7147472409437143], "95": [0.5951683238877389, 0.7281801905492677]}, "precision": 0.6542553191489362, "recall": 0.6795580110497238}, "person": {"f1": 0.8665511265164645, "f1_ci": {"90": [0.8438121549252551, 0.8865095226489329], "95": [0.8386447010869564, 0.8910758099658961]}, "precision": 0.8992805755395683, "recall": 0.8361204013377926}, "product": {"f1": 0.6849315068493151, "f1_ci": {"90": [0.6361345844206319, 0.7276693316215375], "95": [0.6277119983854149, 0.7336193346190936]}, "precision": 0.7614213197969543, "recall": 0.6224066390041494}}}, "2021.test": {"micro/f1": 0.6574934067194129, "micro/f1_ci": {"90": [0.6488384906771661, 0.6668708832165559], "95": [0.6465245785916518, 0.6685455948975375]}, "micro/recall": 0.6630434782608695, "micro/precision": 0.6520354787355015, "macro/f1": 0.6118476204727268, "macro/f1_ci": {"90": [0.6017404898343343, 0.6217032067276532], "95": [0.5998330900671207, 0.6231511559289998]}, "macro/recall": 0.6175539552781218, "macro/precision": 0.6072477857570534, "per_entity_metric": {"corporation": {"f1": 0.541899441340782, "f1_ci": {"90": [0.5186795598247989, 0.5685856621094023], "95": [0.5126138225832753, 0.5743503513898618]}, "precision": 0.5449438202247191, "recall": 0.5388888888888889}, "creative_work": {"f1": 0.46601941747572817, "f1_ci": {"90": [0.4353626239768139, 0.4989775051124744], "95": [0.43055507794589637, 0.5059671468032207]}, "precision": 0.47257383966244726, "recall": 0.45964432284541723}, "event": {"f1": 0.49394221808014904, "f1_ci": {"90": [0.46986206833907485, 0.5162773107105282], "95": [0.4663047835964417, 0.5204910201076981]}, "precision": 0.5062082139446036, "recall": 0.4822565969062784}, "group": {"f1": 0.6076659395450296, "f1_ci": {"90": [0.5869813402758498, 0.6292822463184621], "95": [0.5833058401946332, 0.6339008885966244]}, "precision": 0.576581904198699, "recall": 0.642292490118577}, "location": {"f1": 0.6697674418604651, "f1_ci": {"90": [0.6431739079049713, 0.6956575951425783], "95": [0.638513775592407, 0.6994724542049008]}, "precision": 0.6387832699619772, "recall": 0.7039106145251397}, "person": {"f1": 0.8321060382916053, "f1_ci": {"90": [0.8215742088885891, 
0.8431088930663221], "95": [0.8197897485813659, 0.844858913782981]}, "precision": 0.8308823529411765, "recall": 0.8333333333333334}, "product": {"f1": 0.6715328467153284, "f1_ci": {"90": [0.649823578485181, 0.6921889493709199], "95": [0.6449209966416612, 0.6959884403753066]}, "precision": 0.6807610993657506, "recall": 0.6625514403292181}}}, "2020.test": {"micro/f1": 0.6490030046435399, "micro/f1_ci": {"90": [0.6289418548289524, 0.668141458100033], "95": [0.6249252774003541, 0.6725029471927343]}, "micro/recall": 0.6165023352361183, "micro/precision": 0.6851211072664359, "macro/f1": 0.6137144801993001, "macro/f1_ci": {"90": [0.5921297090566222, 0.6343935788610027], "95": [0.5882584824882211, 0.638865227814623]}, "macro/recall": 0.5808559297761207, "macro/precision": 0.6514440514516731, "per_entity_metric": {"corporation": {"f1": 0.5722543352601156, "f1_ci": {"90": [0.5085396825396825, 0.628099173553719], "95": [0.4955725476566051, 0.6370768029046094]}, "precision": 0.6387096774193548, "recall": 0.518324607329843}, "creative_work": {"f1": 0.5176470588235295, "f1_ci": {"90": [0.4589757904887581, 0.5744010277473017], "95": [0.4495016858106177, 0.585654161001996]}, "precision": 0.546583850931677, "recall": 0.49162011173184356}, "event": {"f1": 0.4701348747591523, "f1_ci": {"90": [0.41884503039393695, 0.5178236397748592], "95": [0.41117694923741366, 0.5247983740763956]}, "precision": 0.48031496062992124, "recall": 0.46037735849056605}, "group": {"f1": 0.5561312607944734, "f1_ci": {"90": [0.5053600521342457, 0.605074580444502], "95": [0.4974327603118063, 0.6153903903903905]}, "precision": 0.6007462686567164, "recall": 0.5176848874598071}, "location": {"f1": 0.6729559748427673, "f1_ci": {"90": [0.6073081783715472, 0.7337578575170246], "95": [0.5973079234698988, 0.7469276094276096]}, "precision": 0.6993464052287581, "recall": 0.6484848484848484}, "person": {"f1": 0.817391304347826, "f1_ci": {"90": [0.7891867433043904, 0.8432835843761107], "95": [0.785072596776479, 0.8466064618376089]}, "precision": 0.8483754512635379, "recall": 0.7885906040268457}, "product": {"f1": 0.6894865525672372, "f1_ci": {"90": [0.6385614324393587, 0.7354514422256357], "95": [0.6277188528286759, 0.7440663925875929]}, "precision": 0.746031746031746, "recall": 0.6409090909090909}}}, "2021.test (span detection)": {"micro/f1": 0.7902069835445216, "micro/f1_ci": {}, "micro/recall": 0.7969237885972014, "micro/precision": 0.7836024562201501, "macro/f1": 0.7902069835445216, "macro/f1_ci": {}, "macro/recall": 0.7969237885972014, "macro/precision": 0.7836024562201501}, "2020.test (span detection)": {"micro/f1": 0.7557377049180328, "micro/f1_ci": {}, "micro/recall": 0.7176959003632589, "micro/precision": 0.7980380842469705, "macro/f1": 0.7557377049180328, "macro/f1_ci": {}, "macro/recall": 0.7176959003632589, "macro/precision": 0.7980380842469705}, "2020.dev (span detection)": {"micro/f1": 0.7680043980208907, "micro/f1_ci": {}, "micro/recall": 0.7298850574712644, "micro/precision": 0.8103248259860789, "macro/f1": 0.7680043980208907, "macro/f1_ci": {}, "macro/recall": 0.7298850574712644, "macro/precision": 0.8103248259860789}}
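eval/metric.json groups results by evaluation split ("2020.dev", "2021.test", "2020.test", plus "(span detection)" variants), each holding micro/macro precision, recall, F1, bootstrap confidence intervals, and a per-entity breakdown. A small sketch for reading it, assuming a local clone of the repository:

```python
import json

# Load the evaluation file added in this commit (path relative to the repo root).
with open("eval/metric.json") as f:
    metrics = json.load(f)

# Micro F1 per split, e.g. roughly 0.657 on 2021.test and 0.649 on 2020.test.
for split, scores in metrics.items():
    print(split, round(scores["micro/f1"], 4))

# Per-entity F1 on the 2021 test set (keys such as "person", "location", "product").
for entity, scores in metrics["2021.test"]["per_entity_metric"].items():
    print(entity, round(scores["f1"], 4))
```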
eval/prediction.2020.dev.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:950180fa4ac7b6a8614a89f71fe5365b59a8cf7a45c3d155be34bfd2c1a13bee
-size 1417461873
+oid sha256:455434abde74afb620eac2b328f0ffb2e4421eb96d0195c994891f7a63e8da8c
+size 1417467377
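pytorch_model.bin is stored through Git LFS, so the tracked file is only a pointer carrying the blob's SHA-256 and byte size; this commit swaps in a new checkpoint. A sketch for verifying a downloaded weight file against the updated pointer, assuming the file sits in the current directory:

```python
import hashlib
import os

# Values from the updated LFS pointer in this commit.
EXPECTED_SHA256 = "455434abde74afb620eac2b328f0ffb2e4421eb96d0195c994891f7a63e8da8c"
EXPECTED_SIZE = 1417467377  # bytes

path = "pytorch_model.bin"
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"

# Hash in 1 MiB chunks to avoid loading the ~1.4 GB file into memory at once.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_SHA256, "hash does not match the LFS pointer"
print("pytorch_model.bin matches the LFS pointer")
```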
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "name_or_path": "cner_output/model/baseline_2021/roberta_large_concat/best_model", "special_tokens_map_file": "cner_output/model/baseline_2021/roberta_large_concat/best_model/special_tokens_map.json", "tokenizer_class": "RobertaTokenizer"}
+{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "name_or_path": "roberta-large", "tokenizer_class": "RobertaTokenizer"}