asahi417 committed on
Commit
022e0fc
1 Parent(s): 99a2c19

model update

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "cner_output/model/baseline_2021/t_roberta_base_dec2020_concat/best_model",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
 
1
  {
2
+ "_name_or_path": "cardiffnlp/twitter-roberta-base-dec2020",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
eval/metric.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"2021.dev": {"micro/f1": 0.6425339366515838, "micro/f1_ci": {}, "micro/recall": 0.639, "micro/precision": 0.6461071789686552, "macro/f1": 0.5989569296686776, "macro/f1_ci": {}, "macro/recall": 0.5963672516044548, "macro/precision": 0.604043351645594, "per_entity_metric": {"corporation": {"f1": 0.6082474226804123, "f1_ci": {}, "precision": 0.6413043478260869, "recall": 0.5784313725490197}, "creative_work": {"f1": 0.4171779141104294, "f1_ci": {}, "precision": 0.38202247191011235, "recall": 0.4594594594594595}, "event": {"f1": 0.4251968503937008, "f1_ci": {}, "precision": 0.43902439024390244, "recall": 0.4122137404580153}, "group": {"f1": 0.6236080178173719, "f1_ci": {}, "precision": 0.6306306306306306, "recall": 0.6167400881057269}, "location": {"f1": 0.6486486486486486, "f1_ci": {}, "precision": 0.631578947368421, "recall": 0.6666666666666666}, "person": {"f1": 0.8286713286713286, "f1_ci": {}, "precision": 0.8200692041522492, "recall": 0.8374558303886925}, "product": {"f1": 0.6411483253588517, "f1_ci": {}, "precision": 0.6836734693877551, "recall": 0.6036036036036037}}}, "2021.test": {"micro/f1": 0.6526255707762557, "micro/f1_ci": {"90": [0.6437691693006731, 0.6623739817960804], "95": [0.6423289413183693, 0.6642699129749126]}, "micro/recall": 0.6610777058279371, "micro/precision": 0.6443868349864743, "macro/f1": 0.6069741859166096, "macro/f1_ci": {"90": [0.5969405931574795, 0.6169180905698453], "95": [0.5951882145650105, 0.6186261415221893]}, "macro/recall": 0.6172166732079049, "macro/precision": 0.5990170780704488, "per_entity_metric": {"corporation": {"f1": 0.5153234960272418, "f1_ci": {"90": [0.48979124830710313, 0.5416453967401038], "95": [0.4846763606289239, 0.5484286848323734]}, "precision": 0.5266821345707656, "recall": 0.5044444444444445}, "creative_work": {"f1": 0.47595252966895685, "f1_ci": {"90": [0.4450136547127212, 0.5060689375403542], "95": [0.4406451100516083, 0.5104259293002914]}, "precision": 0.4379310344827586, "recall": 0.521203830369357}, 
"event": {"f1": 0.46693657219973006, "f1_ci": {"90": [0.4433278746436782, 0.4904300531124057], "95": [0.43908609101602464, 0.4948272743714957]}, "precision": 0.46174377224199287, "recall": 0.47224749772520475}, "group": {"f1": 0.60928, "f1_ci": {"90": [0.58905038898122, 0.6306565075497464], "95": [0.5857818627333528, 0.6337814610823933]}, "precision": 0.5924082140634723, "recall": 0.6271409749670619}, "location": {"f1": 0.6688567674113008, "f1_ci": {"90": [0.6411061399494976, 0.6963439016627287], "95": [0.6362671616964405, 0.7015416924027178]}, "precision": 0.6315136476426799, "recall": 0.7108938547486033}, "person": {"f1": 0.8386501936197677, "f1_ci": {"90": [0.8280056803440087, 0.8493018351616476], "95": [0.8263492236736317, 0.8510733824274442]}, "precision": 0.8388048690520103, "recall": 0.838495575221239}, "product": {"f1": 0.6738197424892703, "f1_ci": {"90": [0.65218196906801, 0.6956179563127771], "95": [0.6475943730141744, 0.6984190719050953]}, "precision": 0.7040358744394619, "recall": 0.6460905349794238}}}, "2020.test": {"micro/f1": 0.6544474393530997, "micro/f1_ci": {"90": [0.6328497699075983, 0.6728044842641441], "95": [0.6291867061094695, 0.6760719350870426]}, "micro/recall": 0.6299948105864037, "micro/precision": 0.680874929893438, "macro/f1": 0.6138692748869267, "macro/f1_ci": {"90": [0.590175490065613, 0.6330937411191636], "95": [0.5865029961531653, 0.6369957089246945]}, "macro/recall": 0.5920548821120473, "macro/precision": 0.639860659918586, "per_entity_metric": {"corporation": {"f1": 0.5675675675675675, "f1_ci": {"90": [0.504295559145109, 0.6253295812135242], "95": [0.49554695181907565, 0.6339265479676438]}, "precision": 0.5865921787709497, "recall": 0.5497382198952879}, "creative_work": {"f1": 0.550561797752809, "f1_ci": {"90": [0.4934960481877372, 0.6020524118738404], "95": [0.4848118648722878, 0.6132814376651454]}, "precision": 0.5536723163841808, "recall": 0.547486033519553}, "event": {"f1": 0.462406015037594, "f1_ci": {"90": 
[0.4108194750550992, 0.5124638081746882], "95": [0.401291157260602, 0.5232811578406383]}, "precision": 0.4606741573033708, "recall": 0.4641509433962264}, "group": {"f1": 0.5551601423487544, "f1_ci": {"90": [0.5016589792385684, 0.6076091861210725], "95": [0.4912804447531413, 0.617329390460492]}, "precision": 0.6215139442231076, "recall": 0.5016077170418006}, "location": {"f1": 0.6426426426426426, "f1_ci": {"90": [0.5713889988128216, 0.7035890939799735], "95": [0.5591985171261487, 0.7202876984126986]}, "precision": 0.6369047619047619, "recall": 0.6484848484848484}, "person": {"f1": 0.8487467588591183, "f1_ci": {"90": [0.8219659833630422, 0.8716422802714063], "95": [0.8163611658762722, 0.8762541806020068]}, "precision": 0.875222816399287, "recall": 0.8238255033557047}, "product": {"f1": 0.67, "f1_ci": {"90": [0.6153665583243049, 0.7173996017258546], "95": [0.6060281385281385, 0.7304367192795584]}, "precision": 0.7444444444444445, "recall": 0.6090909090909091}}}, "2021.test (span detection)": {"micro/f1": 0.7868028997088875, "micro/f1_ci": {}, "micro/recall": 0.7970394356424193, "micro/precision": 0.7768259693417493, "macro/f1": 0.7868028997088875, "macro/f1_ci": {}, "macro/recall": 0.7970394356424193, "macro/precision": 0.7768259693417493}, "2020.test (span detection)": {"micro/f1": 0.7586950660555406, "micro/f1_ci": {}, "micro/recall": 0.7301504929942917, "micro/precision": 0.7895622895622896, "macro/f1": 0.7586950660555406, "macro/f1_ci": {}, "macro/recall": 0.7301504929942917, "macro/precision": 0.7895622895622896}}
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.dev.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9b7a9c945d8858dc2ec84ae34d7979df2502725a784c4f73fc3a13a1218f3c52
3
- size 496349169
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0bc46c3da337c84bff57e34eea41f3a04f4f40c135c657a45873af2b28b8cd65
3
+ size 496351921
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline_2021/t_roberta_base_dec2020_concat/best_model", "tokenizer_class": "RobertaTokenizer"}
 
1
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cardiffnlp/twitter-roberta-base-dec2020", "tokenizer_class": "RobertaTokenizer"}
trainer_config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"data_split": "2020_2021.train", "model": "cardiffnlp/twitter-roberta-base-dec2020", "crf": true, "max_length": 128, "epoch": 30, "batch_size": 32, "lr": 1e-05, "random_seed": 0, "gradient_accumulation_steps": 1, "weight_decay": 1e-07, "lr_warmup_step_ratio": 0.3, "max_grad_norm": 1}