asahi417 committed on
Commit
6f2bbee
1 Parent(s): b1f1f58

model update

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "cner_output/model/baseline_2021/t_roberta_base_2019_concat/best_model",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
 
1
  {
2
+ "_name_or_path": "cardiffnlp/twitter-roberta-base-2019-90m",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
eval/metric.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"2021.dev": {"micro/f1": 0.6393525543753161, "micro/f1_ci": {}, "micro/recall": 0.632, "micro/precision": 0.646878198567042, "macro/f1": 0.5955739662265487, "macro/f1_ci": {}, "macro/recall": 0.5933953061891987, "macro/precision": 0.6021591570769812, "per_entity_metric": {"corporation": {"f1": 0.6132075471698113, "f1_ci": {}, "precision": 0.5909090909090909, "recall": 0.6372549019607843}, "creative_work": {"f1": 0.4230769230769231, "f1_ci": {}, "precision": 0.4024390243902439, "recall": 0.44594594594594594}, "event": {"f1": 0.41004184100418406, "f1_ci": {}, "precision": 0.4537037037037037, "recall": 0.37404580152671757}, "group": {"f1": 0.6143497757847532, "f1_ci": {}, "precision": 0.6255707762557078, "recall": 0.6035242290748899}, "location": {"f1": 0.6405228758169934, "f1_ci": {}, "precision": 0.6049382716049383, "recall": 0.6805555555555556}, "person": {"f1": 0.8210526315789473, "f1_ci": {}, "precision": 0.8153310104529616, "recall": 0.8268551236749117}, "product": {"f1": 0.6467661691542289, "f1_ci": {}, "precision": 0.7222222222222222, "recall": 0.5855855855855856}}}, "2021.test": {"micro/f1": 0.6567966159826227, "micro/f1_ci": {"90": [0.648368394653773, 0.6664006471768674], "95": [0.646545111092117, 0.6680503208004025]}, "micro/recall": 0.6643154486586494, "micro/precision": 0.6494460773230839, "macro/f1": 0.6099755599654287, "macro/f1_ci": {"90": [0.5996849808261219, 0.6196609939303647], "95": [0.5981068219009371, 0.6213388278430806]}, "macro/recall": 0.6189811354202427, "macro/precision": 0.602661693428744, "per_entity_metric": {"corporation": {"f1": 0.5087071240105541, "f1_ci": {"90": [0.48418645598643384, 0.5346670166688542], "95": [0.4789882509840839, 0.5378380150591427]}, "precision": 0.4844221105527638, "recall": 0.5355555555555556}, "creative_work": {"f1": 0.4729907773386035, "f1_ci": {"90": [0.44239598560481413, 0.5042078308369256], "95": [0.43621394580304024, 0.510041651317275]}, "precision": 0.45616264294790343, "recall": 0.4911080711354309}, 
"event": {"f1": 0.48405253283302063, "f1_ci": {"90": [0.45995907383110685, 0.5078510542844324], "95": [0.4572237113916049, 0.5114127149849642]}, "precision": 0.4995159728944821, "recall": 0.4695177434030937}, "group": {"f1": 0.6147885050048434, "f1_ci": {"90": [0.5942964585813152, 0.6367114602499198], "95": [0.5886815262199213, 0.6417294334888041]}, "precision": 0.6029132362254591, "recall": 0.6271409749670619}, "location": {"f1": 0.679419525065963, "f1_ci": {"90": [0.6525572232398171, 0.7066645568922948], "95": [0.6461263147220855, 0.7104498210107556]}, "precision": 0.64375, "recall": 0.7192737430167597}, "person": {"f1": 0.83927591881514, "f1_ci": {"90": [0.8290404596313247, 0.8497318623631218], "95": [0.8264074120586163, 0.8510279929920115]}, "precision": 0.8324265505984766, "recall": 0.8462389380530974}, "product": {"f1": 0.6705945366898768, "f1_ci": {"90": [0.6480566067009147, 0.6920868989728419], "95": [0.6439099688540568, 0.6967782522764171]}, "precision": 0.6994413407821229, "recall": 0.6440329218106996}}}, "2020.test": {"micro/f1": 0.6545553145336225, "micro/f1_ci": {"90": [0.6337694636233485, 0.6740664541097675], "95": [0.6286942655715614, 0.6773336336898144]}, "micro/recall": 0.6263622210690192, "micro/precision": 0.6854060193072118, "macro/f1": 0.6121643911579755, "macro/f1_ci": {"90": [0.5886445730361866, 0.6317442192632546], "95": [0.5854534835525667, 0.6376447796836711]}, "macro/recall": 0.5898647290448411, "macro/precision": 0.6403532739362632, "per_entity_metric": {"corporation": {"f1": 0.5685279187817259, "f1_ci": {"90": [0.5082245989304813, 0.6205332894411982], "95": [0.49855067415904264, 0.6307356501580901]}, "precision": 0.5517241379310345, "recall": 0.5863874345549738}, "creative_work": {"f1": 0.5214899713467048, "f1_ci": {"90": [0.4615154306771073, 0.5762811565304089], "95": [0.4523527656187823, 0.5820755933952529]}, "precision": 0.5352941176470588, "recall": 0.5083798882681564}, "event": {"f1": 0.46680080482897385, "f1_ci": {"90": 
[0.4139061184152339, 0.5196369233051477], "95": [0.4056021681918053, 0.5302222222222222]}, "precision": 0.5, "recall": 0.4377358490566038}, "group": {"f1": 0.5668449197860962, "f1_ci": {"90": [0.5134957325746798, 0.6186378862301534], "95": [0.5024820823918631, 0.6271303331385156]}, "precision": 0.636, "recall": 0.5112540192926045}, "location": {"f1": 0.6510263929618768, "f1_ci": {"90": [0.5882352941176471, 0.711127694859038], "95": [0.5740167861420475, 0.7222263681592039]}, "precision": 0.6306818181818182, "recall": 0.6727272727272727}, "person": {"f1": 0.8454861111111112, "f1_ci": {"90": [0.8198943969474537, 0.8677685950413223], "95": [0.813751217094968, 0.8714086615122105]}, "precision": 0.8758992805755396, "recall": 0.8171140939597316}, "product": {"f1": 0.6649746192893401, "f1_ci": {"90": [0.6066754289322277, 0.7175162806745158], "95": [0.5919027721157766, 0.7241796440489434]}, "precision": 0.7528735632183908, "recall": 0.5954545454545455}}}, "2021.test (span detection)": {"micro/f1": 0.7888869833647745, "micro/f1_ci": {}, "micro/recall": 0.7979646120041632, "micro/precision": 0.7800135654533122, "macro/f1": 0.7888869833647745, "macro/f1_ci": {}, "macro/recall": 0.7979646120041632, "macro/precision": 0.7800135654533122}, "2020.test (span detection)": {"micro/f1": 0.7643070246813126, "micro/f1_ci": {}, "micro/recall": 0.7311883757135443, "micro/precision": 0.8005681818181818, "macro/f1": 0.7643070246813126, "macro/f1_ci": {}, "macro/recall": 0.7311883757135443, "macro/precision": 0.8005681818181818}}
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.dev.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fbf02c6889b1e972d87f9fa787a22474d8c085073ef366ec7bb90c5297c84a6a
3
- size 496349169
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:369452c97bcc09f7cea3b17fe4265bac49ec11d141f65a37a448df63c4eef871
3
+ size 496351921
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline_2021/t_roberta_base_2019_concat/best_model", "tokenizer_class": "RobertaTokenizer"}
 
1
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cardiffnlp/twitter-roberta-base-2019-90m", "tokenizer_class": "RobertaTokenizer"}
trainer_config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"data_split": "2020_2021.train", "model": "cardiffnlp/twitter-roberta-base-2019-90m", "crf": true, "max_length": 128, "epoch": 30, "batch_size": 32, "lr": 1e-05, "random_seed": 0, "gradient_accumulation_steps": 1, "weight_decay": 1e-07, "lr_warmup_step_ratio": 0.15, "max_grad_norm": 1}