asahi417 committed
Commit a1c1fb1
Parent: c5ed068

model update

config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "cner_output/model/baseline/t_roberta_base_2019/best_model",
+ "_name_or_path": "cardiffnlp/twitter-roberta-base-2019-90m",
  "architectures": [
  "RobertaForTokenClassification"
  ],
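
The only change is "_name_or_path": the internal training path is replaced by the public base checkpoint the model was fine-tuned from. Since "architectures" still lists RobertaForTokenClassification, the checkpoint loads with the standard transformers auto classes. A minimal sketch, assuming a local clone of this repository as the working directory (the "." path is illustrative, not part of the commit); the CRF decoding configured in trainer_config.json below is handled by the training pipeline that produced it, not by this plain loading path:

from transformers import AutoModelForTokenClassification

# "." assumes the current directory is a local clone of this model repository
model = AutoModelForTokenClassification.from_pretrained(".")
print(type(model).__name__)  # RobertaForTokenClassification, as declared in config.json
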
eval/metric.json ADDED
@@ -0,0 +1 @@
+ {"2020.dev": {"micro/f1": 0.6341463414634146, "micro/f1_ci": {}, "micro/recall": 0.6044932079414838, "micro/precision": 0.6668587896253603, "macro/f1": 0.5725336564500102, "macro/f1_ci": {}, "macro/recall": 0.547299581336728, "macro/precision": 0.6045770051810889, "per_entity_metric": {"corporation": {"f1": 0.4741144414168937, "f1_ci": {}, "precision": 0.5304878048780488, "recall": 0.42857142857142855}, "creative_work": {"f1": 0.4736842105263158, "f1_ci": {}, "precision": 0.5232558139534884, "recall": 0.4326923076923077}, "event": {"f1": 0.3905579399141631, "f1_ci": {}, "precision": 0.43333333333333335, "recall": 0.35546875}, "group": {"f1": 0.5396825396825395, "f1_ci": {}, "precision": 0.5560747663551402, "recall": 0.5242290748898678}, "location": {"f1": 0.6077922077922078, "f1_ci": {}, "precision": 0.5735294117647058, "recall": 0.6464088397790055}, "person": {"f1": 0.8705281090289607, "f1_ci": {}, "precision": 0.8871527777777778, "recall": 0.8545150501672241}, "product": {"f1": 0.6513761467889908, "f1_ci": {}, "precision": 0.7282051282051282, "recall": 0.5892116182572614}}}, "2021.test": {"micro/f1": 0.6427956619039422, "micro/f1_ci": {"90": [0.6341597289758578, 0.6524372908527413], "95": [0.6326184232151462, 0.6539625614316887]}, "micro/recall": 0.6476641998149861, "micro/precision": 0.63799977218362, "macro/f1": 0.5931418933396797, "macro/f1_ci": {"90": [0.5829882769964541, 0.6029253957111215], "95": [0.581816657712503, 0.6041961940759853]}, "macro/recall": 0.6003736375632336, "macro/precision": 0.5885274267802955, "per_entity_metric": {"corporation": {"f1": 0.48535564853556484, "f1_ci": {"90": [0.46085441774110664, 0.5119725803305396], "95": [0.45520505216640084, 0.5171770970744771]}, "precision": 0.45849802371541504, "recall": 0.5155555555555555}, "creative_work": {"f1": 0.46893787575150303, "f1_ci": {"90": [0.4397129644242311, 0.49841951445170957], "95": [0.43447079440029895, 0.5046330558125194]}, "precision": 0.45822454308093996, "recall": 0.4801641586867305}, "event": {"f1": 0.4369260512324794, "f1_ci": {"90": [0.41453756735790565, 0.46084953195682354], "95": [0.4096712538810093, 0.46454062974413185]}, "precision": 0.465979381443299, "recall": 0.41128298453139217}, "group": {"f1": 0.5908798972382787, "f1_ci": {"90": [0.5695393272947947, 0.6123359251552747], "95": [0.5664627459133553, 0.6174932887262934]}, "precision": 0.5764411027568922, "recall": 0.6060606060606061}, "location": {"f1": 0.6701366297983083, "f1_ci": {"90": [0.6429253017545784, 0.6962477707064031], "95": [0.6379536594421498, 0.7007031745845312]}, "precision": 0.6272838002436053, "recall": 0.7192737430167597}, "person": {"f1": 0.8399633363886344, "f1_ci": {"90": [0.8290721665958414, 0.8508194267661195], "95": [0.8262450150763545, 0.8526792684560923]}, "precision": 0.8352169157856362, "recall": 0.8447640117994101}, "product": {"f1": 0.6597938144329897, "f1_ci": {"90": [0.6385839770940236, 0.6815940704640066], "95": [0.6339610512677912, 0.6852420327037805]}, "precision": 0.6980482204362801, "recall": 0.6255144032921811}}}, "2020.test": {"micro/f1": 0.6541700624830209, "micro/f1_ci": {"90": [0.635059380660795, 0.6729375721264049], "95": [0.6307634421882851, 0.6758834108900299]}, "micro/recall": 0.6248053969901401, "micro/precision": 0.6864310148232611, "macro/f1": 0.6111250287364248, "macro/f1_ci": {"90": [0.5905920126042988, 0.6311945655032225], "95": [0.5847070260170446, 0.6346146083812573]}, "macro/recall": 0.5881138886316531, "macro/precision": 0.6418894762960121, "per_entity_metric": {"corporation": {"f1": 0.5628140703517588, "f1_ci": {"90": [0.5040526283125426, 0.6157834502662088], "95": [0.49274048209056087, 0.6271466174858398]}, "precision": 0.5410628019323671, "recall": 0.5863874345549738}, "creative_work": {"f1": 0.5368731563421829, "f1_ci": {"90": [0.479085175656292, 0.5886075949367089], "95": [0.4668668548987869, 0.6]}, "precision": 0.56875, "recall": 0.5083798882681564}, "event": {"f1": 0.43388429752066116, "f1_ci": {"90": [0.383236701245796, 0.48343508343508346], "95": [0.37419282994467995, 0.4941783112677321]}, "precision": 0.4794520547945205, "recall": 0.39622641509433965}, "group": {"f1": 0.5622775800711743, "f1_ci": {"90": [0.5072004976892996, 0.6153846153846153], "95": [0.4948220528910802, 0.6231760502760004]}, "precision": 0.6294820717131474, "recall": 0.5080385852090032}, "location": {"f1": 0.64756446991404, "f1_ci": {"90": [0.5840220385674931, 0.7058823529411764], "95": [0.5744330305431932, 0.7183161845323438]}, "precision": 0.6141304347826086, "recall": 0.6848484848484848}, "person": {"f1": 0.8472821397756686, "f1_ci": {"90": [0.8225270104159926, 0.8691725550928349], "95": [0.818687006214862, 0.8734017382906808]}, "precision": 0.872113676731794, "recall": 0.8238255033557047}, "product": {"f1": 0.6871794871794871, "f1_ci": {"90": [0.6374695863746959, 0.7352148543737329], "95": [0.6271313488554868, 0.7454203021919558]}, "precision": 0.788235294117647, "recall": 0.6090909090909091}}}, "2021.test (span detection)": {"micro/f1": 0.778950992769425, "micro/f1_ci": {}, "micro/recall": 0.7848964958945299, "micro/precision": 0.773094885522269, "macro/f1": 0.778950992769425, "macro/f1_ci": {}, "macro/recall": 0.7848964958945299, "macro/precision": 0.773094885522269}, "2020.test (span detection)": {"micro/f1": 0.7655528389024722, "micro/f1_ci": {}, "micro/recall": 0.7311883757135443, "micro/precision": 0.8033067274800456, "macro/f1": 0.7655528389024722, "macro/f1_ci": {}, "macro/recall": 0.7311883757135443, "macro/precision": 0.8033067274800456}}
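
The added file records micro/macro F1, precision, and recall on the 2020 dev split and the 2020/2021 test splits, per-entity scores, and span-detection results, all on a single JSON line. A small sketch (assuming only the eval/metric.json path committed above) for inspecting it without reading the raw line:

import json

with open("eval/metric.json") as f:   # path as added in this commit
    metric = json.load(f)

for split in ("2020.dev", "2021.test", "2020.test"):
    scores = metric[split]
    print(f"{split}: micro/f1={scores['micro/f1']:.4f}  macro/f1={scores['macro/f1']:.4f}")
    for entity, s in scores["per_entity_metric"].items():
        print(f"  {entity:<14} f1={s['f1']:.4f}")
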
eval/prediction.2020.dev.json ADDED
The diff for this file is too large to render. See raw diff
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render. See raw diff
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render. See raw diff
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b88fab18160032759f66df29e929bd9e3a38da4f8e6469e046aea063be4fb985
- size 496349169
+ oid sha256:db3c7baa0f665bc46cc32755c24461929a1cb593a8dce248130b7129ef9c9053
+ size 496351921
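
pytorch_model.bin is stored as a Git LFS pointer, so only the oid (the SHA-256 of the checkpoint) and the file size change here. A sketch, not part of the repository, for checking a downloaded pytorch_model.bin against the new oid after git lfs pull:

import hashlib

EXPECTED_OID = "db3c7baa0f665bc46cc32755c24461929a1cb593a8dce248130b7129ef9c9053"

sha = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:          # local copy of the LFS object
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_OID, "checkpoint does not match the committed LFS oid"
print("checkpoint verified:", sha.hexdigest())
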
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline/t_roberta_base_2019/best_model", "tokenizer_class": "RobertaTokenizer"}
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cardiffnlp/twitter-roberta-base-2019-90m", "tokenizer_class": "RobertaTokenizer"}
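
As in config.json, only name_or_path changes; the tokenizer itself (RobertaTokenizer, model_max_length 512, RoBERTa special tokens) is untouched. A minimal sketch, again assuming a local clone of this repository, confirming the values committed above:

from transformers import AutoTokenizer

# "." assumes a local clone of this repository; values come from tokenizer_config.json
tokenizer = AutoTokenizer.from_pretrained(".")
print(tokenizer.model_max_length)                 # 512
print(tokenizer.mask_token, tokenizer.pad_token)  # <mask> <pad>
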
trainer_config.json ADDED
@@ -0,0 +1 @@
+ {"data_split": "2020.train", "model": "cardiffnlp/twitter-roberta-base-2019-90m", "crf": true, "max_length": 128, "epoch": 30, "batch_size": 32, "lr": 1e-05, "random_seed": 0, "gradient_accumulation_steps": 1, "weight_decay": 1e-07, "lr_warmup_step_ratio": 0.15, "max_grad_norm": 1}
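
The new file records the fine-tuning hyperparameters: training on the 2020.train split from the base model cardiffnlp/twitter-roberta-base-2019-90m with a CRF layer, max_length 128, 30 epochs, batch size 32, learning rate 1e-05, warmup over 15% of the steps, and gradient clipping at 1. A hedged sketch mapping these values onto transformers.TrainingArguments where the fields line up; output_dir is hypothetical, and crf and data_split belong to the training pipeline that wrote this file rather than to TrainingArguments:

import json
from transformers import TrainingArguments

with open("trainer_config.json") as f:
    cfg = json.load(f)

args = TrainingArguments(
    output_dir="ckpt",                                        # hypothetical output path
    learning_rate=cfg["lr"],                                  # 1e-05
    num_train_epochs=cfg["epoch"],                            # 30
    per_device_train_batch_size=cfg["batch_size"],            # 32
    gradient_accumulation_steps=cfg["gradient_accumulation_steps"],
    weight_decay=cfg["weight_decay"],                         # 1e-07
    warmup_ratio=cfg["lr_warmup_step_ratio"],                 # 0.15
    max_grad_norm=cfg["max_grad_norm"],                       # 1
    seed=cfg["random_seed"],                                  # 0
)
# cfg["crf"] and cfg["data_split"] are consumed by the original training code, not here
print(args.learning_rate, cfg["model"])
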