asahi417 committed on
Commit d199959
1 Parent(s): 0c38a91

model update

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "cner_output/model/random_split/roberta_base/best_model",
+  "_name_or_path": "cner_output/model/random_split/roberta_base/model_mwzvua/epoch_10",
   "architectures": [
     "RobertaForTokenClassification"
   ],
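The only change to config.json is that _name_or_path now points at the epoch-10 checkpoint rather than best_model; the architecture stays RobertaForTokenClassification. As a minimal sketch (assuming the model is published on the Hugging Face hub; the repo id below is a placeholder, since the commit page does not show it), the checkpoint can be loaded like this:

    from transformers import AutoTokenizer, AutoModelForTokenClassification

    # Placeholder repo id: substitute the actual hub repository this commit belongs to.
    repo_id = "<namespace>/<model-name>"

    tokenizer = AutoTokenizer.from_pretrained(repo_id)                 # RobertaTokenizer per tokenizer_config.json
    model = AutoModelForTokenClassification.from_pretrained(repo_id)   # RobertaForTokenClassification per config.json

    inputs = tokenizer("Jacob Collier is a Grammy awarded artist from London.", return_tensors="pt")
    outputs = model(**inputs)
    # Plain argmax decoding as an illustration; it ignores the CRF layer used during training.
    predicted_ids = outputs.logits.argmax(dim=-1)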
eval/metric.json ADDED
@@ -0,0 +1 @@
+ {"random.dev": {"micro/f1": 0.6366382861851139, "micro/f1_ci": {}, "micro/recall": 0.6187933796049119, "micro/precision": 0.6555429864253394, "macro/f1": 0.5877085684942696, "macro/f1_ci": {}, "macro/recall": 0.5721804509246399, "macro/precision": 0.6073972739006512, "per_entity_metric": {"corporation": {"f1": 0.57356608478803, "f1_ci": {}, "precision": 0.5528846153846154, "recall": 0.5958549222797928}, "creative_work": {"f1": 0.46258503401360546, "f1_ci": {}, "precision": 0.5151515151515151, "recall": 0.41975308641975306}, "event": {"f1": 0.3905579399141631, "f1_ci": {}, "precision": 0.4117647058823529, "recall": 0.37142857142857144}, "group": {"f1": 0.61236802413273, "f1_ci": {}, "precision": 0.6403785488958991, "recall": 0.5867052023121387}, "location": {"f1": 0.6246246246246248, "f1_ci": {}, "precision": 0.611764705882353, "recall": 0.6380368098159509}, "person": {"f1": 0.8488262910798122, "f1_ci": {}, "precision": 0.849624060150376, "recall": 0.8480300187617261}, "product": {"f1": 0.6014319809069213, "f1_ci": {}, "precision": 0.6702127659574468, "recall": 0.5454545454545454}}}, "2021.test": {"micro/f1": 0.6404223573969929, "micro/f1_ci": {"90": [0.6318442200239734, 0.6492710692925945], "95": [0.6304569517689761, 0.6507742568199575]}, "micro/recall": 0.6452358926919519, "micro/precision": 0.6356801093643198, "macro/f1": 0.5923474605228576, "macro/f1_ci": {"90": [0.5834248476754859, 0.6017201600387958], "95": [0.5814450008984621, 0.6034547429897775]}, "macro/recall": 0.6009576961702408, "macro/precision": 0.5882611136070073, "per_entity_metric": {"corporation": {"f1": 0.5073313782991202, "f1_ci": {"90": [0.48285841008729546, 0.5319891471347782], "95": [0.4769294748422465, 0.5359498674646497]}, "precision": 0.45287958115183247, "recall": 0.5766666666666667}, "creative_work": {"f1": 0.4235127478753541, "f1_ci": {"90": [0.3920038797569273, 0.4535109162157119], "95": [0.3865055698995115, 0.45722587346643756]}, "precision": 0.4390602055800294, "recall": 0.40902872777017785}, "event": {"f1": 0.4598470363288719, "f1_ci": {"90": [0.4361915405444048, 0.4815770919408292], "95": [0.43228713377486544, 0.48659304158476635]}, "precision": 0.48439073514602216, "recall": 0.43767060964513194}, "group": {"f1": 0.5972820682797481, "f1_ci": {"90": [0.5769763743967469, 0.6182142646082546], "95": [0.5723013739350298, 0.6231996920200187]}, "precision": 0.6010673782521682, "recall": 0.5935441370223979}, "location": {"f1": 0.6794871794871795, "f1_ci": {"90": [0.6532722132111245, 0.7069142033886049], "95": [0.6483586531023602, 0.711041212742627]}, "precision": 0.6279620853080569, "recall": 0.7402234636871509}, "person": {"f1": 0.8231629100238226, "f1_ci": {"90": [0.8123723616450855, 0.8337047799239938], "95": [0.8103268247356568, 0.8359212540153715]}, "precision": 0.8182149362477231, "recall": 0.8281710914454278}, "product": {"f1": 0.6558089033659067, "f1_ci": {"90": [0.6343529502269406, 0.6783496906386242], "95": [0.6296462048936172, 0.6817721129053114]}, "precision": 0.6942528735632184, "recall": 0.6213991769547325}}}, "2020.test": {"micro/f1": 0.6413755458515283, "micro/f1_ci": {"90": [0.6222555476147691, 0.6597865946700048], "95": [0.6193131081459617, 0.6639230650053427]}, "micro/recall": 0.6097560975609756, "micro/precision": 0.6764536557282671, "macro/f1": 0.5978359372811374, "macro/f1_ci": {"90": [0.5764363670094901, 0.618103079896346], "95": [0.5718154248562978, 0.6229228623936891]}, "macro/recall": 0.5696483348245823, "macro/precision": 0.6331635922016912, "per_entity_metric": {"corporation": {"f1": 
0.5757575757575757, "f1_ci": {"90": [0.5194705037892698, 0.6281803562144057], "95": [0.5085437100213219, 0.6400349854227406]}, "precision": 0.5560975609756098, "recall": 0.5968586387434555}, "creative_work": {"f1": 0.4761904761904763, "f1_ci": {"90": [0.4155083554583789, 0.5296136875104415], "95": [0.404413869605668, 0.5397183276493623]}, "precision": 0.5514705882352942, "recall": 0.41899441340782123}, "event": {"f1": 0.42190669371196754, "f1_ci": {"90": [0.3673381231542293, 0.4758234343588129], "95": [0.3601694915254237, 0.48509915610074]}, "precision": 0.45614035087719296, "recall": 0.39245283018867927}, "group": {"f1": 0.5648312611012433, "f1_ci": {"90": [0.5137745892154593, 0.6134932533733134], "95": [0.5036973180076628, 0.6260172918988073]}, "precision": 0.6309523809523809, "recall": 0.5112540192926045}, "location": {"f1": 0.6707317073170732, "f1_ci": {"90": [0.6065793065793067, 0.7238615901724621], "95": [0.5954137077134027, 0.7374486720829003]}, "precision": 0.6748466257668712, "recall": 0.6666666666666666}, "person": {"f1": 0.8270547945205479, "f1_ci": {"90": [0.7992982351897211, 0.8511725937669974], "95": [0.7930445979361251, 0.8559374070770146]}, "precision": 0.8444055944055944, "recall": 0.8104026845637584}, "product": {"f1": 0.6483790523690773, "f1_ci": {"90": [0.5959166297912384, 0.7033119235335302], "95": [0.5850000000000001, 0.7156025968817367]}, "precision": 0.7182320441988951, "recall": 0.5909090909090909}}}, "2021.test (span detection)": {"micro/f1": 0.7803730272596844, "micro/f1_ci": {}, "micro/recall": 0.7862842604371458, "micro/precision": 0.7745500113921167, "macro/f1": 0.7803730272596844, "macro/f1_ci": {}, "macro/recall": 0.7862842604371458, "macro/precision": 0.7745500113921167}, "2020.test (span detection)": {"micro/f1": 0.7425607425607427, "micro/f1_ci": {}, "micro/recall": 0.7057602490918526, "micro/precision": 0.783410138248848, "macro/f1": 0.7425607425607427, "macro/f1_ci": {}, "macro/recall": 0.7057602490918526, "macro/precision": 0.783410138248848}}
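The new eval/metric.json holds one JSON object keyed by evaluation split ("random.dev", "2021.test", "2020.test", plus two span-detection entries), each with micro/macro F1, recall, precision, and a per_entity_metric breakdown. A small sketch for reading out the per-entity F1 scores of one split, assuming the file is available locally:

    import json

    with open("eval/metric.json") as f:
        metric = json.load(f)

    # Per-entity F1 on the random.dev split, as stored under "per_entity_metric".
    for entity, scores in metric["random.dev"]["per_entity_metric"].items():
        print(f"{entity}: f1={scores['f1']:.3f}")

    # Aggregate scores sit alongside, e.g. micro/macro F1 on the 2021 test set.
    print(metric["2021.test"]["micro/f1"], metric["2021.test"]["macro/f1"])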
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.random.dev.json ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b3c20c47641a6f0aca24fc7fd8c7dc7c4c3b4d41a19ce17e67e433da0277f0d4
-size 496349169
+oid sha256:77f0c1044a21d90e54db6c260d5fec76005b05722336236c8872c11200731106
+size 496351921
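The pytorch_model.bin entries above are Git LFS pointer files (an oid plus a size of roughly 496 MB), not the weights themselves. A sketch for fetching the actual binary with huggingface_hub; the repo id is again a placeholder:

    from huggingface_hub import hf_hub_download

    # Placeholder repo id; the commit page does not show the hub repository name.
    path = hf_hub_download(repo_id="<namespace>/<model-name>", filename="pytorch_model.bin")
    print(path)  # local cache path of the downloaded weights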
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "name_or_path": "cner_output/model/random_split/roberta_base/best_model", "special_tokens_map_file": "cner_output/model/random_split/roberta_base/model_mwzvua/epoch_10/special_tokens_map.json", "tokenizer_class": "RobertaTokenizer"}
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "name_or_path": "cner_output/model/random_split/roberta_base/model_mwzvua/epoch_10", "special_tokens_map_file": "cner_output/model/random_split/roberta_base/model_mwzvua/epoch_10/special_tokens_map.json", "tokenizer_class": "RobertaTokenizer"}
trainer_config.json ADDED
@@ -0,0 +1 @@
+ {"data_split": "random.train", "model": "roberta-base", "crf": true, "max_length": 128, "epoch": 30, "batch_size": 32, "lr": 1e-05, "random_seed": 0, "gradient_accumulation_steps": 1, "weight_decay": 1e-07, "lr_warmup_step_ratio": 0.15, "max_grad_norm": 1}