asahi417 committed
Commit 2d662e4
1 parent: 2dde7bb

model update

config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "roberta_large/best_model",
+ "_name_or_path": "roberta-large",
  "architectures": [
  "RobertaForTokenClassification"
  ],
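The config.json change only rewrites "_name_or_path" from the local fine-tuning directory ("roberta_large/best_model") to the public roberta-large checkpoint. For reference, a minimal sketch of loading the fine-tuned model for inference with transformers; MODEL_ID is a placeholder for this model's Hub repository id, which this commit does not name:

from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

MODEL_ID = "path-or-repo-id-of-this-model"  # placeholder, not taken from this commit

# config.json declares RobertaForTokenClassification, so the Auto classes
# resolve to the RoBERTa token-classification head.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForTokenClassification.from_pretrained(MODEL_ID)

# Group sub-word predictions back into labelled entity spans.
ner = pipeline("token-classification", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
print(ner("Jacob Collier is a Grammy awarded artist from London."))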
eval/metric.json ADDED
@@ -0,0 +1 @@
+ {"2020.dev": {"micro/f1": 0.6474548882305414, "micro/f1_ci": {"90": [0.6251332062213224, 0.6670476285795741], "95": [0.6205911794997434, 0.67120696522025]}, "micro/recall": 0.6280041797283177, "micro/precision": 0.6681489716509171, "macro/f1": 0.5911775809856791, "macro/f1_ci": {"90": [0.5681715870342675, 0.6122873634693781], "95": [0.5640485681333156, 0.6169323276234078]}, "macro/recall": 0.5810153156344426, "macro/precision": 0.6063396515514192, "per_entity_metric": {"corporation": {"f1": 0.4831168831168831, "f1_ci": {"90": [0.4237219975462353, 0.535817575083426], "95": [0.41321427780116543, 0.5432772435897437]}, "precision": 0.510989010989011, "recall": 0.458128078817734}, "creative_work": {"f1": 0.5185185185185185, "f1_ci": {"90": [0.4568348318348318, 0.5721930646672915], "95": [0.4481858684738956, 0.5821889671361504]}, "precision": 0.5329949238578681, "recall": 0.5048076923076923}, "event": {"f1": 0.3955056179775281, "f1_ci": {"90": [0.3372904383491412, 0.45154347784144616], "95": [0.32790198679835547, 0.4629234936043549]}, "precision": 0.4656084656084656, "recall": 0.34375}, "group": {"f1": 0.5649202733485195, "f1_ci": {"90": [0.5100676577091672, 0.6154024216524216], "95": [0.4986442357498367, 0.6242171747519663]}, "precision": 0.5849056603773585, "recall": 0.5462555066079295}, "location": {"f1": 0.6565656565656566, "f1_ci": {"90": [0.5999727520435967, 0.7094007639969646], "95": [0.5914786967418546, 0.7182328994406718]}, "precision": 0.6046511627906976, "recall": 0.7182320441988951}, "person": {"f1": 0.8721934369602763, "f1_ci": {"90": [0.8487710408794229, 0.891500904159132], "95": [0.8433924577649624, 0.8951823117210236]}, "precision": 0.9017857142857143, "recall": 0.8444816053511706}, "product": {"f1": 0.6474226804123713, "f1_ci": {"90": [0.6024459556836606, 0.6881334194543706], "95": [0.5968071283726847, 0.6960387877928365]}, "precision": 0.6434426229508197, "recall": 0.6514522821576764}}}, "2021.test": {"micro/f1": 0.6462470548636822, "micro/f1_ci": {"90": [0.6372260846745648, 0.6561176673813233], "95": [0.6356931310787601, 0.65793895225239]}, "micro/recall": 0.666049953746531, "micro/precision": 0.6275877097406842, "macro/f1": 0.5928409160028406, "macro/f1_ci": {"90": [0.5834082661200564, 0.6027369680005952], "95": [0.5818688353807716, 0.605082698131854]}, "macro/recall": 0.6189640265396619, "macro/precision": 0.5724916013944144, "per_entity_metric": {"corporation": {"f1": 0.5072463768115941, "f1_ci": {"90": [0.4841311533759873, 0.5308367147024602], "95": [0.4780501760108873, 0.5353895110824435]}, "precision": 0.4748062015503876, "recall": 0.5444444444444444}, "creative_work": {"f1": 0.4475439660400243, "f1_ci": {"90": [0.417735935143208, 0.4789241308307595], "95": [0.4129394441748634, 0.483871457986077]}, "precision": 0.4019607843137255, "recall": 0.5047879616963065}, "event": {"f1": 0.4441147378832839, "f1_ci": {"90": [0.42026714581758057, 0.4666994267703445], "95": [0.4166656358238495, 0.4719287082649152]}, "precision": 0.48645720476706394, "recall": 0.40855323020928114}, "group": {"f1": 0.6164611796067978, "f1_ci": {"90": [0.5955383856233197, 0.6392795789974397], "95": [0.5925303118049978, 0.6432657618645466]}, "precision": 0.6237356709372893, "recall": 0.6093544137022397}, "location": {"f1": 0.6525369299935774, "f1_ci": {"90": [0.6236680208089783, 0.6789095769653518], "95": [0.6183311085024601, 0.6837810101906017]}, "precision": 0.6040428061831153, "recall": 0.7094972067039106}, "person": {"f1": 0.8447562511243031, "f1_ci": {"90": [0.8347581551573588, 0.8550080856877156], 
"95": [0.8330373704042398, 0.857357295533503]}, "precision": 0.8247277836318933, "recall": 0.8657817109144543}, "product": {"f1": 0.6372269705603039, "f1_ci": {"90": [0.6168343256834969, 0.65883557479466], "95": [0.6118048570481955, 0.6628747232460417]}, "precision": 0.591710758377425, "recall": 0.6903292181069959}}}, "2020.test": {"micro/f1": 0.6549676025917925, "micro/f1_ci": {"90": [0.6352459708873094, 0.6732379144840148], "95": [0.6307727388454223, 0.6765402395650086]}, "micro/recall": 0.6294758692267773, "micro/precision": 0.6826111423747889, "macro/f1": 0.6138147315351224, "macro/f1_ci": {"90": [0.59154314177607, 0.632968570392166], "95": [0.5880351346026891, 0.6375830372418823]}, "macro/recall": 0.5969575608532809, "macro/precision": 0.6357805538496937, "per_entity_metric": {"corporation": {"f1": 0.5618556701030927, "f1_ci": {"90": [0.5043428993718438, 0.6161616161616161], "95": [0.4929799612152553, 0.6258776567621869]}, "precision": 0.5532994923857868, "recall": 0.5706806282722513}, "creative_work": {"f1": 0.5054945054945056, "f1_ci": {"90": [0.4477227045289312, 0.5593776914979425], "95": [0.4360349193893565, 0.5691652270109716]}, "precision": 0.4972972972972973, "recall": 0.5139664804469274}, "event": {"f1": 0.45161290322580644, "f1_ci": {"90": [0.3991120764639928, 0.5042428150331614], "95": [0.38859196675315893, 0.5133542561134965]}, "precision": 0.525, "recall": 0.39622641509433965}, "group": {"f1": 0.5842293906810035, "f1_ci": {"90": [0.5388991248272684, 0.6301894841522088], "95": [0.5320783674869244, 0.6394850127551021]}, "precision": 0.659919028340081, "recall": 0.5241157556270096}, "location": {"f1": 0.6786786786786787, "f1_ci": {"90": [0.6159137901932652, 0.7348132862943487], "95": [0.6036022709099632, 0.7437546327493262]}, "precision": 0.6726190476190477, "recall": 0.6848484848484848}, "person": {"f1": 0.8314606741573034, "f1_ci": {"90": [0.8036569096349134, 0.85612247777621], "95": [0.7982603184486745, 0.8602760969440842]}, "precision": 0.857397504456328, "recall": 0.8070469798657718}, "product": {"f1": 0.683371298405467, "f1_ci": {"90": [0.6351726068296324, 0.7309063535079794], "95": [0.6238927904522206, 0.7402053274139844]}, "precision": 0.684931506849315, "recall": 0.6818181818181818}}}, "2021.test (span detection)": {"micro/f1": 0.7861037719939289, "micro/f1_ci": {}, "micro/recall": 0.8086041401642188, "micro/precision": 0.7648217020345658, "macro/f1": 0.7861037719939289, "macro/f1_ci": {}, "macro/recall": 0.8086041401642188, "macro/precision": 0.7648217020345658}, "2020.test (span detection)": {"micro/f1": 0.7624966225344502, "micro/f1_ci": {}, "micro/recall": 0.7322262584327971, "micro/precision": 0.7953776775648252, "macro/f1": 0.7624966225344502, "macro/f1_ci": {}, "macro/recall": 0.7322262584327971, "macro/precision": 0.7953776775648252}, "2020.dev (span detection)": {"micro/f1": 0.7686084142394823, "micro/f1_ci": {}, "micro/recall": 0.7445141065830722, "micro/precision": 0.794314381270903, "macro/f1": 0.7686084142394823, "macro/f1_ci": {}, "macro/recall": 0.7445141065830722, "macro/precision": 0.794314381270903}}
eval/prediction.2020.dev.json ADDED
The diff for this file is too large to render.
 
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render.
 
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render.
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cf7a566c776e67b7048827bb20ebc92881a51dbb1febdbf40bebd61053639fa8
- size 1417461873
+ oid sha256:9390992876684900baea558cc24c4458c5867142f3e3c1fd9dd0ef2e1bc7d191
+ size 1417467377
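pytorch_model.bin is stored with Git LFS, so the diff only swaps the pointer file's object hash and byte size. A minimal sketch for verifying a locally downloaded copy of the weights against the new pointer; the local path is an assumption:

import hashlib
import os

# Expected values copied from the updated LFS pointer above.
EXPECTED_SHA256 = "9390992876684900baea558cc24c4458c5867142f3e3c1fd9dd0ef2e1bc7d191"
EXPECTED_SIZE = 1417467377

path = "pytorch_model.bin"  # assumed download location
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_SHA256, "hash mismatch"
print("pytorch_model.bin matches the LFS pointer")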
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "name_or_path": "roberta_large/best_model", "special_tokens_map_file": "roberta_large/best_model/special_tokens_map.json", "tokenizer_class": "RobertaTokenizer"}
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "name_or_path": "roberta-large", "tokenizer_class": "RobertaTokenizer"}