asahi417 committed
Commit ed88a7b
1 Parent(s): 5a6c40c

model update

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "cner_output/model/baseline_2021/bertweet_large/best_model",
+  "_name_or_path": "vinai/bertweet-large",
   "architectures": [
     "RobertaForTokenClassification"
   ],
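For reference, a minimal sketch of loading this checkpoint for token classification with `transformers`. The repo id below is a placeholder (the actual hub id is not part of this diff); a local clone of this repository works just as well:

```python
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

# Placeholder repo id -- substitute the real hub id or a local clone of this repository.
model_id = "asahi417/bertweet-large-ner"

config = AutoConfig.from_pretrained(model_id)        # RobertaForTokenClassification, see config.json
tokenizer = AutoTokenizer.from_pretrained(model_id)  # RobertaTokenizer, see tokenizer_config.json
model = AutoModelForTokenClassification.from_pretrained(model_id)

print(config.architectures)  # ['RobertaForTokenClassification']
print(config.id2label)       # label mapping carried in config.json (default labels if none is stored)
```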
eval/metric.json ADDED
@@ -0,0 +1 @@
+ {"2021.dev": {"micro/f1": 0.6263627353815658, "micro/f1_ci": {}, "micro/recall": 0.632, "micro/precision": 0.6208251473477406, "macro/f1": 0.5851732462052094, "macro/f1_ci": {}, "macro/recall": 0.5802751033017407, "macro/precision": 0.593621347021893, "per_entity_metric": {"corporation": {"f1": 0.5463917525773196, "f1_ci": {}, "precision": 0.5760869565217391, "recall": 0.5196078431372549}, "creative_work": {"f1": 0.4852941176470588, "f1_ci": {}, "precision": 0.532258064516129, "recall": 0.44594594594594594}, "event": {"f1": 0.3858267716535433, "f1_ci": {}, "precision": 0.3983739837398374, "recall": 0.37404580152671757}, "group": {"f1": 0.5991902834008097, "f1_ci": {}, "precision": 0.5543071161048689, "recall": 0.6519823788546255}, "location": {"f1": 0.6619718309859155, "f1_ci": {}, "precision": 0.6714285714285714, "recall": 0.6527777777777778}, "person": {"f1": 0.8109028960817718, "f1_ci": {}, "precision": 0.7828947368421053, "recall": 0.8409893992932862}, "product": {"f1": 0.6066350710900474, "f1_ci": {}, "precision": 0.64, "recall": 0.5765765765765766}}}, "2021.test": {"micro/f1": 0.6289961994187345, "micro/f1_ci": {"90": [0.6203286893040556, 0.6380714998883444], "95": [0.618788856392642, 0.6396755249087301]}, "micro/recall": 0.6506706753006476, "micro/precision": 0.6087191691908265, "macro/f1": 0.5812541319576294, "macro/f1_ci": {"90": [0.5718189466207007, 0.5907047023159696], "95": [0.570148090704244, 0.5931380949842975]}, "macro/recall": 0.5939651947848222, "macro/precision": 0.5735814298740157, "per_entity_metric": {"corporation": {"f1": 0.48868253047011023, "f1_ci": {"90": [0.4622326197034712, 0.5141309821972517], "95": [0.45822312966462947, 0.5197904004107121]}, "precision": 0.511543134872418, "recall": 0.4677777777777778}, "creative_work": {"f1": 0.42327272727272724, "f1_ci": {"90": [0.3913589008836675, 0.4552146375791695], "95": [0.3850330909993861, 0.4622114861898092]}, "precision": 0.4518633540372671, "recall": 0.39808481532147744}, "event": {"f1": 0.44868301544050865, "f1_ci": {"90": [0.42517128763823137, 0.47089546185055303], "95": [0.4216687641767551, 0.47516390056355695]}, "precision": 0.44786944696282865, "recall": 0.4494995450409463}, "group": {"f1": 0.5640022358859699, "f1_ci": {"90": [0.5448494589371451, 0.5843005073412731], "95": [0.5421339693259705, 0.5878924507524776]}, "precision": 0.48980582524271843, "recall": 0.6646903820816864}, "location": {"f1": 0.6620973269362577, "f1_ci": {"90": [0.6352623534621973, 0.690561115703398], "95": [0.6289274269426315, 0.6947830874588952]}, "precision": 0.6500672947510094, "recall": 0.6745810055865922}, "person": {"f1": 0.8104852993269572, "f1_ci": {"90": [0.799557380100791, 0.8217653397796916], "95": [0.7971684255110281, 0.8235506568956528]}, "precision": 0.7798227675528289, "recall": 0.8436578171091446}, "product": {"f1": 0.6715557883708747, "f1_ci": {"90": [0.6500796119207709, 0.6930743533068904], "95": [0.6454759597916919, 0.6962197964351413]}, "precision": 0.6840981856990395, "recall": 0.6594650205761317}}}, "2020.test": {"micro/f1": 0.6161290322580646, "micro/f1_ci": {"90": [0.5949692044423199, 0.6348133495240706], "95": [0.5906211738918474, 0.6387329419785034]}, "micro/recall": 0.5947067981318112, "micro/precision": 0.6391522587841606, "macro/f1": 0.5683990688463066, "macro/f1_ci": {"90": [0.5462082120303768, 0.5875745777633213], "95": [0.5437194741427591, 0.5916963455087217]}, "macro/recall": 0.5400218465464566, "macro/precision": 0.6097755448151788, "per_entity_metric": {"corporation": {"f1": 0.5423728813559322, "f1_ci": 
{"90": [0.48275862068965514, 0.5929934808499968], "95": [0.47023042714482405, 0.6019455599044377]}, "precision": 0.588957055214724, "recall": 0.5026178010471204}, "creative_work": {"f1": 0.4083044982698961, "f1_ci": {"90": [0.346557894572335, 0.46643109540636035], "95": [0.3343989071038251, 0.47619747899159665]}, "precision": 0.5363636363636364, "recall": 0.329608938547486}, "event": {"f1": 0.4333996023856858, "f1_ci": {"90": [0.38188421268382666, 0.4827709579914872], "95": [0.37525349684513126, 0.4931558836526215]}, "precision": 0.4579831932773109, "recall": 0.41132075471698115}, "group": {"f1": 0.5029761904761905, "f1_ci": {"90": [0.4562762107198253, 0.5490223970100132], "95": [0.44656489065498883, 0.5570021645021644]}, "precision": 0.46814404432132967, "recall": 0.5434083601286174}, "location": {"f1": 0.6455696202531647, "f1_ci": {"90": [0.5733723179930872, 0.7066023126161469], "95": [0.5611300842594408, 0.7190768823321695]}, "precision": 0.6754966887417219, "recall": 0.6181818181818182}, "person": {"f1": 0.8156797331109257, "f1_ci": {"90": [0.7894149420580739, 0.8392227413778143], "95": [0.7836139407658933, 0.842927744644483]}, "precision": 0.8109452736318408, "recall": 0.8204697986577181}, "product": {"f1": 0.6304909560723515, "f1_ci": {"90": [0.5734299459382219, 0.6784810126582278], "95": [0.5633052659716654, 0.6857142857142857]}, "precision": 0.7305389221556886, "recall": 0.5545454545454546}}}, "2021.test (span detection)": {"micro/f1": 0.7649656251746688, "micro/f1_ci": {}, "micro/recall": 0.7913727304267376, "micro/precision": 0.7402639549978365, "macro/f1": 0.7649656251746688, "macro/f1_ci": {}, "macro/recall": 0.7913727304267376, "macro/precision": 0.7402639549978365}, "2020.test (span detection)": {"micro/f1": 0.7446236559139785, "micro/f1_ci": {}, "micro/recall": 0.7187337830825117, "micro/precision": 0.7724484104852203, "macro/f1": 0.7446236559139785, "macro/f1_ci": {}, "macro/recall": 0.7187337830825117, "macro/precision": 0.7724484104852203}}
eval/prediction.2020.test.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.dev.json ADDED
The diff for this file is too large to render. See raw diff
 
eval/prediction.2021.test.json ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b097c4b90b3afe38de867cff8c4ca42f154a413a6944d54cfce51a781014bb7c
-size 1417461873
+oid sha256:697a7a2a8ddeef589c00436d8d119b282df5ad2a2c921a1a68d1ab9be77aeba4
+size 1417467377
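The weights are tracked with Git LFS, so the file in the diff is only a pointer (oid and size). A small sketch, using just the standard library, for checking that a downloaded pytorch_model.bin matches the new pointer values above:

```python
import hashlib
import os

expected_sha256 = "697a7a2a8ddeef589c00436d8d119b282df5ad2a2c921a1a68d1ab9be77aeba4"
expected_size = 1417467377

path = "pytorch_model.bin"  # the resolved LFS object, not the pointer file

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_sha256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```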
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "name_or_path": "cner_output/model/baseline_2021/bertweet_large/best_model", "special_tokens_map_file": "cner_output/model/baseline_2021/bertweet_large/best_model/special_tokens_map.json", "tokenizer_class": "RobertaTokenizer"}
+{"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "name_or_path": "vinai/bertweet-large", "tokenizer_class": "RobertaTokenizer"}
trainer_config.json ADDED
@@ -0,0 +1 @@
+ {"data_split": "2021.train", "model": "vinai/bertweet-large", "crf": true, "max_length": 128, "epoch": 30, "batch_size": 32, "lr": 0.0001, "random_seed": 0, "gradient_accumulation_steps": 1, "weight_decay": 1e-07, "lr_warmup_step_ratio": 0.3, "max_grad_norm": 1}