HHansi committed
Commit
d8c50fc
1 Parent(s): 0edb1e0
added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<e1>": 30000, "</e1>": 30001, "<e2>": 30002, "</e2>": 30003}
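added_tokens.json registers the four entity-marker tokens (`<e1>`, `</e1>`, `<e2>`, `</e2>`) that delimit the two argument spans for relation classification. A minimal sketch of how such markers are typically added on top of albert-large-v2 (this is not taken from the repo's training code):

```python
from transformers import AlbertTokenizer, AlbertForSequenceClassification

tokenizer = AlbertTokenizer.from_pretrained("albert-large-v2")
# The base vocabulary has 30000 pieces, so the four markers receive
# ids 30000-30003, exactly as listed in added_tokens.json.
tokenizer.add_tokens(["<e1>", "</e1>", "<e2>", "</e2>"])

model = AlbertForSequenceClassification.from_pretrained(
    "albert-large-v2", num_labels=10
)
# Grow the embedding matrix to 30004 rows to cover the new ids
# (see vocab_size in config.json below).
model.resize_token_embeddings(len(tokenizer))
```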
config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "_name_or_path": "albert-large-v2",
+   "architectures": [
+     "AlbertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0,
+   "bos_token_id": 2,
+   "classifier_dropout_prob": 0.1,
+   "down_scale_factor": 1,
+   "embedding_size": 128,
+   "eos_token_id": 3,
+   "gap_size": 0,
+   "hidden_act": "gelu_new",
+   "hidden_dropout_prob": 0,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     "3": "LABEL_3",
+     "4": "LABEL_4",
+     "5": "LABEL_5",
+     "6": "LABEL_6",
+     "7": "LABEL_7",
+     "8": "LABEL_8",
+     "9": "LABEL_9"
+   },
+   "initializer_range": 0.02,
+   "inner_group_num": 1,
+   "intermediate_size": 4096,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2,
+     "LABEL_3": 3,
+     "LABEL_4": 4,
+     "LABEL_5": 5,
+     "LABEL_6": 6,
+     "LABEL_7": 7,
+     "LABEL_8": 8,
+     "LABEL_9": 9
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "albert",
+   "net_structure_type": 0,
+   "num_attention_heads": 16,
+   "num_hidden_groups": 1,
+   "num_hidden_layers": 24,
+   "num_memory_blocks": 0,
+   "pad_token_id": 0,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.16.2",
+   "type_vocab_size": 2,
+   "vocab_size": 30004
+ }
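The config is albert-large-v2 (24 layers, hidden size 1024) fine-tuned as a 10-way sequence classifier, with the vocabulary enlarged to 30004 for the entity markers. A hedged loading sketch; `path/to/checkpoint` is a placeholder for a local clone of this repo:

```python
from transformers import AlbertConfig, AlbertForSequenceClassification

config = AlbertConfig.from_pretrained("path/to/checkpoint")
assert config.num_labels == 10      # LABEL_0 .. LABEL_9
assert config.vocab_size == 30004   # 30000 base pieces + 4 entity markers

model = AlbertForSequenceClassification.from_pretrained("path/to/checkpoint")
```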
eval_results.txt ADDED
@@ -0,0 +1,22 @@
+ cls_report =               precision    recall  f1-score   support
+ 
+          0.0     0.8916    0.7957    0.8409        93
+          1.0     0.8537    0.8537    0.8537        82
+          2.0     0.6962    0.6875    0.6918        80
+          3.0     1.0000    1.0000    1.0000        57
+          4.0     0.7714    0.8308    0.8000        65
+          5.0     0.9494    0.9740    0.9615        77
+          6.0     0.9889    1.0000    0.9944        89
+          7.0     0.9474    0.9863    0.9664        73
+          8.0     1.0000    1.0000    1.0000        13
+          9.0     1.0000    1.0000    1.0000         9
+ 
+     accuracy                         0.8903       638
+    macro avg     0.9098    0.9128    0.9109       638
+ weighted avg     0.8903    0.8903    0.8897       638
+ 
+ eval_loss = 0.29622152000665664
+ macro_f1 = 0.9108786349245351
+ macro_p = 0.9098480303268863
+ macro_r = 0.9127954035974767
+ mcc = 0.8756280066894239
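eval_results.txt is a scikit-learn-style report on the 638-example dev set; the row labels 0.0-9.0 are the numeric ids from labels_map in model_args.json. A sketch of how such numbers are produced with scikit-learn (dummy arrays stand in for the real labels and predictions):

```python
from sklearn.metrics import classification_report, matthews_corrcoef

# Dummy stand-ins; in practice these come from the dev set and the model.
y_true = [0, 1, 2, 2, 3]
y_pred = [0, 1, 2, 1, 3]

# digits=4 matches the four-decimal formatting of the report above.
print(classification_report(y_true, y_pred, digits=4))
print("mcc =", matthews_corrcoef(y_true, y_pred))
```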
model_args.json ADDED
@@ -0,0 +1 @@
+ {"adam_epsilon": 1e-08, "best_model_dir": "outputs/best_model", "cache_dir": "temp/cache_dir", "config": {}, "custom_layer_parameters": [], "custom_parameter_groups": [], "dataloader_num_workers": 1, "do_lower_case": false, "dynamic_quantize": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 10, "encoding": null, "eval_batch_size": 64, "evaluate_during_training": true, "evaluate_during_training_silent": false, "evaluate_during_training_steps": 16, "evaluate_during_training_verbose": true, "evaluate_each_epoch": true, "fp16": false, "gradient_accumulation_steps": 1, "learning_rate": 1e-05, "local_rank": -1, "logging_steps": 16, "manual_seed": 157, "max_grad_norm": 1.0, "max_seq_length": 128, "model_name": "albert-large-v2", "model_type": "albert", "multiprocessing_chunksize": 500, "n_gpu": 1, "no_cache": false, "no_save": false, "not_saved_args": [], "num_train_epochs": 5, "output_dir": "temp/outputs", "overwrite_output_dir": true, "process_count": 70, "quantized_model": false, "reprocess_input_data": true, "save_best_model": true, "save_eval_checkpoints": false, "save_model_every_epoch": false, "save_optimizer_and_scheduler": true, "save_recent_only": true, "save_steps": 16, "silent": false, "tensorboard_dir": null, "thread_count": null, "train_batch_size": 16, "train_custom_parameters_only": false, "use_cached_eval_features": false, "use_early_stopping": true, "use_multiprocessing": true, "wandb_kwargs": {"name": "albert-large_1e-05_5_strain"}, "wandb_project": "relation-extraction", "warmup_ratio": 0.1, "warmup_steps": 180, "weight_decay": 0, "skip_special_tokens": true, "model_class": "REModel", "labels_list": ["selection", "necessity", "none", "greater", "part-of", "equal", "greater-equal", "less-equal", "not-part-of", "less"], "labels_map": {"selection": 0, "necessity": 1, "none": 2, "greater": 3, "part-of": 4, "equal": 5, "greater-equal": 6, "less-equal": 7, "not-part-of": 8, "less": 9}, "lazy_delimiter": "\t", "lazy_labels_column": 1, "lazy_loading": false, "lazy_loading_start_line": 1, "lazy_text_a_column": null, "lazy_text_b_column": null, "lazy_text_column": 0, "onnx": false, "regression": false, "sliding_window": false, "stride": 0.8, "tie_value": 1, "special_tags": ["<e1>", "<e2>"]}
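These are simpletransformers-style training arguments ("model_class": "REModel" points to a custom relation-extraction wrapper). labels_list fixes the id-to-relation mapping behind LABEL_0 .. LABEL_9 in config.json; a small sketch of recovering readable labels from it:

```python
import json

with open("model_args.json") as f:
    args = json.load(f)

# The order of labels_list defines the numeric ids used by the classifier head.
id2relation = dict(enumerate(args["labels_list"]))
print(id2relation[0])  # selection
print(id2relation[3])  # greater
```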
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08b2b8b1922f0a78ad21af68bce00894f1bc51ec4193da2c0b3b213f16661396
+ size 133257527
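Binary artifacts such as optimizer.pt are stored via Git LFS, so the diff shows only the three-line pointer file (spec version, SHA-256 object id, byte size), not the tensors themselves. A hypothetical helper for reading such a pointer:

```python
def parse_lfs_pointer(path):
    """Parse a Git LFS pointer file into its key/value fields."""
    with open(path) as f:
        lines = f.read().splitlines()
    # Each line is "<key> <value>", e.g. "size 133257527".
    return dict(line.split(" ", 1) for line in lines)

meta = parse_lfs_pointer("optimizer.pt")
print(meta["oid"], meta["size"])
```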
predictions.csv ADDED
The diff for this file is too large to render. See raw diff
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3706f27a792a5136e7c7cb27e35d6e90a899db8eb60ee604b3cec162cb0a59f4
+ size 70835589
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11f50369285b5cf5581b201e6927989d0a3510d8d20b496bcb574dc070dae8b1
+ size 627
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "<unk>", "sep_token": "[SEP]", "pad_token": "<pad>", "cls_token": "[CLS]", "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fefb02b667a6c5c2fe27602d28e5fb3428f66ab89c7d6f388e7c8d44a02d0336
+ size 760289
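spiece.model is the SentencePiece model inherited from albert-large-v2 (30000 base pieces; the four entity markers live outside it, in added_tokens.json). A minimal sketch of inspecting it with the sentencepiece library:

```python
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="spiece.model")
print(sp.get_piece_size())                    # 30000 base pieces
print(sp.encode("a part-of relation", out_type=str))
```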
test_eval.txt ADDED
@@ -0,0 +1,22 @@
+ Default classification report:
+                 precision    recall  f1-score   support
+ 
+         equal     0.6316    0.8000    0.7059        15
+       greater     0.8889    0.7273    0.8000        11
+ greater-equal     0.6333    0.9048    0.7451        21
+          less     1.0000    0.5000    0.6667         2
+    less-equal     0.7222    0.9286    0.8125        14
+     necessity     0.8654    0.8738    0.8696       206
+          none     0.7616    0.6550    0.7043       200
+   not-part-of     1.0000    0.5000    0.6667         2
+       part-of     0.7135    0.8148    0.7608       162
+     selection     0.8430    0.8069    0.8246       233
+ 
+      accuracy                         0.7910       866
+     macro avg     0.8060    0.7511    0.7556       866
+  weighted avg     0.7959    0.7910    0.7903       866
+ 
+ mcc = 0.7350731543361181
+ precision(macro) = 0.8059598755041998
+ recall(macro) = 0.7511074235977566
+ f1_score(macro) = 0.7556048338085604
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": false, "remove_space": true, "keep_accents": false, "bos_token": "[CLS]", "eos_token": "[SEP]", "unk_token": "<unk>", "sep_token": "[SEP]", "pad_token": "<pad>", "cls_token": "[CLS]", "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false, "__type": "AddedToken"}, "sp_model_kwargs": {}, "model_max_length": 512, "special_tokens_map_file": null, "tokenizer_file": "/home/hh2/.cache/huggingface/transformers/8f1144987c0a5fcedc8808300dc830a4a00787ceaccb85e9f913ef047103bd89.670e237d152dd53ef77575d4f4a6cd34158db03128fe4f63437ce0d5992bac74", "name_or_path": "albert-large-v2", "tokenizer_class": "AlbertTokenizer"}
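tokenizer_config.json keeps do_lower_case false and caps inputs at model_max_length 512 (training itself used max_seq_length 128 per model_args.json). A hedged sketch of loading the tokenizer from a local clone (`path/to/checkpoint` is a placeholder); the entity markers from added_tokens.json are picked up automatically:

```python
from transformers import AlbertTokenizer

tokenizer = AlbertTokenizer.from_pretrained("path/to/checkpoint")
print(tokenizer.do_lower_case)                  # False
print(tokenizer.convert_tokens_to_ids("<e1>"))  # 30000
```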
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c64c20dc4f191610afcb0f1fc8b18f8f868d61be3a2e3f281d4ff0309ef3a26b
+ size 2875
training_progress_scores.csv ADDED
@@ -0,0 +1,82 @@
+ global_step,mcc,train_loss,eval_loss,macro_f1,macro_r,macro_p
+ 16,-0.019900788671624974,2.9176669120788574,2.7577230453491213,0.05071276292650338,0.13962316593455917,0.03595888914663562
+ 32,0.01896735425446388,2.3855130672454834,2.482264995574951,0.0748952131580529,0.1274600623439576,0.12313109005641196
+ 48,0.09829335376244754,2.1113998889923096,2.189507818222046,0.15135405119337908,0.16912306775650054,0.17676773855086764
+ 64,0.14318611218609176,2.1108689308166504,1.997204637527466,0.18659279778646148,0.19589056262459326,0.2422504744328302
+ 80,0.22700313868350605,1.9506219625473022,1.808273184299469,0.22987451738697628,0.2500234041372756,0.242636392838173
+ 96,0.2836956952739408,1.957509160041809,1.6482650518417359,0.28411840633521984,0.2951348510502628,0.30867560171402697
+ 112,0.4294910491837687,1.543533205986023,1.5014062881469727,0.36948833652994345,0.40318000439382473,0.40829811495376084
+ 128,0.45758081110540144,1.5891720056533813,1.3822079658508302,0.39917943664346167,0.42457402259083066,0.41283652315502284
+ 144,0.5171626777555369,1.5926759243011475,1.241822397708893,0.4640387976449173,0.48053736813532566,0.4674273625900284
+ 160,0.5954350655538331,1.153721809387207,1.1381484627723695,0.5258990732525785,0.55085899506163,0.7055223712172689
+ 176,0.6129097232400027,0.7601088881492615,1.0453828275203705,0.5190504450469409,0.5509975627927032,0.5168341166412181
+ 192,0.6603719677301354,0.6856927275657654,0.9110442399978638,0.6406878652114656,0.6431273680889114,0.738420415093543
+ 208,0.6548644017806339,0.8756203651428223,0.9112367153167724,0.6413016073762968,0.6289320927441532,0.7573597350595849
+ 224,0.6750310639774599,0.5980286598205566,0.8420216798782348,0.7138423047758056,0.7217208617351695,0.7724306957628414
+ 240,0.7242136474993308,0.4550694525241852,0.7409556448459625,0.8003046787389492,0.7963632305297071,0.8196498660653072
+ 256,0.7484707184910141,1.2159749269485474,0.6843716263771057,0.7996268383157805,0.8133672831271639,0.8057709554592968
+ 272,0.755656498566965,0.6610125303268433,0.6560277760028839,0.795916130121963,0.7940367314640797,0.8122740702489061
+ 288,0.7608047622438662,0.6614609360694885,0.615761011838913,0.8110027050134596,0.8230651287865743,0.8119978966406324
+ 304,0.7933651206298179,0.44056111574172974,0.5634822160005569,0.8401253771057338,0.8453069371498371,0.843808872245094
+ 320,0.7737251896995814,1.207240104675293,0.58423031270504,0.8289580994027574,0.8330018337451147,0.8450762680235983
+ 336,0.7687928002357585,0.33741188049316406,0.5619922488927841,0.8327610625288088,0.8310542131774772,0.857223531967365
+ 352,0.7768223211883698,0.8026289939880371,0.5618907034397125,0.8155645127607845,0.8266944536571217,0.8384632584905083
+ 359,0.7738073256233085,0.22649285197257996,0.5317420870065689,0.8257931810230964,0.825830051629046,0.8359794684664543
+ 368,0.7853065853318998,0.6343364715576172,0.5414443105459213,0.8421400177782971,0.8448402020201542,0.851965172867797
+ 384,0.8131615482488691,0.48215845227241516,0.5015912175178527,0.853923963685767,0.86618897378653,0.8516284347219265
+ 400,0.7987150652305356,0.3759557604789734,0.5017808675765991,0.8546504656427414,0.8528646081464316,0.8657968914774014
+ 416,0.8213196416784109,0.3108811378479004,0.4750411927700043,0.8680908218289514,0.872958589009509,0.8685816294375963
+ 432,0.8153876244953852,0.3858645558357239,0.4663199096918106,0.8653778990518471,0.8689995010549019,0.8641539321888946
+ 448,0.8150308366882459,0.8411672115325928,0.4780208468437195,0.853830524115628,0.8613227449497429,0.8674912389279423
+ 464,0.8242963738145804,0.5327943563461304,0.43986133933067323,0.8690428867744844,0.8731477255852489,0.8677793450466247
+ 480,0.8271484349412196,0.290338933467865,0.4351601868867874,0.8689439392549687,0.8754449761588192,0.8709552058934349
+ 496,0.8330253992561918,0.23187156021595,0.4328933447599411,0.8767578507309217,0.8798044992551096,0.8755133371302589
+ 512,0.8152051679439581,0.2671823501586914,0.44846763014793395,0.8641217464919302,0.8707846175260061,0.8660044749496606
+ 528,0.8470256738353205,0.28651538491249084,0.40750558078289034,0.8885802335693951,0.8897340210265934,0.8882710217696754
+ 544,0.8165328111672656,0.5753006339073181,0.4587712585926056,0.8596182086467271,0.8696116948016244,0.8680808585684596
+ 560,0.8357182048841524,0.2809944748878479,0.43095352947711946,0.8810415058828012,0.8787726745583854,0.8920368507554048
+ 576,0.8300992334337708,0.3200606405735016,0.4270805954933167,0.8707456906153513,0.8787847608086874,0.8756809357561522
+ 592,0.8419963000661477,0.3049326241016388,0.42966145277023315,0.8764692210330349,0.8847992132286029,0.8815115215245486
+ 608,0.8322769068073239,0.5528422594070435,0.4114215701818466,0.8756250411263566,0.8804214986114793,0.877317375072673
+ 624,0.8514128347890603,0.06622407585382462,0.3739346921443939,0.888728298193312,0.893441496197463,0.8887670245338384
+ 640,0.8505453763174895,0.5189220905303955,0.41107386648654937,0.8886232962320854,0.8952018134369191,0.890736422377872
+ 656,0.8448224093483313,0.08478119969367981,0.42175557017326354,0.8784267588337968,0.8818743707881669,0.8830882746103874
+ 672,0.8294473860984356,0.5338335037231445,0.42437698841094973,0.875568106918943,0.8793488389457107,0.8813900066144301
+ 688,0.8561134499433378,0.24268218874931335,0.37197435200214385,0.8949540643508354,0.8953790207585521,0.8973693835104182
+ 704,0.8538201001274893,0.17651621997356415,0.35765143036842345,0.8903634478476811,0.8969886782243741,0.8922449420086338
+ 718,0.8616138139415771,0.26507410407066345,0.3492047190666199,0.8965492491068154,0.9001786879192896,0.8954349737698635
+ 720,0.859816109304192,0.22566919028759003,0.3461580514907837,0.895485066444796,0.8995015827663781,0.8936898420616399
+ 736,0.856320120376356,0.12201298773288727,0.35456290692090986,0.8920967168585445,0.8939825088128543,0.8935556744745312
+ 752,0.8685088889783572,0.1193850189447403,0.34843851774930956,0.9060057925994773,0.906605904893933,0.9066781586508771
+ 768,0.8505252312057967,0.08080403506755829,0.3534913003444672,0.8878580789112451,0.8947015247729684,0.8906146543540869
+ 784,0.868641818643933,0.35239607095718384,0.34798774868249893,0.9054448615988313,0.9046475126673489,0.9082227723737422
+ 800,0.8565935278986617,0.26940011978149414,0.37316794097423556,0.8907160355745715,0.898878037601578,0.8960592508769538
+ 816,0.8667616248713915,0.09688262641429901,0.33822707533836366,0.9020135352226791,0.9039409370129707,0.9015900168471852
+ 832,0.8477805582813888,0.2700226306915283,0.38478106558322905,0.8893493368774574,0.8922371789261113,0.890447245566272
+ 848,0.8683479795235767,0.19187584519386292,0.35812396705150606,0.903683007388356,0.9037283528384,0.9048823232089684
+ 864,0.8617568673131343,0.2557620406150818,0.35592348873615265,0.901365440398805,0.9023120573583752,0.9028591991429611
+ 880,0.8830525217373529,0.5907897353172302,0.33398967534303664,0.9136785857394418,0.9170354276044165,0.9131417891859759
+ 896,0.8703915209617685,0.10421495884656906,0.34161652624607086,0.9052505899558054,0.9080539728756131,0.9039757311053709
+ 912,0.8755140731029062,0.09095887094736099,0.3315072670578957,0.9100782387608503,0.9118385051327105,0.9088789348539988
+ 928,0.8702370232321267,0.27618253231048584,0.34342073649168015,0.9057481374240222,0.9076273916222775,0.9047699473428705
+ 944,0.8651860967680773,0.7244361639022827,0.35274043679237366,0.8999830848436844,0.9031007036192765,0.8993765164766788
+ 960,0.8722821491156563,0.2438454031944275,0.3312693417072296,0.9068053859574287,0.9059906465920904,0.9109508074959727
+ 976,0.8572821382831003,0.08759821206331253,0.35559146404266356,0.8919726651102342,0.8995261980456197,0.9014741658101159
+ 992,0.8825244293210537,0.16793879866600037,0.32445732057094573,0.9145697340227901,0.9144920537215462,0.9150266854989113
+ 1008,0.8757666359396403,0.04808886721730232,0.314313443005085,0.9082522208698895,0.9107901684706838,0.9077443978171142
+ 1024,0.8784305300230224,0.18677453696727753,0.3252834931015968,0.9102008364516664,0.9145795141237194,0.9119659012525279
+ 1040,0.8807611503143051,0.06716598570346832,0.3063933774828911,0.9143307282750964,0.914617471838703,0.9143120891789621
+ 1056,0.8743104735388003,0.2238241732120514,0.33053135573863984,0.9090281351321338,0.9066982899846275,0.9165271566361792
+ 1072,0.888106119206211,0.21250680088996887,0.2987434506416321,0.9183699460066217,0.920549796463986,0.9173677936715098
+ 1077,0.8756280066894239,0.04205189272761345,0.29622152000665664,0.9108786349245351,0.9127954035974767,0.9098480303268863
+ 1088,0.8687287650779049,0.12630878388881683,0.30217102319002154,0.9051151466095939,0.9077670039984123,0.9044123226600869
+ 1104,0.8828641157820302,0.09195571392774582,0.30920580625534055,0.9137277327325304,0.913825670788692,0.9160260207564879
+ 1120,0.8814150044751581,0.03218909725546837,0.30503893047571184,0.9127879585969332,0.91636853377412,0.9126454378125548
+ 1136,0.8809861872033495,0.03695806488394737,0.3044722229242325,0.9136817229985269,0.9127635827807676,0.916612427894686
+ 1152,0.8636863412597975,0.015260403975844383,0.32104508876800536,0.8997236896955869,0.9029344628142411,0.9002738578763487
+ 1168,0.8756009338548387,0.17928747832775116,0.31845082640647887,0.9094918052529886,0.9109435139932993,0.909009054075484
+ 1184,0.8795419097394208,0.09307418018579483,0.31228183060884473,0.9113221253273766,0.9150407452849277,0.9105620677934209
+ 1200,0.8794949344842061,0.33760514855384827,0.31626777946949003,0.9125820216988663,0.9151627217218138,0.9124981332866738
+ 1216,0.8778307685945126,0.08208451420068741,0.31151695996522905,0.9106828925261976,0.9142180213570852,0.9103753878288503
+ 1232,0.8790235585307142,0.06618580222129822,0.3093097746372223,0.9121660023433638,0.9130687899826373,0.9119481190701795
+ 1248,0.8790280541710288,0.04112425446510315,0.3102340966463089,0.9131189586365558,0.9131611346781229,0.9136048979103062
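The CSV logs one row per evaluation (every 16 steps per evaluate_during_training_steps in model_args.json, plus end-of-epoch rows such as 359, 718, and 1077). Since early_stopping_metric is eval_loss (minimized), the saved best model corresponds to the minimum-eval_loss row; a short pandas sketch to locate it:

```python
import pandas as pd

scores = pd.read_csv("training_progress_scores.csv")

# Step 1077, eval_loss ~0.29622 -- the same values reported in eval_results.txt.
best = scores.loc[scores["eval_loss"].idxmin()]
print(best[["global_step", "eval_loss", "macro_f1", "mcc"]])
```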