liuyanchen1015 committed on
Commit
e7a84b2
1 Parent(s): 12e442f

Training in progress, step 2500

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,113 @@
+ {
+   "_name_or_path": "liuyanchen1015/FLAN-T5_GLUE_finetuning_lr3e-4",
+   "adapters": {
+     "adapters": {
+       "been_done": "9076f36a74755ac4",
+       "dey_it": "9076f36a74755ac4",
+       "drop_aux": "9076f36a74755ac4",
+       "got": "9076f36a74755ac4",
+       "lexical": "9076f36a74755ac4",
+       "negative_concord": "9076f36a74755ac4",
+       "negative_inversion": "9076f36a74755ac4",
+       "null_genetive": "9076f36a74755ac4",
+       "null_relcl": "9076f36a74755ac4",
+       "uninflect": "9076f36a74755ac4"
+     },
+     "config_map": {
+       "9076f36a74755ac4": {
+         "adapter_residual_before_ln": false,
+         "cross_adapter": false,
+         "factorized_phm_W": true,
+         "factorized_phm_rule": false,
+         "hypercomplex_nonlinearity": "glorot-uniform",
+         "init_weights": "bert",
+         "inv_adapter": null,
+         "inv_adapter_reduction_factor": null,
+         "is_parallel": false,
+         "learn_phm": true,
+         "leave_out": [],
+         "ln_after": false,
+         "ln_before": false,
+         "mh_adapter": false,
+         "non_linearity": "relu",
+         "original_ln_after": true,
+         "original_ln_before": true,
+         "output_adapter": true,
+         "phm_bias": true,
+         "phm_c_init": "normal",
+         "phm_dim": 4,
+         "phm_init_range": 0.0001,
+         "phm_layer": false,
+         "phm_rank": 1,
+         "reduction_factor": 16,
+         "residual_before_ln": true,
+         "scaling": 1.0,
+         "shared_W_phm": false,
+         "shared_phm_rule": true,
+         "use_gating": false
+       }
+     },
+     "fusion_config_map": {},
+     "fusions": {
+       "been_done,dey_it,drop_aux,got,lexical,negative_concord,negative_inversion,null_genetive,null_relcl,uninflect": "dynamic"
+     }
+   },
+   "architectures": [
+     "T5ForConditionalGeneration"
+   ],
+   "d_ff": 2048,
+   "d_kv": 64,
+   "d_model": 768,
+   "decoder_start_token_id": 0,
+   "dense_act_fn": "gelu_new",
+   "dropout_rate": 0.1,
+   "eos_token_id": 1,
+   "feed_forward_proj": "gated-gelu",
+   "initializer_factor": 1.0,
+   "is_encoder_decoder": true,
+   "is_gated_act": true,
+   "layer_norm_epsilon": 1e-06,
+   "model_type": "t5",
+   "n_positions": 512,
+   "num_decoder_layers": 12,
+   "num_heads": 12,
+   "num_layers": 12,
+   "output_past": true,
+   "pad_token_id": 0,
+   "relative_attention_max_distance": 128,
+   "relative_attention_num_buckets": 32,
+   "task_specific_params": {
+     "summarization": {
+       "early_stopping": true,
+       "length_penalty": 2.0,
+       "max_length": 200,
+       "min_length": 30,
+       "no_repeat_ngram_size": 3,
+       "num_beams": 4,
+       "prefix": "summarize: "
+     },
+     "translation_en_to_de": {
+       "early_stopping": true,
+       "max_length": 300,
+       "num_beams": 4,
+       "prefix": "translate English to German: "
+     },
+     "translation_en_to_fr": {
+       "early_stopping": true,
+       "max_length": 300,
+       "num_beams": 4,
+       "prefix": "translate English to French: "
+     },
+     "translation_en_to_ro": {
+       "early_stopping": true,
+       "max_length": 300,
+       "num_beams": 4,
+       "prefix": "translate English to Romanian: "
+     }
+   },
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.26.1",
+   "use_cache": true,
+   "vocab_size": 32128
+ }
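
Note: the "adapters" block above declares ten dialect adapters, all sharing one Pfeiffer-style bottleneck configuration (key 9076f36a74755ac4), plus a dynamic AdapterFusion composed over all of them. A minimal loading sketch, assuming the adapter-transformers fork of transformers is installed (it is what writes these "adapters"/"fusions" fields) and using a placeholder repo id for this checkpoint:

import transformers.adapters.composition as ac
from transformers import AutoTokenizer, T5ForConditionalGeneration

repo = "<this-repo-id>"  # placeholder: the repository this commit pushes to
tokenizer = AutoTokenizer.from_pretrained(repo)
# Under adapter-transformers, from_pretrained should also rebuild the adapter
# and fusion modules declared in config.json and load their weights.
model = T5ForConditionalGeneration.from_pretrained(repo)

# Activate the fusion over all ten dialect adapters for inference.
model.set_active_adapters(
    ac.Fuse(
        "been_done", "dey_it", "drop_aux", "got", "lexical",
        "negative_concord", "negative_inversion", "null_genetive",
        "null_relcl", "uninflect",
    )
)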
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5dae418a8a7f072fd8dd85964426baecc2f67e51a28d152d4ea0280b3d8eaf4
+ size 1232387711
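
pytorch_model.bin is tracked with Git LFS, so only the pointer above is committed; the roughly 1.2 GB checkpoint is fetched on clone or `git lfs pull`. A small sketch for checking the materialized file against the pointer's size and sha256 (assumes the file sits in the current directory):

import hashlib
import os

path = "pytorch_model.bin"
expected_sha256 = "d5dae418a8a7f072fd8dd85964426baecc2f67e51a28d152d4ea0280b3d8eaf4"
expected_size = 1232387711

# Hash in 1 MiB chunks so the full checkpoint is never held in memory.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch with LFS pointer"
assert digest.hexdigest() == expected_sha256, "sha256 mismatch with LFS pointer"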
special_tokens_map.json ADDED
@@ -0,0 +1,107 @@
+ {
+   "additional_special_tokens": [
+     "<extra_id_0>",
+     "<extra_id_1>",
+     "<extra_id_2>",
+     "<extra_id_3>",
+     "<extra_id_4>",
+     "<extra_id_5>",
+     "<extra_id_6>",
+     "<extra_id_7>",
+     "<extra_id_8>",
+     "<extra_id_9>",
+     "<extra_id_10>",
+     "<extra_id_11>",
+     "<extra_id_12>",
+     "<extra_id_13>",
+     "<extra_id_14>",
+     "<extra_id_15>",
+     "<extra_id_16>",
+     "<extra_id_17>",
+     "<extra_id_18>",
+     "<extra_id_19>",
+     "<extra_id_20>",
+     "<extra_id_21>",
+     "<extra_id_22>",
+     "<extra_id_23>",
+     "<extra_id_24>",
+     "<extra_id_25>",
+     "<extra_id_26>",
+     "<extra_id_27>",
+     "<extra_id_28>",
+     "<extra_id_29>",
+     "<extra_id_30>",
+     "<extra_id_31>",
+     "<extra_id_32>",
+     "<extra_id_33>",
+     "<extra_id_34>",
+     "<extra_id_35>",
+     "<extra_id_36>",
+     "<extra_id_37>",
+     "<extra_id_38>",
+     "<extra_id_39>",
+     "<extra_id_40>",
+     "<extra_id_41>",
+     "<extra_id_42>",
+     "<extra_id_43>",
+     "<extra_id_44>",
+     "<extra_id_45>",
+     "<extra_id_46>",
+     "<extra_id_47>",
+     "<extra_id_48>",
+     "<extra_id_49>",
+     "<extra_id_50>",
+     "<extra_id_51>",
+     "<extra_id_52>",
+     "<extra_id_53>",
+     "<extra_id_54>",
+     "<extra_id_55>",
+     "<extra_id_56>",
+     "<extra_id_57>",
+     "<extra_id_58>",
+     "<extra_id_59>",
+     "<extra_id_60>",
+     "<extra_id_61>",
+     "<extra_id_62>",
+     "<extra_id_63>",
+     "<extra_id_64>",
+     "<extra_id_65>",
+     "<extra_id_66>",
+     "<extra_id_67>",
+     "<extra_id_68>",
+     "<extra_id_69>",
+     "<extra_id_70>",
+     "<extra_id_71>",
+     "<extra_id_72>",
+     "<extra_id_73>",
+     "<extra_id_74>",
+     "<extra_id_75>",
+     "<extra_id_76>",
+     "<extra_id_77>",
+     "<extra_id_78>",
+     "<extra_id_79>",
+     "<extra_id_80>",
+     "<extra_id_81>",
+     "<extra_id_82>",
+     "<extra_id_83>",
+     "<extra_id_84>",
+     "<extra_id_85>",
+     "<extra_id_86>",
+     "<extra_id_87>",
+     "<extra_id_88>",
+     "<extra_id_89>",
+     "<extra_id_90>",
+     "<extra_id_91>",
+     "<extra_id_92>",
+     "<extra_id_93>",
+     "<extra_id_94>",
+     "<extra_id_95>",
+     "<extra_id_96>",
+     "<extra_id_97>",
+     "<extra_id_98>",
+     "<extra_id_99>"
+   ],
+   "eos_token": "</s>",
+   "pad_token": "<pad>",
+   "unk_token": "<unk>"
+ }
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
+ size 791656
tokenizer_config.json ADDED
@@ -0,0 +1,113 @@
+ {
+   "additional_special_tokens": [
+     "<extra_id_0>",
+     "<extra_id_1>",
+     "<extra_id_2>",
+     "<extra_id_3>",
+     "<extra_id_4>",
+     "<extra_id_5>",
+     "<extra_id_6>",
+     "<extra_id_7>",
+     "<extra_id_8>",
+     "<extra_id_9>",
+     "<extra_id_10>",
+     "<extra_id_11>",
+     "<extra_id_12>",
+     "<extra_id_13>",
+     "<extra_id_14>",
+     "<extra_id_15>",
+     "<extra_id_16>",
+     "<extra_id_17>",
+     "<extra_id_18>",
+     "<extra_id_19>",
+     "<extra_id_20>",
+     "<extra_id_21>",
+     "<extra_id_22>",
+     "<extra_id_23>",
+     "<extra_id_24>",
+     "<extra_id_25>",
+     "<extra_id_26>",
+     "<extra_id_27>",
+     "<extra_id_28>",
+     "<extra_id_29>",
+     "<extra_id_30>",
+     "<extra_id_31>",
+     "<extra_id_32>",
+     "<extra_id_33>",
+     "<extra_id_34>",
+     "<extra_id_35>",
+     "<extra_id_36>",
+     "<extra_id_37>",
+     "<extra_id_38>",
+     "<extra_id_39>",
+     "<extra_id_40>",
+     "<extra_id_41>",
+     "<extra_id_42>",
+     "<extra_id_43>",
+     "<extra_id_44>",
+     "<extra_id_45>",
+     "<extra_id_46>",
+     "<extra_id_47>",
+     "<extra_id_48>",
+     "<extra_id_49>",
+     "<extra_id_50>",
+     "<extra_id_51>",
+     "<extra_id_52>",
+     "<extra_id_53>",
+     "<extra_id_54>",
+     "<extra_id_55>",
+     "<extra_id_56>",
+     "<extra_id_57>",
+     "<extra_id_58>",
+     "<extra_id_59>",
+     "<extra_id_60>",
+     "<extra_id_61>",
+     "<extra_id_62>",
+     "<extra_id_63>",
+     "<extra_id_64>",
+     "<extra_id_65>",
+     "<extra_id_66>",
+     "<extra_id_67>",
+     "<extra_id_68>",
+     "<extra_id_69>",
+     "<extra_id_70>",
+     "<extra_id_71>",
+     "<extra_id_72>",
+     "<extra_id_73>",
+     "<extra_id_74>",
+     "<extra_id_75>",
+     "<extra_id_76>",
+     "<extra_id_77>",
+     "<extra_id_78>",
+     "<extra_id_79>",
+     "<extra_id_80>",
+     "<extra_id_81>",
+     "<extra_id_82>",
+     "<extra_id_83>",
+     "<extra_id_84>",
+     "<extra_id_85>",
+     "<extra_id_86>",
+     "<extra_id_87>",
+     "<extra_id_88>",
+     "<extra_id_89>",
+     "<extra_id_90>",
+     "<extra_id_91>",
+     "<extra_id_92>",
+     "<extra_id_93>",
+     "<extra_id_94>",
+     "<extra_id_95>",
+     "<extra_id_96>",
+     "<extra_id_97>",
+     "<extra_id_98>",
+     "<extra_id_99>"
+   ],
+   "eos_token": "</s>",
+   "extra_ids": 100,
+   "model_max_length": 512,
+   "name_or_path": "google/flan-t5-base",
+   "pad_token": "<pad>",
+   "sp_model_kwargs": {},
+   "special_tokens_map_file": "/home/younes_huggingface_co/.cache/huggingface/hub/models--google--t5-v1_1-base/snapshots/650d7745bf1e502d6949b22cc19155cd656d3d4e/special_tokens_map.json",
+   "tokenizer_class": "T5Tokenizer",
+   "unk_token": "<unk>"
+ }
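
The tokenizer config mirrors google/flan-t5-base: "extra_ids": 100 corresponds to the <extra_id_0> through <extra_id_99> sentinel tokens listed above, and "model_max_length" caps inputs at 512 tokens. A quick usage sketch, again with a placeholder repo id:

from transformers import T5Tokenizer

tok = T5Tokenizer.from_pretrained("<this-repo-id>")  # placeholder repo id
enc = tok("sst2 sentence: the movie was <extra_id_0> good.", return_tensors="pt")
# Each sentinel is kept as a single special token rather than split into subword pieces.
print(tok.convert_ids_to_tokens(enc.input_ids[0]))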
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:280a1bfdcbc93cc4c81579cc5c6b17828fd4a466d1c5b8f0d5afd06bb29868cd
+ size 3771