sara-nabhani committed
Commit fb60382
1 Parent(s): 5a1162f

Model save

last-checkpoint/generation_config.json → generation_config.json RENAMED
File without changes
last-checkpoint/config.json DELETED
@@ -1,61 +0,0 @@
- {
-   "_name_or_path": "google/flan-t5-small",
-   "architectures": [
-     "T5ForConditionalGeneration"
-   ],
-   "d_ff": 1024,
-   "d_kv": 64,
-   "d_model": 512,
-   "decoder_start_token_id": 0,
-   "dense_act_fn": "gelu_new",
-   "dropout_rate": 0.1,
-   "eos_token_id": 1,
-   "feed_forward_proj": "gated-gelu",
-   "initializer_factor": 1.0,
-   "is_encoder_decoder": true,
-   "is_gated_act": true,
-   "layer_norm_epsilon": 1e-06,
-   "model_type": "t5",
-   "n_positions": 512,
-   "num_decoder_layers": 8,
-   "num_heads": 6,
-   "num_layers": 8,
-   "output_past": true,
-   "pad_token_id": 0,
-   "relative_attention_max_distance": 128,
-   "relative_attention_num_buckets": 32,
-   "task_specific_params": {
-     "summarization": {
-       "early_stopping": true,
-       "length_penalty": 2.0,
-       "max_length": 200,
-       "min_length": 30,
-       "no_repeat_ngram_size": 3,
-       "num_beams": 4,
-       "prefix": "summarize: "
-     },
-     "translation_en_to_de": {
-       "early_stopping": true,
-       "max_length": 300,
-       "num_beams": 4,
-       "prefix": "translate English to German: "
-     },
-     "translation_en_to_fr": {
-       "early_stopping": true,
-       "max_length": 300,
-       "num_beams": 4,
-       "prefix": "translate English to French: "
-     },
-     "translation_en_to_ro": {
-       "early_stopping": true,
-       "max_length": 300,
-       "num_beams": 4,
-       "prefix": "translate English to Romanian: "
-     }
-   },
-   "tie_word_embeddings": false,
-   "torch_dtype": "float32",
-   "transformers_version": "4.27.4",
-   "use_cache": true,
-   "vocab_size": 32128
- }
 
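The deleted config above mirrors the stock google/flan-t5-small architecture (8 encoder and 8 decoder layers, d_model 512, d_ff 1024, 6 attention heads, gated-GELU activations). A minimal sketch, assuming the transformers library is installed, of rebuilding an equivalent T5Config for inspection; the variable name is illustrative:

from transformers import T5Config

# Key fields copied from the deleted config.json; everything else
# falls back to T5Config defaults.
config = T5Config(
    d_model=512,
    d_kv=64,
    d_ff=1024,
    num_layers=8,
    num_decoder_layers=8,
    num_heads=6,
    feed_forward_proj="gated-gelu",
    dropout_rate=0.1,
    vocab_size=32128,
    tie_word_embeddings=False,
    decoder_start_token_id=0,
    eos_token_id=1,
    pad_token_id=0,
)
print(config.model_type)  # "t5"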
last-checkpoint/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4a54a4a31b31cbcad9e386403f783c66ac5d7abbe7f401c23658bb39c407bbe9
- size 1135685
 
last-checkpoint/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:09d3138a7165d438b83836834da671edfe79b107ed24189658714a9b794d36db
- size 307910149
 
last-checkpoint/rng_state.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d1ec407948436ccc80e93f11a93c1f5e1ff6f5e9d21d8a10b12885e1b8426660
- size 14575
 
last-checkpoint/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8768affab55180e629d6c31ff86e7291be472952121ca2693615f1bb55de2777
- size 627
 
last-checkpoint/special_tokens_map.json DELETED
@@ -1,107 +0,0 @@
- {
-   "additional_special_tokens": [
-     "<extra_id_0>",
-     "<extra_id_1>",
-     "<extra_id_2>",
-     "<extra_id_3>",
-     "<extra_id_4>",
-     "<extra_id_5>",
-     "<extra_id_6>",
-     "<extra_id_7>",
-     "<extra_id_8>",
-     "<extra_id_9>",
-     "<extra_id_10>",
-     "<extra_id_11>",
-     "<extra_id_12>",
-     "<extra_id_13>",
-     "<extra_id_14>",
-     "<extra_id_15>",
-     "<extra_id_16>",
-     "<extra_id_17>",
-     "<extra_id_18>",
-     "<extra_id_19>",
-     "<extra_id_20>",
-     "<extra_id_21>",
-     "<extra_id_22>",
-     "<extra_id_23>",
-     "<extra_id_24>",
-     "<extra_id_25>",
-     "<extra_id_26>",
-     "<extra_id_27>",
-     "<extra_id_28>",
-     "<extra_id_29>",
-     "<extra_id_30>",
-     "<extra_id_31>",
-     "<extra_id_32>",
-     "<extra_id_33>",
-     "<extra_id_34>",
-     "<extra_id_35>",
-     "<extra_id_36>",
-     "<extra_id_37>",
-     "<extra_id_38>",
-     "<extra_id_39>",
-     "<extra_id_40>",
-     "<extra_id_41>",
-     "<extra_id_42>",
-     "<extra_id_43>",
-     "<extra_id_44>",
-     "<extra_id_45>",
-     "<extra_id_46>",
-     "<extra_id_47>",
-     "<extra_id_48>",
-     "<extra_id_49>",
-     "<extra_id_50>",
-     "<extra_id_51>",
-     "<extra_id_52>",
-     "<extra_id_53>",
-     "<extra_id_54>",
-     "<extra_id_55>",
-     "<extra_id_56>",
-     "<extra_id_57>",
-     "<extra_id_58>",
-     "<extra_id_59>",
-     "<extra_id_60>",
-     "<extra_id_61>",
-     "<extra_id_62>",
-     "<extra_id_63>",
-     "<extra_id_64>",
-     "<extra_id_65>",
-     "<extra_id_66>",
-     "<extra_id_67>",
-     "<extra_id_68>",
-     "<extra_id_69>",
-     "<extra_id_70>",
-     "<extra_id_71>",
-     "<extra_id_72>",
-     "<extra_id_73>",
-     "<extra_id_74>",
-     "<extra_id_75>",
-     "<extra_id_76>",
-     "<extra_id_77>",
-     "<extra_id_78>",
-     "<extra_id_79>",
-     "<extra_id_80>",
-     "<extra_id_81>",
-     "<extra_id_82>",
-     "<extra_id_83>",
-     "<extra_id_84>",
-     "<extra_id_85>",
-     "<extra_id_86>",
-     "<extra_id_87>",
-     "<extra_id_88>",
-     "<extra_id_89>",
-     "<extra_id_90>",
-     "<extra_id_91>",
-     "<extra_id_92>",
-     "<extra_id_93>",
-     "<extra_id_94>",
-     "<extra_id_95>",
-     "<extra_id_96>",
-     "<extra_id_97>",
-     "<extra_id_98>",
-     "<extra_id_99>"
-   ],
-   "eos_token": "</s>",
-   "pad_token": "<pad>",
-   "unk_token": "<unk>"
- }
 
last-checkpoint/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/tokenizer_config.json DELETED
@@ -1,112 +0,0 @@
- {
-   "additional_special_tokens": [
-     "<extra_id_0>",
-     "<extra_id_1>",
-     "<extra_id_2>",
-     "<extra_id_3>",
-     "<extra_id_4>",
-     "<extra_id_5>",
-     "<extra_id_6>",
-     "<extra_id_7>",
-     "<extra_id_8>",
-     "<extra_id_9>",
-     "<extra_id_10>",
-     "<extra_id_11>",
-     "<extra_id_12>",
-     "<extra_id_13>",
-     "<extra_id_14>",
-     "<extra_id_15>",
-     "<extra_id_16>",
-     "<extra_id_17>",
-     "<extra_id_18>",
-     "<extra_id_19>",
-     "<extra_id_20>",
-     "<extra_id_21>",
-     "<extra_id_22>",
-     "<extra_id_23>",
-     "<extra_id_24>",
-     "<extra_id_25>",
-     "<extra_id_26>",
-     "<extra_id_27>",
-     "<extra_id_28>",
-     "<extra_id_29>",
-     "<extra_id_30>",
-     "<extra_id_31>",
-     "<extra_id_32>",
-     "<extra_id_33>",
-     "<extra_id_34>",
-     "<extra_id_35>",
-     "<extra_id_36>",
-     "<extra_id_37>",
-     "<extra_id_38>",
-     "<extra_id_39>",
-     "<extra_id_40>",
-     "<extra_id_41>",
-     "<extra_id_42>",
-     "<extra_id_43>",
-     "<extra_id_44>",
-     "<extra_id_45>",
-     "<extra_id_46>",
-     "<extra_id_47>",
-     "<extra_id_48>",
-     "<extra_id_49>",
-     "<extra_id_50>",
-     "<extra_id_51>",
-     "<extra_id_52>",
-     "<extra_id_53>",
-     "<extra_id_54>",
-     "<extra_id_55>",
-     "<extra_id_56>",
-     "<extra_id_57>",
-     "<extra_id_58>",
-     "<extra_id_59>",
-     "<extra_id_60>",
-     "<extra_id_61>",
-     "<extra_id_62>",
-     "<extra_id_63>",
-     "<extra_id_64>",
-     "<extra_id_65>",
-     "<extra_id_66>",
-     "<extra_id_67>",
-     "<extra_id_68>",
-     "<extra_id_69>",
-     "<extra_id_70>",
-     "<extra_id_71>",
-     "<extra_id_72>",
-     "<extra_id_73>",
-     "<extra_id_74>",
-     "<extra_id_75>",
-     "<extra_id_76>",
-     "<extra_id_77>",
-     "<extra_id_78>",
-     "<extra_id_79>",
-     "<extra_id_80>",
-     "<extra_id_81>",
-     "<extra_id_82>",
-     "<extra_id_83>",
-     "<extra_id_84>",
-     "<extra_id_85>",
-     "<extra_id_86>",
-     "<extra_id_87>",
-     "<extra_id_88>",
-     "<extra_id_89>",
-     "<extra_id_90>",
-     "<extra_id_91>",
-     "<extra_id_92>",
-     "<extra_id_93>",
-     "<extra_id_94>",
-     "<extra_id_95>",
-     "<extra_id_96>",
-     "<extra_id_97>",
-     "<extra_id_98>",
-     "<extra_id_99>"
-   ],
-   "eos_token": "</s>",
-   "extra_ids": 100,
-   "model_max_length": 512,
-   "pad_token": "<pad>",
-   "sp_model_kwargs": {},
-   "special_tokens_map_file": "/home/younes_huggingface_co/.cache/huggingface/hub/models--google--t5-v1_1-small/snapshots/fb7e6cba609f7bab11c614294bc04f82f613c7b1/special_tokens_map.json",
-   "tokenizer_class": "T5Tokenizer",
-   "unk_token": "<unk>"
- }
 
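The tokenizer files above list the 100 <extra_id_*> sentinel tokens that T5Tokenizer registers when extra_ids is 100 (its default), plus the standard </s>, <pad>, and <unk> specials. A minimal sketch, assuming transformers and sentencepiece are installed, showing that the same tokens can be recovered from the base checkpoint:

from transformers import T5Tokenizer

# Hypothetical: load the base tokenizer this fine-tune started from.
tok = T5Tokenizer.from_pretrained("google/flan-t5-small")

sentinels = [t for t in tok.additional_special_tokens if t.startswith("<extra_id_")]
print(len(sentinels))                               # 100, matching "extra_ids": 100
print(tok.eos_token, tok.pad_token, tok.unk_token)  # </s> <pad> <unk>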
last-checkpoint/trainer_state.json DELETED
@@ -1,148 +0,0 @@
- {
-   "best_metric": 1.748366355895996,
-   "best_model_checkpoint": "/home2/s5432073/nlp-final-project/results/google-flan-t5-small-e-snli-generation-label_and_explanation-selected-b48/checkpoint-2000",
-   "epoch": 1.0484011881880133,
-   "global_step": 12000,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {
-       "epoch": 0.17,
-       "learning_rate": 0.00034946706272933774,
-       "loss": 1.5084,
-       "step": 2000
-     },
-     {
-       "epoch": 0.17,
-       "eval_accuracy": 0.8001422475106685,
-       "eval_bertscore_f1": 0.9270728814895016,
-       "eval_bleu": 0.3703095761353542,
-       "eval_f1": 0.7996773374065786,
-       "eval_loss": 1.748366355895996,
-       "eval_rouge1": 0.5768497047116532,
-       "eval_rouge2": 0.3694908776701721,
-       "eval_rougeL": 0.5209417040574017,
-       "eval_rougeLsum": 0.5229441483498007,
-       "eval_runtime": 179.7853,
-       "eval_samples_per_second": 54.743,
-       "eval_steps_per_second": 1.146,
-       "step": 2000
-     },
-     {
-       "epoch": 0.35,
-       "learning_rate": 0.0006989341254586755,
-       "loss": 1.2745,
-       "step": 4000
-     },
-     {
-       "epoch": 0.35,
-       "eval_accuracy": 0.8113188376346271,
-       "eval_bertscore_f1": 0.9303517953163966,
-       "eval_bleu": 0.3853382481222733,
-       "eval_f1": 0.8109674919534894,
-       "eval_loss": 1.8137184381484985,
-       "eval_rouge1": 0.5881389505976721,
-       "eval_rouge2": 0.38041241129632175,
-       "eval_rougeL": 0.530537720895386,
-       "eval_rougeLsum": 0.5325074635967986,
-       "eval_runtime": 162.507,
-       "eval_samples_per_second": 60.564,
-       "eval_steps_per_second": 1.268,
-       "step": 4000
-     },
-     {
-       "epoch": 0.52,
-       "learning_rate": 0.0009974525690427361,
-       "loss": 1.2287,
-       "step": 6000
-     },
-     {
-       "epoch": 0.52,
-       "eval_accuracy": 0.8391587075797602,
-       "eval_bertscore_f1": 0.9298215870921677,
-       "eval_bleu": 0.37776072091558766,
-       "eval_f1": 0.8403159186762296,
-       "eval_loss": 1.8357921838760376,
-       "eval_rouge1": 0.5828317671095468,
-       "eval_rouge2": 0.37472211399813016,
-       "eval_rougeL": 0.5282183551855593,
-       "eval_rougeLsum": 0.5300630209960154,
-       "eval_runtime": 173.4887,
-       "eval_samples_per_second": 56.73,
-       "eval_steps_per_second": 1.187,
-       "step": 6000
-     },
-     {
-       "epoch": 0.7,
-       "learning_rate": 0.000979059565741192,
-       "loss": 1.1964,
-       "step": 8000
-     },
-     {
-       "epoch": 0.7,
-       "eval_accuracy": 0.843019711440764,
-       "eval_bertscore_f1": 0.9325893103010503,
-       "eval_bleu": 0.39984253680250587,
-       "eval_f1": 0.8437490943343028,
-       "eval_loss": 1.843224287033081,
-       "eval_rouge1": 0.5973841993048026,
-       "eval_rouge2": 0.3904593338424962,
-       "eval_rougeL": 0.5446862892314619,
-       "eval_rougeLsum": 0.5461965827777935,
-       "eval_runtime": 158.9404,
-       "eval_samples_per_second": 61.923,
-       "eval_steps_per_second": 1.296,
-       "step": 8000
-     },
-     {
-       "epoch": 0.87,
-       "learning_rate": 0.000960666562439648,
-       "loss": 1.1674,
-       "step": 10000
-     },
-     {
-       "epoch": 0.87,
-       "eval_accuracy": 0.8507417191627717,
-       "eval_bertscore_f1": 0.9309739923058087,
-       "eval_bleu": 0.38917736442742895,
-       "eval_f1": 0.8485245801942701,
-       "eval_loss": 1.8567092418670654,
-       "eval_rouge1": 0.5947498091541006,
-       "eval_rouge2": 0.38877870802357656,
-       "eval_rougeL": 0.5382707915260707,
-       "eval_rougeLsum": 0.5401697430632839,
-       "eval_runtime": 171.2441,
-       "eval_samples_per_second": 57.474,
-       "eval_steps_per_second": 1.203,
-       "step": 10000
-     },
-     {
-       "epoch": 1.05,
-       "learning_rate": 0.0009422735591381039,
-       "loss": 1.1371,
-       "step": 12000
-     },
-     {
-       "epoch": 1.05,
-       "eval_accuracy": 0.8622231253810201,
-       "eval_bertscore_f1": 0.9329314648717478,
-       "eval_bleu": 0.40472213589689604,
-       "eval_f1": 0.8623314280769628,
-       "eval_loss": 1.871994137763977,
-       "eval_rouge1": 0.605873896307076,
-       "eval_rouge2": 0.398828693131973,
-       "eval_rougeL": 0.5475319717542413,
-       "eval_rougeLsum": 0.5495598969128583,
-       "eval_runtime": 154.2147,
-       "eval_samples_per_second": 63.82,
-       "eval_steps_per_second": 1.336,
-       "step": 12000
-     }
-   ],
-   "max_steps": 114460,
-   "num_train_epochs": 10,
-   "total_flos": 1.1869746997825536e+16,
-   "trial_name": null,
-   "trial_params": null
- }
 
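The deleted trainer_state.json records an evaluation every 2000 steps; by the tracked metric (eval_loss, best_metric 1.7484) the best checkpoint is step 2000, even though accuracy, F1, and ROUGE keep improving at later steps. A minimal sketch, assuming a local copy of the file at the path shown, of pulling that summary back out:

import json

# Hypothetical local path to the deleted checkpoint state file.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["best_metric"])            # 1.748366355895996
print(state["best_model_checkpoint"])  # .../checkpoint-2000

# Evaluation entries are the log_history items that carry an "eval_loss" key.
for entry in state["log_history"]:
    if "eval_loss" in entry:
        print(entry["step"], entry["eval_loss"], entry["eval_accuracy"])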
last-checkpoint/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:08cee757bb38543504c6b54bcf6e986e3f58156f2e564d8864b7754c1998065c
- size 4027
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:09d3138a7165d438b83836834da671edfe79b107ed24189658714a9b794d36db
+ oid sha256:0886f6de8b90deb62ecd7a4a259d444f7402c6ed157c77a78ada8711237ed3a0
  size 307910149
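Each binary file in this commit is tracked as a Git LFS pointer (a version line, a sha256 oid, and the size in bytes), and the change above swaps the oid of pytorch_model.bin for a new blob of the same 307910149-byte size. A minimal sketch, assuming a locally downloaded copy of the weights, of checking a file against its pointer:

import hashlib

def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
    # Sha256 of the file contents, which is what a Git LFS pointer records as "oid".
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Hypothetical local copy of the updated weights; the expected value is the
# new oid shown in the diff above.
expected = "0886f6de8b90deb62ecd7a4a259d444f7402c6ed157c77a78ada8711237ed3a0"
print(lfs_oid("pytorch_model.bin") == expected)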