Litzy619 committed on
Commit 37d7252
1 Parent(s): 0828266

End of training

README.md ADDED
@@ -0,0 +1,93 @@
+ ---
+ license: mit
+ base_model: microsoft/phi-2
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: 0503LayerAnalysis31
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # 0503LayerAnalysis31
+
+ This model is a fine-tuned version of [microsoft/phi-2](https://huggingface.co/microsoft/phi-2) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.0564
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0003
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 128
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: cosine_with_restarts
+ - lr_scheduler_warmup_steps: 60
+ - num_epochs: 3
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:-----:|:----:|:---------------:|
+ | 2.066         | 0.09  | 10   | 0.9101          |
+ | 0.2987        | 0.18  | 20   | 0.1450          |
+ | 0.1452        | 0.27  | 30   | 0.1357          |
+ | 0.142         | 0.36  | 40   | 0.1149          |
+ | 0.1195        | 0.45  | 50   | 0.1107          |
+ | 0.1109        | 0.54  | 60   | 0.0863          |
+ | 0.0885        | 0.63  | 70   | 0.0826          |
+ | 0.081         | 0.73  | 80   | 0.0820          |
+ | 0.0793        | 0.82  | 90   | 0.0738          |
+ | 0.0804        | 0.91  | 100  | 0.0770          |
+ | 0.084         | 1.0   | 110  | 0.0733          |
+ | 0.078         | 1.09  | 120  | 0.0719          |
+ | 0.0745        | 1.18  | 130  | 0.0741          |
+ | 0.0835        | 1.27  | 140  | 0.0727          |
+ | 0.0738        | 1.36  | 150  | 0.0723          |
+ | 0.0808        | 1.45  | 160  | 0.0760          |
+ | 0.0772        | 1.54  | 170  | 0.0687          |
+ | 0.08          | 1.63  | 180  | 0.0687          |
+ | 0.0745        | 1.72  | 190  | 0.0663          |
+ | 0.0742        | 1.81  | 200  | 0.0678          |
+ | 0.0672        | 1.9   | 210  | 0.0693          |
+ | 0.0671        | 1.99  | 220  | 0.0643          |
+ | 0.0571        | 2.08  | 230  | 0.0643          |
+ | 0.061         | 2.18  | 240  | 0.0639          |
+ | 0.0611        | 2.27  | 250  | 0.0617          |
+ | 0.0551        | 2.36  | 260  | 0.0645          |
+ | 0.0615        | 2.45  | 270  | 0.0599          |
+ | 0.0552        | 2.54  | 280  | 0.0593          |
+ | 0.0533        | 2.63  | 290  | 0.0580          |
+ | 0.0551        | 2.72  | 300  | 0.0573          |
+ | 0.0592        | 2.81  | 310  | 0.0569          |
+ | 0.0524        | 2.9   | 320  | 0.0566          |
+ | 0.0536        | 2.99  | 330  | 0.0564          |
+
+
+ ### Framework versions
+
+ - Transformers 4.36.0.dev0
+ - Pytorch 2.1.2+cu121
+ - Datasets 2.14.6
+ - Tokenizers 0.14.1
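The hyperparameter list in the README above maps fairly directly onto a `transformers` `TrainingArguments` object. The sketch below only restates those values; `output_dir` is a placeholder, the dataset and model setup are omitted, and the effective batch size of 128 comes from 8 examples per device times 16 accumulation steps.

```python
# Minimal sketch of the listed hyperparameters, not the authors' training script.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="0503LayerAnalysis31",      # placeholder
    learning_rate=3e-4,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=16,        # 8 x 16 = 128 effective batch size
    seed=42,
    num_train_epochs=3,
    lr_scheduler_type="cosine_with_restarts",
    warmup_steps=60,
    fp16=True,                             # "Native AMP" mixed precision
)
```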
adapter_config.json ADDED
@@ -0,0 +1,50 @@
+ {
+   "adaptive_ratio": 1.0,
+   "adaptive_ratio_decay": 1.0,
+   "additive_modeling": false,
+   "auto_mapping": null,
+   "base_model_name_or_path": "microsoft/phi-2",
+   "bias": "none",
+   "curr_learning": false,
+   "detached_training": false,
+   "dynamic_adapter_pool": false,
+   "enable_lora": null,
+   "encoder_hidden_size": 2560,
+   "fan_in_fan_out": false,
+   "hypernetwork": true,
+   "inference_mode": true,
+   "input_based_adapter_selection": true,
+   "layer_to_lora": [
+     31
+   ],
+   "lora_alpha": 16,
+   "lora_dropout": 0.05,
+   "merge_weights": false,
+   "modules_to_save": null,
+   "num_attention_heads": 32,
+   "num_layers": 32,
+   "num_prefix_set": 3,
+   "num_transformer_submodules": 1,
+   "num_virtual_tokens": 30,
+   "number_of_adapter_pre_layer": 8,
+   "ot_diversified_dispatcher": false,
+   "ot_diversified_prefix": false,
+   "peft_type": "PREFIX_MA_LORA",
+   "pool_selective_inference": true,
+   "pool_selective_training": true,
+   "prefix_projection": true,
+   "r": 16,
+   "random_routing": false,
+   "random_routing_inference": false,
+   "scale": 64,
+   "selective_num": 8,
+   "simple_hidden_matching": false,
+   "simple_instance_matching": true,
+   "target_modules": [
+     "q_proj",
+     "k_proj",
+     "v_proj"
+   ],
+   "task_type": "CAUSAL_LM",
+   "token_dim": 2560
+ }
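For orientation, a minimal sketch of reading this adapter configuration and loading the base model it names. Note that "PREFIX_MA_LORA" is not a `peft_type` shipped with the upstream `peft` package, so a stock `PeftModel.from_pretrained` call is unlikely to accept this adapter; the custom PEFT fork used for training is presumably required. The local path "0503LayerAnalysis31" is a placeholder for a clone of this repo.

```python
# Sketch only: inspect the adapter config and load the base model with stock transformers.
import json
import torch
from transformers import AutoModelForCausalLM

with open("0503LayerAnalysis31/adapter_config.json") as f:
    cfg = json.load(f)
print(cfg["peft_type"], cfg["layer_to_lora"], cfg["target_modules"])

base = AutoModelForCausalLM.from_pretrained(
    cfg["base_model_name_or_path"],        # "microsoft/phi-2"
    torch_dtype=torch.float16,
    trust_remote_code=True,                # phi-2 needs remote code on transformers 4.36.x
)
```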
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95d3d9e3bff979e59a2c4520b61f735881aca1df8f4639f414f0d98a5d0e61e6
+ size 100009006
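This Git LFS pointer records the size and SHA-256 of the actual weight file, so a download can be checked against it. A small sketch, with the file path as a placeholder:

```python
# Verify a downloaded adapter_model.bin against the LFS pointer above.
import hashlib

EXPECTED_SIZE = 100009006
EXPECTED_SHA256 = "95d3d9e3bff979e59a2c4520b61f735881aca1df8f4639f414f0d98a5d0e61e6"

with open("adapter_model.bin", "rb") as f:
    data = f.read()

assert len(data) == EXPECTED_SIZE, "size mismatch (did git-lfs fetch the real file?)"
assert hashlib.sha256(data).hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
```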
added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   " ": 50286,
+   " ": 50285,
+   " ": 50284,
+   " ": 50283,
+   " ": 50282,
+   " ": 50281,
+   " ": 50280,
+   " ": 50279,
+   " ": 50278,
+   " ": 50277,
+   " ": 50276,
+   " ": 50275,
+   " ": 50274,
+   " ": 50273,
+   " ": 50272,
+   " ": 50271,
+   " ": 50270,
+   " ": 50269,
+   " ": 50268,
+   " ": 50267,
+   " ": 50266,
+   " ": 50265,
+   " ": 50264,
+   " ": 50263,
+   " ": 50262,
+   " ": 50261,
+   " ": 50260,
+   " ": 50259,
+   " ": 50258,
+   " ": 50257
+ }
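These added ids (50257-50294) appear to be the CodeGen-style whitespace tokens: runs of tabs and spaces of varying lengths that each tokenize to a single id (the space runs render as single spaces above), which keeps indented code short after tokenization. A hedged illustration, with a placeholder local path for a clone of this repo:

```python
# Illustration only: an indentation run should surface as one of the added tokens.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("0503LayerAnalysis31")
ids = tok("def f():\n        return 1")["input_ids"]
print(tok.convert_ids_to_tokens(ids))   # the 8-space indent should map to a single added token
```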
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "!",
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,328 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": true,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50277": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50278": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50279": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50280": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50281": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50282": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50283": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50284": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50285": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50286": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50287": {
+       "content": "\t\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50288": {
+       "content": "\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50289": {
+       "content": "\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50290": {
+       "content": "\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50291": {
+       "content": "\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50292": {
+       "content": "\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50293": {
+       "content": "\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50294": {
+       "content": "\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "errors": "replace",
+   "model_max_length": 2048,
+   "pad_token": "!",
+   "padding_side": "left",
+   "tokenizer_class": "CodeGenTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
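In practice this configuration means the tokenizer pads with "!" on the left and adds BOS/EOS tokens automatically, which suits batched causal-LM generation. A small sketch, again with a placeholder local path:

```python
# Check the padding setup this config produces.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("0503LayerAnalysis31")
print(tok.pad_token, tok.padding_side, tok.model_max_length)   # "!", "left", 2048

batch = tok(["short prompt", "a somewhat longer prompt"],
            padding=True, return_tensors="pt")
print(batch["input_ids"].shape)   # sequences left-padded to the longest one
```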
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99fcdf6a8dd5e7cd754f63e2ddfdeacaf908e997da706da0cf518f2f9a2f98fa
+ size 5176
vocab.json ADDED
The diff for this file is too large to render. See raw diff