fasterinnerlooper committed on
Commit
144a394
1 Parent(s): 301c98b

Training in progress, epoch 0

Browse files
Files changed (23) hide show
  1. adapter_config.json +31 -0
  2. adapter_model.safetensors +3 -0
  3. runs/Jan26_22-14-01_nfj4hjxxx1/events.out.tfevents.1706307253.nfj4hjxxx1.32.0 +3 -0
  4. runs/Jan26_22-14-01_nfj4hjxxx1/events.out.tfevents.1706311421.nfj4hjxxx1.32.1 +3 -0
  5. runs/Jan26_23-24-08_nfj4hjxxx1/events.out.tfevents.1706311471.nfj4hjxxx1.32.2 +3 -0
  6. runs/Jan26_23-28-34_nfj4hjxxx1/events.out.tfevents.1706311720.nfj4hjxxx1.32.3 +3 -0
  7. runs/Jan26_23-29-51_nfj4hjxxx1/events.out.tfevents.1706311794.nfj4hjxxx1.32.4 +3 -0
  8. runs/Jan26_23-29-51_nfj4hjxxx1/events.out.tfevents.1706312127.nfj4hjxxx1.32.5 +3 -0
  9. runs/Jan27_00-22-13_nfj4hjxxx1/events.out.tfevents.1706314950.nfj4hjxxx1.3954.0 +3 -0
  10. runs/Jan27_00-25-21_nfj4hjxxx1/events.out.tfevents.1706315137.nfj4hjxxx1.3954.1 +3 -0
  11. runs/Jan27_00-27-48_nfj4hjxxx1/events.out.tfevents.1706315269.nfj4hjxxx1.3954.2 +3 -0
  12. runs/Jan27_00-28-41_nfj4hjxxx1/events.out.tfevents.1706315323.nfj4hjxxx1.4561.0 +3 -0
  13. runs/Jan27_00-29-27_nfj4hjxxx1/events.out.tfevents.1706315368.nfj4hjxxx1.4561.1 +3 -0
  14. runs/Jan27_00-33-06_nfj4hjxxx1/events.out.tfevents.1706315603.nfj4hjxxx1.5152.0 +3 -0
  15. runs/Jan27_00-35-30_nfj4hjxxx1/events.out.tfevents.1706315746.nfj4hjxxx1.5152.1 +3 -0
  16. runs/Jan27_02-31-34_nt6iy4g0lx/events.out.tfevents.1706323573.nt6iy4g0lx.61.0 +3 -0
  17. runs/Jan27_04-52-01_nt6iy4g0lx/events.out.tfevents.1706331258.nt6iy4g0lx.3488.0 +3 -0
  18. runs/Jan27_04-52-01_nt6iy4g0lx/events.out.tfevents.1706331371.nt6iy4g0lx.3488.1 +3 -0
  19. runs/Jan27_04-52-01_nt6iy4g0lx/events.out.tfevents.1706331394.nt6iy4g0lx.3488.2 +3 -0
  20. special_tokens_map.json +24 -0
  21. tokenizer.json +0 -0
  22. tokenizer_config.json +364 -0
  23. training_args.bin +3 -0
adapter_config.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alpha_pattern": {},
3
+ "auto_mapping": null,
4
+ "base_model_name_or_path": "stabilityai/stable-code-3b",
5
+ "bias": "none",
6
+ "fan_in_fan_out": false,
7
+ "inference_mode": false,
8
+ "init_lora_weights": true,
9
+ "layers_pattern": null,
10
+ "layers_to_transform": null,
11
+ "loftq_config": {},
12
+ "lora_alpha": 8,
13
+ "lora_dropout": 0.0,
14
+ "megatron_config": null,
15
+ "megatron_core": "megatron.core",
16
+ "modules_to_save": null,
17
+ "peft_type": "LORA",
18
+ "r": 8,
19
+ "rank_pattern": {},
20
+ "revision": null,
21
+ "target_modules": [
22
+ "q_proj",
23
+ "down_proj",
24
+ "up_proj",
25
+ "o_proj",
26
+ "v_proj",
27
+ "gate_proj",
28
+ "k_proj"
29
+ ],
30
+ "task_type": "CAUSAL_LM"
31
+ }
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eac90d152c960c0f7ac77f6a8cfe22e6c36ddea9212e4b6e58e17e1521a996ab
3
+ size 50128536
runs/Jan26_22-14-01_nfj4hjxxx1/events.out.tfevents.1706307253.nfj4hjxxx1.32.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27e22705da44827a359e323e1046fe63744df7479b75f196bed65ee692b19b69
3
+ size 5353
runs/Jan26_22-14-01_nfj4hjxxx1/events.out.tfevents.1706311421.nfj4hjxxx1.32.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5f1a0d0dfee01ba16b9359b7d6210ada8cf744a6970090c1d70fb00111bd05d7
3
+ size 4970
runs/Jan26_23-24-08_nfj4hjxxx1/events.out.tfevents.1706311471.nfj4hjxxx1.32.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:912bf16633de6842fe6577e6054658301b741c8742c5c538b4c087d65549e068
3
+ size 4973
runs/Jan26_23-28-34_nfj4hjxxx1/events.out.tfevents.1706311720.nfj4hjxxx1.32.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:11eaed471175630cea310aaad375c4404e75843551e956a920708c95cdc3985c
3
+ size 4970
runs/Jan26_23-29-51_nfj4hjxxx1/events.out.tfevents.1706311794.nfj4hjxxx1.32.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0e4d1e201835a568413b51f003b9233c233b0f8b260b8a9a60eb2852783ad4b3
3
+ size 24690
runs/Jan26_23-29-51_nfj4hjxxx1/events.out.tfevents.1706312127.nfj4hjxxx1.32.5 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50a801ee50500b4b3d30560a29391b43e0076dc1d7d6f80038e260b54eb9a737
3
+ size 19760
runs/Jan27_00-22-13_nfj4hjxxx1/events.out.tfevents.1706314950.nfj4hjxxx1.3954.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:430f8fdf9ca745b749b105383480aa6a909a67422f3fb3ee834ac8113fcaf3dd
3
+ size 4999
runs/Jan27_00-25-21_nfj4hjxxx1/events.out.tfevents.1706315137.nfj4hjxxx1.3954.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d56ddc3a7a39840db289191ac668867135f9da44b12f082643d39d101bb93b02
3
+ size 4999
runs/Jan27_00-27-48_nfj4hjxxx1/events.out.tfevents.1706315269.nfj4hjxxx1.3954.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de02dfbfb24e4cc207f9df9fca0f69326eb24fc7b894ca80464c249e869dfd23
3
+ size 40
runs/Jan27_00-28-41_nfj4hjxxx1/events.out.tfevents.1706315323.nfj4hjxxx1.4561.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce5c24151c000dadaac5e5abcb7423b4e8225b2c8d27551ff92c636d8b340185
3
+ size 4999
runs/Jan27_00-29-27_nfj4hjxxx1/events.out.tfevents.1706315368.nfj4hjxxx1.4561.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a2193512b923373edb0c09e56a094f8f381b3a0f4ae47cd3a8315c4b9b4f5def
3
+ size 4999
runs/Jan27_00-33-06_nfj4hjxxx1/events.out.tfevents.1706315603.nfj4hjxxx1.5152.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5a6654418e018b935e5defb5ecfc4f185f6c9f81f7ac940a5e680a3b9bc154fd
3
+ size 4999
runs/Jan27_00-35-30_nfj4hjxxx1/events.out.tfevents.1706315746.nfj4hjxxx1.5152.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7fca81ba4730492cf3da808061289a022c34beaa47768c2f2462455ef92d811d
3
+ size 4999
runs/Jan27_02-31-34_nt6iy4g0lx/events.out.tfevents.1706323573.nt6iy4g0lx.61.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a07e7ac9159467f8a77ffa4e7d5608ff943ba2f3ade8ef38571a1fe73e1284e0
3
+ size 4999
runs/Jan27_04-52-01_nt6iy4g0lx/events.out.tfevents.1706331258.nt6iy4g0lx.3488.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7492229705d86da7530e4057888e4c89848edb4b486ae8eeb90c58ebdfb64977
3
+ size 4999
runs/Jan27_04-52-01_nt6iy4g0lx/events.out.tfevents.1706331371.nt6iy4g0lx.3488.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46bf3bc3e5d53a68e94337e27d76ddb8396a940c9695c035bae5afe731c00491
3
+ size 5797
runs/Jan27_04-52-01_nt6iy4g0lx/events.out.tfevents.1706331394.nt6iy4g0lx.3488.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:db628f2238e525c2f16d1c491933c9e49c20b9b1f85937f56b04657d8ce1d3e6
3
+ size 5265
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<|endoftext|>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|endoftext|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": "<|endoftext|>",
17
+ "unk_token": {
18
+ "content": "<|endoftext|>",
19
+ "lstrip": false,
20
+ "normalized": false,
21
+ "rstrip": false,
22
+ "single_word": false
23
+ }
24
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,364 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": false,
3
+ "added_tokens_decoder": {
4
+ "0": {
5
+ "content": "<|endoftext|>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "1": {
13
+ "content": "<|padding|>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "50254": {
21
+ "content": " ",
22
+ "lstrip": false,
23
+ "normalized": true,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": false
27
+ },
28
+ "50255": {
29
+ "content": " ",
30
+ "lstrip": false,
31
+ "normalized": true,
32
+ "rstrip": false,
33
+ "single_word": false,
34
+ "special": false
35
+ },
36
+ "50256": {
37
+ "content": " ",
38
+ "lstrip": false,
39
+ "normalized": true,
40
+ "rstrip": false,
41
+ "single_word": false,
42
+ "special": false
43
+ },
44
+ "50257": {
45
+ "content": " ",
46
+ "lstrip": false,
47
+ "normalized": true,
48
+ "rstrip": false,
49
+ "single_word": false,
50
+ "special": false
51
+ },
52
+ "50258": {
53
+ "content": " ",
54
+ "lstrip": false,
55
+ "normalized": true,
56
+ "rstrip": false,
57
+ "single_word": false,
58
+ "special": false
59
+ },
60
+ "50259": {
61
+ "content": " ",
62
+ "lstrip": false,
63
+ "normalized": true,
64
+ "rstrip": false,
65
+ "single_word": false,
66
+ "special": false
67
+ },
68
+ "50260": {
69
+ "content": " ",
70
+ "lstrip": false,
71
+ "normalized": true,
72
+ "rstrip": false,
73
+ "single_word": false,
74
+ "special": false
75
+ },
76
+ "50261": {
77
+ "content": " ",
78
+ "lstrip": false,
79
+ "normalized": true,
80
+ "rstrip": false,
81
+ "single_word": false,
82
+ "special": false
83
+ },
84
+ "50262": {
85
+ "content": " ",
86
+ "lstrip": false,
87
+ "normalized": true,
88
+ "rstrip": false,
89
+ "single_word": false,
90
+ "special": false
91
+ },
92
+ "50263": {
93
+ "content": " ",
94
+ "lstrip": false,
95
+ "normalized": true,
96
+ "rstrip": false,
97
+ "single_word": false,
98
+ "special": false
99
+ },
100
+ "50264": {
101
+ "content": " ",
102
+ "lstrip": false,
103
+ "normalized": true,
104
+ "rstrip": false,
105
+ "single_word": false,
106
+ "special": false
107
+ },
108
+ "50265": {
109
+ "content": " ",
110
+ "lstrip": false,
111
+ "normalized": true,
112
+ "rstrip": false,
113
+ "single_word": false,
114
+ "special": false
115
+ },
116
+ "50266": {
117
+ "content": " ",
118
+ "lstrip": false,
119
+ "normalized": true,
120
+ "rstrip": false,
121
+ "single_word": false,
122
+ "special": false
123
+ },
124
+ "50267": {
125
+ "content": " ",
126
+ "lstrip": false,
127
+ "normalized": true,
128
+ "rstrip": false,
129
+ "single_word": false,
130
+ "special": false
131
+ },
132
+ "50268": {
133
+ "content": " ",
134
+ "lstrip": false,
135
+ "normalized": true,
136
+ "rstrip": false,
137
+ "single_word": false,
138
+ "special": false
139
+ },
140
+ "50269": {
141
+ "content": " ",
142
+ "lstrip": false,
143
+ "normalized": true,
144
+ "rstrip": false,
145
+ "single_word": false,
146
+ "special": false
147
+ },
148
+ "50270": {
149
+ "content": " ",
150
+ "lstrip": false,
151
+ "normalized": true,
152
+ "rstrip": false,
153
+ "single_word": false,
154
+ "special": false
155
+ },
156
+ "50271": {
157
+ "content": " ",
158
+ "lstrip": false,
159
+ "normalized": true,
160
+ "rstrip": false,
161
+ "single_word": false,
162
+ "special": false
163
+ },
164
+ "50272": {
165
+ "content": " ",
166
+ "lstrip": false,
167
+ "normalized": true,
168
+ "rstrip": false,
169
+ "single_word": false,
170
+ "special": false
171
+ },
172
+ "50273": {
173
+ "content": " ",
174
+ "lstrip": false,
175
+ "normalized": true,
176
+ "rstrip": false,
177
+ "single_word": false,
178
+ "special": false
179
+ },
180
+ "50274": {
181
+ "content": " ",
182
+ "lstrip": false,
183
+ "normalized": true,
184
+ "rstrip": false,
185
+ "single_word": false,
186
+ "special": false
187
+ },
188
+ "50275": {
189
+ "content": " ",
190
+ "lstrip": false,
191
+ "normalized": true,
192
+ "rstrip": false,
193
+ "single_word": false,
194
+ "special": false
195
+ },
196
+ "50276": {
197
+ "content": " ",
198
+ "lstrip": false,
199
+ "normalized": true,
200
+ "rstrip": false,
201
+ "single_word": false,
202
+ "special": false
203
+ },
204
+ "50277": {
205
+ "content": "<fim_prefix>",
206
+ "lstrip": false,
207
+ "normalized": false,
208
+ "rstrip": false,
209
+ "single_word": false,
210
+ "special": true
211
+ },
212
+ "50278": {
213
+ "content": "<fim_middle>",
214
+ "lstrip": false,
215
+ "normalized": false,
216
+ "rstrip": false,
217
+ "single_word": false,
218
+ "special": true
219
+ },
220
+ "50279": {
221
+ "content": "<fim_suffix>",
222
+ "lstrip": false,
223
+ "normalized": false,
224
+ "rstrip": false,
225
+ "single_word": false,
226
+ "special": true
227
+ },
228
+ "50280": {
229
+ "content": "<fim_pad>",
230
+ "lstrip": false,
231
+ "normalized": false,
232
+ "rstrip": false,
233
+ "single_word": false,
234
+ "special": true
235
+ },
236
+ "50281": {
237
+ "content": "<filename>",
238
+ "lstrip": false,
239
+ "normalized": false,
240
+ "rstrip": false,
241
+ "single_word": false,
242
+ "special": true
243
+ },
244
+ "50282": {
245
+ "content": "<gh_stars>",
246
+ "lstrip": false,
247
+ "normalized": false,
248
+ "rstrip": false,
249
+ "single_word": false,
250
+ "special": true
251
+ },
252
+ "50283": {
253
+ "content": "<issue_start>",
254
+ "lstrip": false,
255
+ "normalized": false,
256
+ "rstrip": false,
257
+ "single_word": false,
258
+ "special": true
259
+ },
260
+ "50284": {
261
+ "content": "<issue_comment>",
262
+ "lstrip": false,
263
+ "normalized": false,
264
+ "rstrip": false,
265
+ "single_word": false,
266
+ "special": true
267
+ },
268
+ "50285": {
269
+ "content": "<issue_closed>",
270
+ "lstrip": false,
271
+ "normalized": false,
272
+ "rstrip": false,
273
+ "single_word": false,
274
+ "special": true
275
+ },
276
+ "50286": {
277
+ "content": "<jupyter_start>",
278
+ "lstrip": false,
279
+ "normalized": false,
280
+ "rstrip": false,
281
+ "single_word": false,
282
+ "special": true
283
+ },
284
+ "50287": {
285
+ "content": "<jupyter_text>",
286
+ "lstrip": false,
287
+ "normalized": false,
288
+ "rstrip": false,
289
+ "single_word": false,
290
+ "special": true
291
+ },
292
+ "50288": {
293
+ "content": "<jupyter_code>",
294
+ "lstrip": false,
295
+ "normalized": false,
296
+ "rstrip": false,
297
+ "single_word": false,
298
+ "special": true
299
+ },
300
+ "50289": {
301
+ "content": "<jupyter_output>",
302
+ "lstrip": false,
303
+ "normalized": false,
304
+ "rstrip": false,
305
+ "single_word": false,
306
+ "special": true
307
+ },
308
+ "50290": {
309
+ "content": "<empty_output>",
310
+ "lstrip": false,
311
+ "normalized": false,
312
+ "rstrip": false,
313
+ "single_word": false,
314
+ "special": true
315
+ },
316
+ "50291": {
317
+ "content": "<commit_before>",
318
+ "lstrip": false,
319
+ "normalized": false,
320
+ "rstrip": false,
321
+ "single_word": false,
322
+ "special": true
323
+ },
324
+ "50292": {
325
+ "content": "<commit_msg>",
326
+ "lstrip": false,
327
+ "normalized": false,
328
+ "rstrip": false,
329
+ "single_word": false,
330
+ "special": true
331
+ },
332
+ "50293": {
333
+ "content": "<commit_after>",
334
+ "lstrip": false,
335
+ "normalized": false,
336
+ "rstrip": false,
337
+ "single_word": false,
338
+ "special": true
339
+ },
340
+ "50294": {
341
+ "content": "<reponame>",
342
+ "lstrip": false,
343
+ "normalized": false,
344
+ "rstrip": false,
345
+ "single_word": false,
346
+ "special": true
347
+ },
348
+ "50295": {
349
+ "content": "<repo_continuation>",
350
+ "lstrip": false,
351
+ "normalized": false,
352
+ "rstrip": false,
353
+ "single_word": false,
354
+ "special": true
355
+ }
356
+ },
357
+ "bos_token": "<|endoftext|>",
358
+ "clean_up_tokenization_spaces": true,
359
+ "eos_token": "<|endoftext|>",
360
+ "model_max_length": 1000000000000000019884624838656,
361
+ "pad_token": "<|endoftext|>",
362
+ "tokenizer_class": "GPTNeoXTokenizer",
363
+ "unk_token": "<|endoftext|>"
364
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:68b0390762e68124e6dd1e3e5270f93fb197dd3f0c1e4a4e7585b613d25c8663
3
+ size 4792