abgoswam committed on
Commit 3f96dbf
1 Parent(s): 95363f2

Training in progress, step 100

config.json CHANGED
@@ -1,28 +1,26 @@
 {
-  "_name_or_path": "/mnt/phyagimodelsdev/phi7b_lowerlr_phase2_hf",
+  "_name_or_path": "alignment-handbook/zephyr-7b-sft-full",
   "architectures": [
-    "LlamaForCausalLM"
+    "MistralForCausalLM"
   ],
-  "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
-  "intermediate_size": 11008,
-  "max_position_embeddings": 2048,
-  "model_type": "llama",
+  "intermediate_size": 14336,
+  "max_position_embeddings": 32768,
+  "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "pretraining_tp": 1,
+  "num_key_value_heads": 8,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
   "rope_theta": 10000.0,
+  "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.38.2",
   "use_cache": false,
-  "vocab_size": 32064
+  "vocab_size": 32000
 }
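Taken together, this diff swaps a Llama-architecture Phi checkpoint for the Mistral-architecture alignment-handbook/zephyr-7b-sft-full: grouped-query attention (32 query heads over 8 shared KV heads), a wider 14336-dim MLP, 32768-token positions with a 4096-token sliding window, and a 32000-entry vocabulary. A minimal sketch for confirming the new values, assuming Hub access; every field name and expected value below comes straight from the diff:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("alignment-handbook/zephyr-7b-sft-full")

print(config.model_type)               # mistral
print(config.num_attention_heads)      # 32 query heads ...
print(config.num_key_value_heads)      # ... sharing 8 KV heads (grouped-query attention)
print(config.sliding_window)           # 4096-token sliding-window attention
print(config.max_position_embeddings)  # 32768
```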
runs/Mar11_14-51-18_node-0/events.out.tfevents.1710194156.node-0.10561.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bac6cde78a64aee0c0dcabc822886e6c0ba8b5e1e7c9223e597759881900371c
+size 6067
runs/Mar11_14-59-16_node-0/events.out.tfevents.1710194390.node-0.16413.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6cb0dd77d31d61ecd6e76853b00186213989500447f90353445226c035c57c46
+size 12878
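These two TensorBoard event files are stored as Git LFS pointers: the repository tracks only the spec version, the sha256 object id, and the byte size, while the payload lives in LFS storage. A small sketch, assuming the real file has been pulled locally (the helper name is illustrative), for checking a downloaded file against its pointer:

```python
import hashlib
import os

def lfs_oid(path: str) -> str:
    """sha256 hex digest of the file contents, as recorded in the LFS pointer."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

path = "runs/Mar11_14-51-18_node-0/events.out.tfevents.1710194156.node-0.10561.0"
print(lfs_oid(path))          # should match the pointer's oid sha256:bac6cde7...
print(os.path.getsize(path))  # should match the pointer's size 6067
```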
special_tokens_map.json CHANGED
@@ -1,7 +1,4 @@
 {
-  "additional_special_tokens": [
-    "<|/inst|>"
-  ],
   "bos_token": {
     "content": "<s>",
     "lstrip": false,
@@ -10,13 +7,19 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|endoftext|>",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "</s>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|endoftext|>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "add_bos_token": false,
+  "add_bos_token": true,
   "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
@@ -22,327 +22,21 @@
       "content": "</s>",
       "lstrip": false,
       "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "32000": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32001": {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|step|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32003": {
-      "content": "<|function_output|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32004": {
-      "content": "<|tag|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32005": {
-      "content": "<|function_call|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32006": {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32007": {
-      "content": "<|end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32008": {
-      "content": "<|raw|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32009": {
-      "content": "<|continue|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32010": {
-      "content": "<|user|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32011": {
-      "content": "<|function_list|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32012": {
-      "content": "<|calc|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32013": {
-      "content": "<|code|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32014": {
-      "content": "<|/code|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32015": {
-      "content": "<|summary|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32016": {
-      "content": "<|resource|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32017": {
-      "content": "<|assistant_mask|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32018": {
-      "content": "<|start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32019": {
-      "content": "<|message|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32020": {
-      "content": "<|fim_prefix|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32021": {
-      "content": "<|fim_middle|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32022": {
-      "content": "<|fim_suffix|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32023": {
-      "content": "<|meta_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32024": {
-      "content": "<|ipynb_marker|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32025": {
-      "content": "<|diff_marker|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32026": {
-      "content": "<|ghissue|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32027": {
-      "content": "<|ghreview|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32028": {
-      "content": "<|disc_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32029": {
-      "content": "<|disc_sep|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32030": {
-      "content": "<|disc_thread|><|query|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32031": {
-      "content": "<|/query|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32032": {
-      "content": "<|data|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32033": {
-      "content": "<|/data|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32034": {
-      "content": "<|sys|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32035": {
-      "content": "<|/sys|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32036": {
-      "content": "<|inst|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32037": {
-      "content": "<|/inst|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
     }
   },
-  "additional_special_tokens": [
-    "<|/inst|>"
-  ],
+  "additional_special_tokens": [],
   "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{{'<|' + message['role'] + '|>' + '\n' + message['content'] + '<|end|>\n' }}{% endfor %}{{ eos_token }}",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "legacy": false,
+  "eos_token": "</s>",
+  "legacy": true,
   "model_max_length": 2048,
-  "pad_token": "<|endoftext|>",
-  "padding_side": "right",
+  "pad_token": "</s>",
   "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c64276fa240ff0eb70dcd5ff05e57bd4927eeba68b6be48a0acf69c3e5553e9c
-size 6200
+oid sha256:79977ddc382d5bbab94133509772dd71093425ed0571e054053369e997fddec0
+size 6136
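training_args.bin is a torch-pickled transformers TrainingArguments object, which is why the diff is just a new LFS hash and size whenever any hyperparameter changes. A sketch for inspecting it, assuming the file has been pulled from LFS:

```python
import torch

# Unpickle the TrainingArguments saved by the Trainer. Recent torch versions
# default to weights_only=True, so pass weights_only=False explicitly; only do
# this for files from a source you trust, since unpickling can run code.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate)
print(args.per_device_train_batch_size)
```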