TharunSivamani committed on
Commit 6a33f9a
1 Parent(s): 9a44312

final commit

app.py ADDED
@@ -0,0 +1,43 @@
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, logging
+ import gradio as gr
+
+ model_name = "microsoft/phi-2"
+ model = AutoModelForCausalLM.from_pretrained(
+     model_name,
+     trust_remote_code=True
+ )
+ model.config.use_cache = False
+
+ tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
+ tokenizer.pad_token = tokenizer.eos_token
+
+ adapter_path = 'checkpoint-500'
+ model.load_adapter(adapter_path)
+
+
+ def generate_context(prompt, tokens=300):
+     pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer, max_length=tokens)
+     sentence = "[INST] " + prompt + " [/INST]"
+     result = pipe(sentence)
+     text = result[0]['generated_text']
+
+     return text[len(sentence):]
+
+
+ examples = [
+     ["What is a large language model?", 250],
+     ["Explain the process of photosynthesis", 350]
+ ]
+
+ demo = gr.Interface(
+     fn=generate_context,
+     inputs=[
+         gr.Textbox(label="How may I help you ? 🤖"),
+         gr.Slider(200, 500, value=300, label="Sentence length", step=50)
+     ],
+     outputs="text",
+     examples=examples
+ )
+
+ demo.launch(debug=True)
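
The app attaches the LoRA weights from checkpoint-500 to the base phi-2 model via model.load_adapter, which relies on the transformers–peft integration. As a minimal sketch (not part of this commit, and assuming peft is installed), the same adapter can be attached explicitly through PeftModel:

    # Sketch only: an alternative way to attach the LoRA adapter saved in
    # checkpoint-500/ using the peft library instead of model.load_adapter().
    from peft import PeftModel
    from transformers import AutoModelForCausalLM, AutoTokenizer

    base = AutoModelForCausalLM.from_pretrained("microsoft/phi-2", trust_remote_code=True)
    tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
    tokenizer.pad_token = tokenizer.eos_token

    # PeftModel.from_pretrained wraps the base model with the adapter weights found
    # in the checkpoint directory (adapter_config.json + adapter_model.safetensors).
    model = PeftModel.from_pretrained(base, "checkpoint-500")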
checkpoint-500/adapter_config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "microsoft/phi-2",
+   "bias": "none",
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 16,
+   "lora_dropout": 0.1,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "r": 64,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "out_proj",
+     "fc1",
+     "fc2",
+     "Wqkv"
+   ],
+   "task_type": "CAUSAL_LM",
+   "use_rslora": false
+ }
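
adapter_config.json records the LoRA hyperparameters used for fine-tuning: rank 64, alpha 16, dropout 0.1, applied to phi-2's Wqkv, out_proj, fc1 and fc2 projections. For illustration only (not part of this commit), the corresponding peft LoraConfig would look roughly like this:

    # Illustrative only: a peft LoraConfig matching the adapter_config.json above.
    from peft import LoraConfig

    lora_config = LoraConfig(
        r=64,                      # "r": 64
        lora_alpha=16,             # "lora_alpha": 16
        lora_dropout=0.1,          # "lora_dropout": 0.1
        bias="none",               # "bias": "none"
        target_modules=["Wqkv", "out_proj", "fc1", "fc2"],  # phi-2 attention/MLP projections
        task_type="CAUSAL_LM",
    )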
checkpoint-500/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8caeeee0f23b9a47671fbccf15478b9fa8e060cfbfb67acc0cf46378db0fd557
+ size 335577832
checkpoint-500/added_tokens.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "\t\t": 50294,
+   "\t\t\t": 50293,
+   "\t\t\t\t": 50292,
+   "\t\t\t\t\t": 50291,
+   "\t\t\t\t\t\t": 50290,
+   "\t\t\t\t\t\t\t": 50289,
+   "\t\t\t\t\t\t\t\t": 50288,
+   "\t\t\t\t\t\t\t\t\t": 50287,
+   " ": 50286,
+   " ": 50285,
+   " ": 50284,
+   " ": 50283,
+   " ": 50282,
+   " ": 50281,
+   " ": 50280,
+   " ": 50279,
+   " ": 50278,
+   " ": 50277,
+   " ": 50276,
+   " ": 50275,
+   " ": 50274,
+   " ": 50273,
+   " ": 50272,
+   " ": 50271,
+   " ": 50270,
+   " ": 50269,
+   " ": 50268,
+   " ": 50267,
+   " ": 50266,
+   " ": 50265,
+   " ": 50264,
+   " ": 50263,
+   " ": 50262,
+   " ": 50261,
+   " ": 50260,
+   " ": 50259,
+   " ": 50258,
+   " ": 50257
+ }
checkpoint-500/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1875cba8e1106e45842022f5e44d5d21f417bff62681c1d7c7f7ed09789eec13
+ size 671247290
checkpoint-500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39967747590da2c81781b330356d7c2472975386834b3867c919aa9613d68522
+ size 14244
checkpoint-500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2c6178533defaae747c0ccac8e3ed8641e11ea8efe17d351f75ba8f017ad15b
+ size 1064
checkpoint-500/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|endoftext|>",
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
checkpoint-500/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-500/tokenizer_config.json ADDED
@@ -0,0 +1,324 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50277": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50278": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50279": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50280": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50281": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50282": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50283": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50284": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50285": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50286": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50287": {
+       "content": "\t\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50288": {
+       "content": "\t\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50289": {
+       "content": "\t\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50290": {
+       "content": "\t\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50291": {
+       "content": "\t\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50292": {
+       "content": "\t\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50293": {
+       "content": "\t\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50294": {
+       "content": "\t\t",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "CodeGenTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
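
tokenizer_config.json shows the stock phi-2 tokenizer (CodeGenTokenizer) with <|endoftext|> reused as bos/eos/pad/unk and the usual run-of-whitespace added tokens (ids 50257–50294). A quick sanity check, sketched here rather than taken from the commit, is to reload the tokenizer from the checkpoint directory:

    # Sketch: load the tokenizer saved alongside the adapter and confirm its special tokens.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoint-500")
    print(tok.__class__.__name__)        # expected: CodeGenTokenizer (or its fast variant)
    print(tok.eos_token, tok.pad_token)  # both "<|endoftext|>"
    print(tok.model_max_length)          # 2048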
checkpoint-500/trainer_state.json ADDED
@@ -0,0 +1,321 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 0.06364562118126273,
+   "eval_steps": 500,
+   "global_step": 500,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.0,
+       "learning_rate": 0.0002,
+       "loss": 1.3744,
+       "step": 10
+     },
+     {
+       "epoch": 0.0,
+       "learning_rate": 0.0002,
+       "loss": 1.4719,
+       "step": 20
+     },
+     {
+       "epoch": 0.0,
+       "learning_rate": 0.0002,
+       "loss": 1.8097,
+       "step": 30
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 1.868,
+       "step": 40
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 2.0557,
+       "step": 50
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 1.4231,
+       "step": 60
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 1.3828,
+       "step": 70
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 1.7284,
+       "step": 80
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 2.14,
+       "step": 90
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 2.3983,
+       "step": 100
+     },
+     {
+       "epoch": 0.01,
+       "learning_rate": 0.0002,
+       "loss": 1.8301,
+       "step": 110
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.2601,
+       "step": 120
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.4198,
+       "step": 130
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.8985,
+       "step": 140
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.7728,
+       "step": 150
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.1191,
+       "step": 160
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.3876,
+       "step": 170
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 1.528,
+       "step": 180
+     },
+     {
+       "epoch": 0.02,
+       "learning_rate": 0.0002,
+       "loss": 2.067,
+       "step": 190
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.8507,
+       "step": 200
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.4472,
+       "step": 210
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.5308,
+       "step": 220
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.2405,
+       "step": 230
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.8404,
+       "step": 240
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 2.0961,
+       "step": 250
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.1615,
+       "step": 260
+     },
+     {
+       "epoch": 0.03,
+       "learning_rate": 0.0002,
+       "loss": 1.1319,
+       "step": 270
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 1.3976,
+       "step": 280
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 1.8732,
+       "step": 290
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 2.1415,
+       "step": 300
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 1.7423,
+       "step": 310
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 1.5519,
+       "step": 320
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 1.4975,
+       "step": 330
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 2.2549,
+       "step": 340
+     },
+     {
+       "epoch": 0.04,
+       "learning_rate": 0.0002,
+       "loss": 2.3298,
+       "step": 350
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.1939,
+       "step": 360
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.7077,
+       "step": 370
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.9593,
+       "step": 380
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.9703,
+       "step": 390
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.958,
+       "step": 400
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.3889,
+       "step": 410
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.4975,
+       "step": 420
+     },
+     {
+       "epoch": 0.05,
+       "learning_rate": 0.0002,
+       "loss": 1.9328,
+       "step": 430
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 2.1274,
+       "step": 440
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 2.0546,
+       "step": 450
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 1.2364,
+       "step": 460
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 1.3099,
+       "step": 470
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 1.2687,
+       "step": 480
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 1.8152,
+       "step": 490
+     },
+     {
+       "epoch": 0.06,
+       "learning_rate": 0.0002,
+       "loss": 2.1546,
+       "step": 500
+     }
+   ],
+   "logging_steps": 10,
+   "max_steps": 500,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 1,
+   "save_steps": 100,
+   "total_flos": 1971890728980480.0,
+   "train_batch_size": 1,
+   "trial_name": null,
+   "trial_params": null
+ }
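
trainer_state.json logs 500 optimizer steps (about 0.06 of an epoch) at a flat 2e-4 learning rate, with losses logged every 10 steps and checkpoints saved every 100. A rough reconstruction of the TrainingArguments behind this schedule is sketched below; only the values visible above come from the file, everything else (output_dir, the constant scheduler choice) is an assumption:

    # Rough reconstruction of the training schedule implied by trainer_state.json.
    # Values marked as assumptions are not recorded in that file.
    from transformers import TrainingArguments

    training_args = TrainingArguments(
        output_dir="results",            # assumption: not recorded in trainer_state.json
        per_device_train_batch_size=1,   # "train_batch_size": 1
        learning_rate=2e-4,              # "learning_rate": 0.0002 at every logged step
        lr_scheduler_type="constant",    # assumption: the LR never changes in log_history
        max_steps=500,                   # "max_steps": 500
        logging_steps=10,                # "logging_steps": 10
        save_steps=100,                  # "save_steps": 100
    )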
checkpoint-500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b9b07b8e5c4061bdb8eb79cff35156deaa4e6c36cdeebb4ed0d92a2a3e988ee
+ size 4728
checkpoint-500/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
requirements.txt ADDED
@@ -0,0 +1,9 @@
+ transformers
+ trl
+ accelerate
+ git+https://github.com/huggingface/peft.git
+ datasets
+ bitsandbytes
+ scipy
+ einops
+ wandb