Floriankidev committed on
Commit 9b0bc2a
1 Parent(s): f1a2c70

End of training

Files changed (5)
  1. README.md +1 -1
  2. all_results.json +10 -10
  3. eval_results.json +6 -6
  4. train_results.json +5 -5
  5. trainer_state.json +1983 -367
README.md CHANGED
@@ -32,7 +32,7 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [microsoft/swin-tiny-patch4-window7-224](https://huggingface.co/microsoft/swin-tiny-patch4-window7-224) on the imagefolder dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.6781
+ - Loss: 0.6838
  - Accuracy: 0.7968
 
  ## Model description
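
For reference, a minimal inference sketch for the checkpoint this "End of training" commit pushes. This is not part of the commit; the repo id below is only assumed from the checkpoint folder name recorded in trainer_state.json (`swin-tiny-patch4-window7-224-finetuned-eurosat`) and should be replaced with the actual model id on the Hub.

```python
# Minimal sketch (assumed repo id), not part of this commit.
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

repo_id = "Floriankidev/swin-tiny-patch4-window7-224-finetuned-eurosat"  # assumption: adjust to the real repo

processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModelForImageClassification.from_pretrained(repo_id)

image = Image.open("example.jpg")  # any RGB image from the target domain
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits

# Map the top logit back to the label names stored in the fine-tuned config.
print(model.config.id2label[logits.argmax(-1).item()])
```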
all_results.json CHANGED
@@ -1,13 +1,13 @@
  {
- "epoch": 4.982857142857143,
- "eval_accuracy": 0.7499688084840923,
- "eval_loss": 0.8480741381645203,
- "eval_runtime": 72.5483,
- "eval_samples_per_second": 110.478,
- "eval_steps_per_second": 3.46,
- "total_flos": 3.468170115610067e+18,
- "train_loss": 1.2575330436776537,
- "train_runtime": 2631.6311,
- "train_samples_per_second": 53.178,
+ "epoch": 14.948571428571428,
+ "eval_accuracy": 0.796756082345602,
+ "eval_loss": 0.6838445663452148,
+ "eval_runtime": 70.7289,
+ "eval_samples_per_second": 113.32,
+ "eval_steps_per_second": 3.549,
+ "total_flos": 1.0403963301155365e+19,
+ "train_loss": 0.9863547705729073,
+ "train_runtime": 7901.2254,
+ "train_samples_per_second": 53.135,
  "train_steps_per_second": 0.414
  }
eval_results.json CHANGED
@@ -1,8 +1,8 @@
  {
- "epoch": 4.982857142857143,
- "eval_accuracy": 0.7499688084840923,
- "eval_loss": 0.8480741381645203,
- "eval_runtime": 72.5483,
- "eval_samples_per_second": 110.478,
- "eval_steps_per_second": 3.46
+ "epoch": 14.948571428571428,
+ "eval_accuracy": 0.796756082345602,
+ "eval_loss": 0.6838445663452148,
+ "eval_runtime": 70.7289,
+ "eval_samples_per_second": 113.32,
+ "eval_steps_per_second": 3.549
  }
train_results.json CHANGED
@@ -1,8 +1,8 @@
  {
- "epoch": 4.982857142857143,
- "total_flos": 3.468170115610067e+18,
- "train_loss": 1.2575330436776537,
- "train_runtime": 2631.6311,
- "train_samples_per_second": 53.178,
+ "epoch": 14.948571428571428,
+ "total_flos": 1.0403963301155365e+19,
+ "train_loss": 0.9863547705729073,
+ "train_runtime": 7901.2254,
+ "train_samples_per_second": 53.135,
  "train_steps_per_second": 0.414
  }
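
The trainer_state.json diff that follows is large because `log_history` is a flat list of per-step training logs and per-epoch evaluation records. A minimal sketch of how those records can be read back out of the file (assuming it has been downloaded locally; field names are exactly the ones visible in the diff below):

```python
# Minimal sketch, not part of this commit: summarize the eval records in trainer_state.json.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Evaluation entries are the log_history records that carry eval_accuracy.
evals = [e for e in state["log_history"] if "eval_accuracy" in e]
for e in evals:
    print(f'epoch {e["epoch"]:.2f}  acc {e["eval_accuracy"]:.4f}  loss {e["eval_loss"]:.4f}')

print("best:", state["best_metric"], "at", state["best_model_checkpoint"])
```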
trainer_state.json CHANGED
@@ -1,835 +1,2451 @@
1
  {
2
- "best_metric": 0.7499688084840923,
3
- "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat\\checkpoint-1090",
4
- "epoch": 4.982857142857143,
5
  "eval_steps": 500,
6
- "global_step": 1090,
7
  "is_hyper_param_search": false,
8
  "is_local_process_zero": true,
9
  "is_world_process_zero": true,
10
  "log_history": [
11
  {
12
  "epoch": 0.045714285714285714,
13
- "grad_norm": 6.963473320007324,
14
- "learning_rate": 4.587155963302753e-06,
15
- "loss": 2.8055,
16
  "step": 10
17
  },
18
  {
19
  "epoch": 0.09142857142857143,
20
- "grad_norm": 11.695841789245605,
21
- "learning_rate": 9.174311926605506e-06,
22
- "loss": 2.7626,
23
  "step": 20
24
  },
25
  {
26
  "epoch": 0.13714285714285715,
27
- "grad_norm": 10.859987258911133,
28
- "learning_rate": 1.3761467889908258e-05,
29
- "loss": 2.6863,
30
  "step": 30
31
  },
32
  {
33
  "epoch": 0.18285714285714286,
34
- "grad_norm": 14.530256271362305,
35
- "learning_rate": 1.834862385321101e-05,
36
- "loss": 2.5932,
37
  "step": 40
38
  },
39
  {
40
  "epoch": 0.22857142857142856,
41
- "grad_norm": 22.234731674194336,
42
- "learning_rate": 2.2935779816513765e-05,
43
- "loss": 2.4006,
44
  "step": 50
45
  },
46
  {
47
  "epoch": 0.2742857142857143,
48
- "grad_norm": 21.946706771850586,
49
- "learning_rate": 2.7522935779816515e-05,
50
- "loss": 2.1545,
51
  "step": 60
52
  },
53
  {
54
  "epoch": 0.32,
55
- "grad_norm": 31.88918685913086,
56
- "learning_rate": 3.211009174311927e-05,
57
- "loss": 1.9868,
58
  "step": 70
59
  },
60
  {
61
  "epoch": 0.3657142857142857,
62
- "grad_norm": 17.8656063079834,
63
- "learning_rate": 3.669724770642202e-05,
64
- "loss": 1.8264,
65
  "step": 80
66
  },
67
  {
68
  "epoch": 0.4114285714285714,
69
- "grad_norm": 15.480560302734375,
70
- "learning_rate": 4.1284403669724776e-05,
71
- "loss": 1.8301,
72
  "step": 90
73
  },
74
  {
75
  "epoch": 0.45714285714285713,
76
- "grad_norm": 13.637527465820312,
77
- "learning_rate": 4.587155963302753e-05,
78
- "loss": 1.7708,
79
  "step": 100
80
  },
81
  {
82
  "epoch": 0.5028571428571429,
83
- "grad_norm": 16.25617218017578,
84
- "learning_rate": 4.994903160040775e-05,
85
- "loss": 1.7133,
86
  "step": 110
87
  },
88
  {
89
  "epoch": 0.5485714285714286,
90
- "grad_norm": 10.56847095489502,
91
- "learning_rate": 4.943934760448522e-05,
92
- "loss": 1.5611,
93
  "step": 120
94
  },
95
  {
96
  "epoch": 0.5942857142857143,
97
- "grad_norm": 16.05897331237793,
98
- "learning_rate": 4.892966360856269e-05,
99
- "loss": 1.6387,
100
  "step": 130
101
  },
102
  {
103
  "epoch": 0.64,
104
- "grad_norm": 17.286190032958984,
105
- "learning_rate": 4.8419979612640164e-05,
106
- "loss": 1.519,
107
  "step": 140
108
  },
109
  {
110
  "epoch": 0.6857142857142857,
111
- "grad_norm": 13.946508407592773,
112
- "learning_rate": 4.7910295616717635e-05,
113
- "loss": 1.4708,
114
  "step": 150
115
  },
116
  {
117
  "epoch": 0.7314285714285714,
118
- "grad_norm": 10.632002830505371,
119
- "learning_rate": 4.740061162079511e-05,
120
- "loss": 1.5119,
121
  "step": 160
122
  },
123
  {
124
  "epoch": 0.7771428571428571,
125
- "grad_norm": 11.511331558227539,
126
- "learning_rate": 4.6890927624872586e-05,
127
- "loss": 1.4476,
128
  "step": 170
129
  },
130
  {
131
  "epoch": 0.8228571428571428,
132
- "grad_norm": 9.265079498291016,
133
- "learning_rate": 4.638124362895006e-05,
134
- "loss": 1.4864,
135
  "step": 180
136
  },
137
  {
138
  "epoch": 0.8685714285714285,
139
- "grad_norm": 12.077649116516113,
140
- "learning_rate": 4.587155963302753e-05,
141
- "loss": 1.4542,
142
  "step": 190
143
  },
144
  {
145
  "epoch": 0.9142857142857143,
146
- "grad_norm": 8.974185943603516,
147
- "learning_rate": 4.5361875637104995e-05,
148
- "loss": 1.4505,
149
  "step": 200
150
  },
151
  {
152
  "epoch": 0.96,
153
- "grad_norm": 7.196322441101074,
154
- "learning_rate": 4.4852191641182466e-05,
155
- "loss": 1.4736,
156
  "step": 210
157
  },
158
  {
159
  "epoch": 0.9965714285714286,
160
- "eval_accuracy": 0.6177167810355584,
161
- "eval_loss": 1.2365009784698486,
162
- "eval_runtime": 74.6739,
163
- "eval_samples_per_second": 107.333,
164
- "eval_steps_per_second": 3.361,
165
  "step": 218
166
  },
167
  {
168
  "epoch": 1.0057142857142858,
169
- "grad_norm": 8.575163841247559,
170
- "learning_rate": 4.434250764525994e-05,
171
- "loss": 1.2932,
172
  "step": 220
173
  },
174
  {
175
  "epoch": 1.0514285714285714,
176
- "grad_norm": 9.491937637329102,
177
- "learning_rate": 4.383282364933741e-05,
178
- "loss": 1.3322,
179
  "step": 230
180
  },
181
  {
182
  "epoch": 1.0971428571428572,
183
- "grad_norm": 7.442806720733643,
184
- "learning_rate": 4.332313965341488e-05,
185
- "loss": 1.3634,
186
  "step": 240
187
  },
188
  {
189
  "epoch": 1.1428571428571428,
190
- "grad_norm": 11.594585418701172,
191
- "learning_rate": 4.281345565749236e-05,
192
- "loss": 1.3838,
193
  "step": 250
194
  },
195
  {
196
  "epoch": 1.1885714285714286,
197
- "grad_norm": 9.72411823272705,
198
- "learning_rate": 4.230377166156983e-05,
199
- "loss": 1.3695,
200
  "step": 260
201
  },
202
  {
203
  "epoch": 1.2342857142857142,
204
- "grad_norm": 8.765079498291016,
205
- "learning_rate": 4.1794087665647304e-05,
206
- "loss": 1.241,
207
  "step": 270
208
  },
209
  {
210
  "epoch": 1.28,
211
- "grad_norm": 7.949846267700195,
212
- "learning_rate": 4.1284403669724776e-05,
213
- "loss": 1.3099,
214
  "step": 280
215
  },
216
  {
217
  "epoch": 1.3257142857142856,
218
- "grad_norm": 7.360624313354492,
219
- "learning_rate": 4.077471967380224e-05,
220
- "loss": 1.3464,
221
  "step": 290
222
  },
223
  {
224
  "epoch": 1.3714285714285714,
225
- "grad_norm": 9.947721481323242,
226
- "learning_rate": 4.026503567787971e-05,
227
- "loss": 1.269,
228
  "step": 300
229
  },
230
  {
231
  "epoch": 1.4171428571428573,
232
- "grad_norm": 7.578132629394531,
233
- "learning_rate": 3.9755351681957185e-05,
234
- "loss": 1.2697,
235
  "step": 310
236
  },
237
  {
238
  "epoch": 1.4628571428571429,
239
- "grad_norm": 7.614137172698975,
240
- "learning_rate": 3.9245667686034656e-05,
241
- "loss": 1.2017,
242
  "step": 320
243
  },
244
  {
245
  "epoch": 1.5085714285714285,
246
- "grad_norm": 6.3952107429504395,
247
- "learning_rate": 3.8735983690112135e-05,
248
- "loss": 1.3131,
249
  "step": 330
250
  },
251
  {
252
  "epoch": 1.5542857142857143,
253
- "grad_norm": 11.049560546875,
254
- "learning_rate": 3.822629969418961e-05,
255
- "loss": 1.2547,
256
  "step": 340
257
  },
258
  {
259
  "epoch": 1.6,
260
- "grad_norm": 7.0264129638671875,
261
- "learning_rate": 3.771661569826708e-05,
262
- "loss": 1.2384,
263
  "step": 350
264
  },
265
  {
266
  "epoch": 1.6457142857142857,
267
- "grad_norm": 8.316610336303711,
268
- "learning_rate": 3.720693170234455e-05,
269
- "loss": 1.2679,
270
  "step": 360
271
  },
272
  {
273
  "epoch": 1.6914285714285713,
274
- "grad_norm": 7.154911041259766,
275
- "learning_rate": 3.669724770642202e-05,
276
- "loss": 1.3185,
277
  "step": 370
278
  },
279
  {
280
  "epoch": 1.737142857142857,
281
- "grad_norm": 7.319551944732666,
282
- "learning_rate": 3.6187563710499494e-05,
283
- "loss": 1.1742,
284
  "step": 380
285
  },
286
  {
287
  "epoch": 1.782857142857143,
288
- "grad_norm": 7.423411846160889,
289
- "learning_rate": 3.567787971457696e-05,
290
- "loss": 1.1981,
291
  "step": 390
292
  },
293
  {
294
  "epoch": 1.8285714285714287,
295
- "grad_norm": 9.163586616516113,
296
- "learning_rate": 3.516819571865443e-05,
297
- "loss": 1.2371,
298
  "step": 400
299
  },
300
  {
301
  "epoch": 1.8742857142857143,
302
- "grad_norm": 7.521251678466797,
303
- "learning_rate": 3.465851172273191e-05,
304
- "loss": 1.2717,
305
  "step": 410
306
  },
307
  {
308
  "epoch": 1.92,
309
- "grad_norm": 10.229044914245605,
310
- "learning_rate": 3.414882772680938e-05,
311
- "loss": 1.206,
312
  "step": 420
313
  },
314
  {
315
  "epoch": 1.9657142857142857,
316
- "grad_norm": 7.633674621582031,
317
- "learning_rate": 3.363914373088685e-05,
318
- "loss": 1.2161,
319
  "step": 430
320
  },
321
  {
322
  "epoch": 1.9977142857142858,
323
- "eval_accuracy": 0.6915782907049283,
324
- "eval_loss": 1.0157994031906128,
325
- "eval_runtime": 74.9081,
326
- "eval_samples_per_second": 106.998,
327
- "eval_steps_per_second": 3.351,
328
  "step": 437
329
  },
330
  {
331
  "epoch": 2.0114285714285716,
332
- "grad_norm": 8.755953788757324,
333
- "learning_rate": 3.3129459734964325e-05,
334
- "loss": 1.1613,
335
  "step": 440
336
  },
337
  {
338
  "epoch": 2.057142857142857,
339
- "grad_norm": 7.305307865142822,
340
- "learning_rate": 3.26197757390418e-05,
341
- "loss": 1.0986,
342
  "step": 450
343
  },
344
  {
345
  "epoch": 2.1028571428571428,
346
- "grad_norm": 8.31972885131836,
347
- "learning_rate": 3.211009174311927e-05,
348
- "loss": 1.1722,
349
  "step": 460
350
  },
351
  {
352
  "epoch": 2.1485714285714286,
353
- "grad_norm": 8.749483108520508,
354
- "learning_rate": 3.160040774719674e-05,
355
- "loss": 1.1203,
356
  "step": 470
357
  },
358
  {
359
  "epoch": 2.1942857142857144,
360
- "grad_norm": 6.05934476852417,
361
- "learning_rate": 3.1090723751274206e-05,
362
- "loss": 1.1225,
363
  "step": 480
364
  },
365
  {
366
  "epoch": 2.24,
367
- "grad_norm": 10.040249824523926,
368
- "learning_rate": 3.0581039755351684e-05,
369
- "loss": 1.2017,
370
  "step": 490
371
  },
372
  {
373
  "epoch": 2.2857142857142856,
374
- "grad_norm": 8.599287986755371,
375
- "learning_rate": 3.0071355759429153e-05,
376
- "loss": 1.1669,
377
  "step": 500
378
  },
379
  {
380
  "epoch": 2.3314285714285714,
381
- "grad_norm": 7.941746711730957,
382
- "learning_rate": 2.9561671763506628e-05,
383
- "loss": 1.1189,
384
  "step": 510
385
  },
386
  {
387
  "epoch": 2.3771428571428572,
388
- "grad_norm": 13.228888511657715,
389
- "learning_rate": 2.90519877675841e-05,
390
- "loss": 1.1882,
391
  "step": 520
392
  },
393
  {
394
  "epoch": 2.422857142857143,
395
- "grad_norm": 7.503291130065918,
396
- "learning_rate": 2.854230377166157e-05,
397
- "loss": 1.185,
398
  "step": 530
399
  },
400
  {
401
  "epoch": 2.4685714285714284,
402
- "grad_norm": 7.043280124664307,
403
- "learning_rate": 2.8032619775739043e-05,
404
- "loss": 1.1577,
405
  "step": 540
406
  },
407
  {
408
  "epoch": 2.5142857142857142,
409
- "grad_norm": 8.937540054321289,
410
- "learning_rate": 2.7522935779816515e-05,
411
- "loss": 1.1539,
412
  "step": 550
413
  },
414
  {
415
  "epoch": 2.56,
416
- "grad_norm": 8.076375961303711,
417
- "learning_rate": 2.701325178389399e-05,
418
- "loss": 1.1124,
419
  "step": 560
420
  },
421
  {
422
  "epoch": 2.605714285714286,
423
- "grad_norm": 7.4971442222595215,
424
- "learning_rate": 2.6503567787971462e-05,
425
- "loss": 1.0799,
426
  "step": 570
427
  },
428
  {
429
  "epoch": 2.6514285714285712,
430
- "grad_norm": 9.443109512329102,
431
- "learning_rate": 2.5993883792048927e-05,
432
- "loss": 1.0234,
433
  "step": 580
434
  },
435
  {
436
  "epoch": 2.697142857142857,
437
- "grad_norm": 6.811278820037842,
438
- "learning_rate": 2.5484199796126402e-05,
439
- "loss": 1.0423,
440
  "step": 590
441
  },
442
  {
443
  "epoch": 2.742857142857143,
444
- "grad_norm": 8.011152267456055,
445
- "learning_rate": 2.4974515800203874e-05,
446
- "loss": 1.1336,
447
  "step": 600
448
  },
449
  {
450
  "epoch": 2.7885714285714287,
451
- "grad_norm": 6.383072853088379,
452
- "learning_rate": 2.4464831804281346e-05,
453
- "loss": 1.1203,
454
  "step": 610
455
  },
456
  {
457
  "epoch": 2.8342857142857145,
458
- "grad_norm": 9.429741859436035,
459
- "learning_rate": 2.3955147808358818e-05,
460
- "loss": 1.091,
461
  "step": 620
462
  },
463
  {
464
  "epoch": 2.88,
465
- "grad_norm": 6.606307506561279,
466
- "learning_rate": 2.3445463812436293e-05,
467
- "loss": 1.058,
468
  "step": 630
469
  },
470
  {
471
  "epoch": 2.9257142857142857,
472
- "grad_norm": 8.834084510803223,
473
- "learning_rate": 2.2935779816513765e-05,
474
- "loss": 1.1266,
475
  "step": 640
476
  },
477
  {
478
  "epoch": 2.9714285714285715,
479
- "grad_norm": 7.674890518188477,
480
- "learning_rate": 2.2426095820591233e-05,
481
- "loss": 1.0807,
482
  "step": 650
483
  },
484
  {
485
  "epoch": 2.998857142857143,
486
- "eval_accuracy": 0.727386150966937,
487
- "eval_loss": 0.9072983264923096,
488
- "eval_runtime": 74.6334,
489
- "eval_samples_per_second": 107.392,
490
- "eval_steps_per_second": 3.363,
491
  "step": 656
492
  },
493
  {
494
  "epoch": 3.0171428571428573,
495
- "grad_norm": 7.3469624519348145,
496
- "learning_rate": 2.1916411824668705e-05,
497
- "loss": 1.0863,
498
  "step": 660
499
  },
500
  {
501
  "epoch": 3.0628571428571427,
502
- "grad_norm": 7.052463531494141,
503
- "learning_rate": 2.140672782874618e-05,
504
- "loss": 1.0188,
505
  "step": 670
506
  },
507
  {
508
  "epoch": 3.1085714285714285,
509
- "grad_norm": 7.651565074920654,
510
- "learning_rate": 2.0897043832823652e-05,
511
- "loss": 0.9874,
512
  "step": 680
513
  },
514
  {
515
  "epoch": 3.1542857142857144,
516
- "grad_norm": 11.283343315124512,
517
- "learning_rate": 2.038735983690112e-05,
518
- "loss": 1.0876,
519
  "step": 690
520
  },
521
  {
522
  "epoch": 3.2,
523
- "grad_norm": 7.787779331207275,
524
- "learning_rate": 1.9877675840978592e-05,
525
- "loss": 1.0479,
526
  "step": 700
527
  },
528
  {
529
  "epoch": 3.2457142857142856,
530
- "grad_norm": 8.140477180480957,
531
- "learning_rate": 1.9367991845056068e-05,
532
- "loss": 1.009,
533
  "step": 710
534
  },
535
  {
536
  "epoch": 3.2914285714285714,
537
- "grad_norm": 8.705915451049805,
538
- "learning_rate": 1.885830784913354e-05,
539
- "loss": 1.056,
540
  "step": 720
541
  },
542
  {
543
  "epoch": 3.337142857142857,
544
- "grad_norm": 7.134729385375977,
545
- "learning_rate": 1.834862385321101e-05,
546
- "loss": 1.0378,
547
  "step": 730
548
  },
549
  {
550
  "epoch": 3.382857142857143,
551
- "grad_norm": 7.588448524475098,
552
- "learning_rate": 1.783893985728848e-05,
553
- "loss": 1.0508,
554
  "step": 740
555
  },
556
  {
557
  "epoch": 3.4285714285714284,
558
- "grad_norm": 7.663401126861572,
559
- "learning_rate": 1.7329255861365955e-05,
560
- "loss": 1.0207,
561
  "step": 750
562
  },
563
  {
564
  "epoch": 3.474285714285714,
565
- "grad_norm": 6.939538955688477,
566
- "learning_rate": 1.6819571865443427e-05,
567
- "loss": 1.1108,
568
  "step": 760
569
  },
570
  {
571
  "epoch": 3.52,
572
- "grad_norm": 7.630512714385986,
573
- "learning_rate": 1.63098878695209e-05,
574
- "loss": 0.9975,
575
  "step": 770
576
  },
577
  {
578
  "epoch": 3.565714285714286,
579
- "grad_norm": 9.263322830200195,
580
- "learning_rate": 1.580020387359837e-05,
581
- "loss": 1.0372,
582
  "step": 780
583
  },
584
  {
585
  "epoch": 3.611428571428571,
586
- "grad_norm": 8.593291282653809,
587
- "learning_rate": 1.5290519877675842e-05,
588
- "loss": 1.0863,
589
  "step": 790
590
  },
591
  {
592
  "epoch": 3.657142857142857,
593
- "grad_norm": 7.475892543792725,
594
- "learning_rate": 1.4780835881753314e-05,
595
- "loss": 1.0021,
596
  "step": 800
597
  },
598
  {
599
  "epoch": 3.702857142857143,
600
- "grad_norm": 9.492719650268555,
601
- "learning_rate": 1.4271151885830786e-05,
602
- "loss": 1.0572,
603
  "step": 810
604
  },
605
  {
606
  "epoch": 3.7485714285714287,
607
- "grad_norm": 11.467538833618164,
608
- "learning_rate": 1.3761467889908258e-05,
609
- "loss": 1.0216,
610
  "step": 820
611
  },
612
  {
613
  "epoch": 3.7942857142857145,
614
- "grad_norm": 7.794005870819092,
615
- "learning_rate": 1.3251783893985731e-05,
616
- "loss": 1.0205,
617
  "step": 830
618
  },
619
  {
620
  "epoch": 3.84,
621
- "grad_norm": 6.822214603424072,
622
- "learning_rate": 1.2742099898063201e-05,
623
- "loss": 1.0281,
624
  "step": 840
625
  },
626
  {
627
  "epoch": 3.8857142857142857,
628
- "grad_norm": 6.747819423675537,
629
- "learning_rate": 1.2232415902140673e-05,
630
- "loss": 1.0237,
631
  "step": 850
632
  },
633
  {
634
  "epoch": 3.9314285714285715,
635
- "grad_norm": 7.018404006958008,
636
- "learning_rate": 1.1722731906218146e-05,
637
- "loss": 1.0446,
638
  "step": 860
639
  },
640
  {
641
  "epoch": 3.977142857142857,
642
- "grad_norm": 7.553677082061768,
643
- "learning_rate": 1.1213047910295617e-05,
644
- "loss": 0.9977,
645
  "step": 870
646
  },
647
  {
648
  "epoch": 4.0,
649
- "eval_accuracy": 0.7456019962570181,
650
- "eval_loss": 0.8551267385482788,
651
- "eval_runtime": 74.4356,
652
- "eval_samples_per_second": 107.677,
653
- "eval_steps_per_second": 3.372,
654
  "step": 875
655
  },
656
  {
657
  "epoch": 4.022857142857143,
658
- "grad_norm": 8.310098648071289,
659
- "learning_rate": 1.070336391437309e-05,
660
- "loss": 1.0626,
661
  "step": 880
662
  },
663
  {
664
  "epoch": 4.0685714285714285,
665
- "grad_norm": 7.971031665802002,
666
- "learning_rate": 1.019367991845056e-05,
667
- "loss": 1.0294,
668
  "step": 890
669
  },
670
  {
671
  "epoch": 4.114285714285714,
672
- "grad_norm": 8.742574691772461,
673
- "learning_rate": 9.683995922528034e-06,
674
- "loss": 1.0401,
675
  "step": 900
676
  },
677
  {
678
  "epoch": 4.16,
679
- "grad_norm": 7.5797553062438965,
680
- "learning_rate": 9.174311926605506e-06,
681
- "loss": 1.0194,
682
  "step": 910
683
  },
684
  {
685
  "epoch": 4.2057142857142855,
686
- "grad_norm": 10.30631160736084,
687
- "learning_rate": 8.664627930682977e-06,
688
- "loss": 1.0334,
689
  "step": 920
690
  },
691
  {
692
  "epoch": 4.251428571428572,
693
- "grad_norm": 8.128324508666992,
694
- "learning_rate": 8.15494393476045e-06,
695
- "loss": 1.0027,
696
  "step": 930
697
  },
698
  {
699
  "epoch": 4.297142857142857,
700
- "grad_norm": 7.413013935089111,
701
- "learning_rate": 7.645259938837921e-06,
702
- "loss": 0.9981,
703
  "step": 940
704
  },
705
  {
706
  "epoch": 4.3428571428571425,
707
- "grad_norm": 7.2544426918029785,
708
- "learning_rate": 7.135575942915393e-06,
709
- "loss": 0.9954,
710
  "step": 950
711
  },
712
  {
713
  "epoch": 4.388571428571429,
714
- "grad_norm": 6.764856815338135,
715
- "learning_rate": 6.6258919469928655e-06,
716
- "loss": 0.9712,
717
  "step": 960
718
  },
719
  {
720
  "epoch": 4.434285714285714,
721
- "grad_norm": 7.096611022949219,
722
- "learning_rate": 6.1162079510703365e-06,
723
- "loss": 0.9646,
724
  "step": 970
725
  },
726
  {
727
  "epoch": 4.48,
728
- "grad_norm": 7.685801982879639,
729
- "learning_rate": 5.606523955147808e-06,
730
- "loss": 1.0362,
731
  "step": 980
732
  },
733
  {
734
  "epoch": 4.525714285714286,
735
- "grad_norm": 7.416136741638184,
736
- "learning_rate": 5.09683995922528e-06,
737
- "loss": 1.0596,
738
  "step": 990
739
  },
740
  {
741
  "epoch": 4.571428571428571,
742
- "grad_norm": 7.128796100616455,
743
- "learning_rate": 4.587155963302753e-06,
744
- "loss": 0.9479,
745
  "step": 1000
746
  },
747
  {
748
  "epoch": 4.617142857142857,
749
- "grad_norm": 6.152943134307861,
750
- "learning_rate": 4.077471967380225e-06,
751
- "loss": 0.9503,
752
  "step": 1010
753
  },
754
  {
755
  "epoch": 4.662857142857143,
756
- "grad_norm": 8.12887191772461,
757
- "learning_rate": 3.5677879714576964e-06,
758
- "loss": 1.0071,
759
  "step": 1020
760
  },
761
  {
762
  "epoch": 4.708571428571428,
763
- "grad_norm": 5.702174186706543,
764
- "learning_rate": 3.0581039755351682e-06,
765
- "loss": 1.0149,
766
  "step": 1030
767
  },
768
  {
769
  "epoch": 4.7542857142857144,
770
- "grad_norm": 6.921694278717041,
771
- "learning_rate": 2.54841997961264e-06,
772
- "loss": 0.981,
773
  "step": 1040
774
  },
775
  {
776
  "epoch": 4.8,
777
- "grad_norm": 9.122147560119629,
778
- "learning_rate": 2.0387359836901123e-06,
779
- "loss": 0.9939,
780
  "step": 1050
781
  },
782
  {
783
  "epoch": 4.845714285714286,
784
- "grad_norm": 8.022086143493652,
785
- "learning_rate": 1.5290519877675841e-06,
786
- "loss": 0.8968,
787
  "step": 1060
788
  },
789
  {
790
  "epoch": 4.8914285714285715,
791
- "grad_norm": 7.508928298950195,
792
- "learning_rate": 1.0193679918450562e-06,
793
- "loss": 0.9698,
794
  "step": 1070
795
  },
796
  {
797
  "epoch": 4.937142857142857,
798
- "grad_norm": 9.233248710632324,
799
- "learning_rate": 5.096839959225281e-07,
800
- "loss": 0.9584,
801
  "step": 1080
802
  },
803
  {
804
  "epoch": 4.982857142857143,
805
- "grad_norm": 7.491491794586182,
806
- "learning_rate": 0.0,
807
- "loss": 0.9737,
808
  "step": 1090
809
  },
810
  {
811
- "epoch": 4.982857142857143,
812
- "eval_accuracy": 0.7499688084840923,
813
- "eval_loss": 0.8480741381645203,
814
- "eval_runtime": 71.0957,
815
- "eval_samples_per_second": 112.735,
816
- "eval_steps_per_second": 3.53,
817
- "step": 1090
818
  },
819
  {
820
- "epoch": 4.982857142857143,
821
- "step": 1090,
822
- "total_flos": 3.468170115610067e+18,
823
- "train_loss": 1.2575330436776537,
824
- "train_runtime": 2631.6311,
825
- "train_samples_per_second": 53.178,
826
  "train_steps_per_second": 0.414
827
  }
828
  ],
829
  "logging_steps": 10,
830
- "max_steps": 1090,
831
  "num_input_tokens_seen": 0,
832
- "num_train_epochs": 5,
833
  "save_steps": 500,
834
  "stateful_callbacks": {
835
  "TrainerControl": {
@@ -843,7 +2459,7 @@
843
  "attributes": {}
844
  }
845
  },
846
- "total_flos": 3.468170115610067e+18,
847
  "train_batch_size": 32,
848
  "trial_name": null,
849
  "trial_params": null
 
1
  {
2
+ "best_metric": 0.796756082345602,
3
+ "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat\\checkpoint-3062",
4
+ "epoch": 14.948571428571428,
5
  "eval_steps": 500,
6
+ "global_step": 3270,
7
  "is_hyper_param_search": false,
8
  "is_local_process_zero": true,
9
  "is_world_process_zero": true,
10
  "log_history": [
11
  {
12
  "epoch": 0.045714285714285714,
13
+ "grad_norm": 13.994832992553711,
14
+ "learning_rate": 1.5290519877675841e-06,
15
+ "loss": 2.7494,
16
  "step": 10
17
  },
18
  {
19
  "epoch": 0.09142857142857143,
20
+ "grad_norm": 7.420119762420654,
21
+ "learning_rate": 3.0581039755351682e-06,
22
+ "loss": 2.7395,
23
  "step": 20
24
  },
25
  {
26
  "epoch": 0.13714285714285715,
27
+ "grad_norm": 5.467243194580078,
28
+ "learning_rate": 4.587155963302753e-06,
29
+ "loss": 2.7049,
30
  "step": 30
31
  },
32
  {
33
  "epoch": 0.18285714285714286,
34
+ "grad_norm": 7.835602760314941,
35
+ "learning_rate": 6.1162079510703365e-06,
36
+ "loss": 2.6367,
37
  "step": 40
38
  },
39
  {
40
  "epoch": 0.22857142857142856,
41
+ "grad_norm": 13.763399124145508,
42
+ "learning_rate": 7.645259938837921e-06,
43
+ "loss": 2.5592,
44
  "step": 50
45
  },
46
  {
47
  "epoch": 0.2742857142857143,
48
+ "grad_norm": 10.583270072937012,
49
+ "learning_rate": 9.174311926605506e-06,
50
+ "loss": 2.4444,
51
  "step": 60
52
  },
53
  {
54
  "epoch": 0.32,
55
+ "grad_norm": 19.168621063232422,
56
+ "learning_rate": 1.070336391437309e-05,
57
+ "loss": 2.3336,
58
  "step": 70
59
  },
60
  {
61
  "epoch": 0.3657142857142857,
62
+ "grad_norm": 20.13259506225586,
63
+ "learning_rate": 1.2232415902140673e-05,
64
+ "loss": 2.1877,
65
  "step": 80
66
  },
67
  {
68
  "epoch": 0.4114285714285714,
69
+ "grad_norm": 28.141359329223633,
70
+ "learning_rate": 1.3761467889908258e-05,
71
+ "loss": 2.0578,
72
  "step": 90
73
  },
74
  {
75
  "epoch": 0.45714285714285713,
76
+ "grad_norm": 36.60035705566406,
77
+ "learning_rate": 1.5290519877675842e-05,
78
+ "loss": 1.9729,
79
  "step": 100
80
  },
81
  {
82
  "epoch": 0.5028571428571429,
83
+ "grad_norm": 27.167247772216797,
84
+ "learning_rate": 1.6819571865443427e-05,
85
+ "loss": 1.884,
86
  "step": 110
87
  },
88
  {
89
  "epoch": 0.5485714285714286,
90
+ "grad_norm": 21.56601905822754,
91
+ "learning_rate": 1.834862385321101e-05,
92
+ "loss": 1.7821,
93
  "step": 120
94
  },
95
  {
96
  "epoch": 0.5942857142857143,
97
+ "grad_norm": 18.523468017578125,
98
+ "learning_rate": 1.9877675840978592e-05,
99
+ "loss": 1.8148,
100
  "step": 130
101
  },
102
  {
103
  "epoch": 0.64,
104
+ "grad_norm": 14.976977348327637,
105
+ "learning_rate": 2.140672782874618e-05,
106
+ "loss": 1.6829,
107
  "step": 140
108
  },
109
  {
110
  "epoch": 0.6857142857142857,
111
+ "grad_norm": 14.441283226013184,
112
+ "learning_rate": 2.2935779816513765e-05,
113
+ "loss": 1.6312,
114
  "step": 150
115
  },
116
  {
117
  "epoch": 0.7314285714285714,
118
+ "grad_norm": 17.649080276489258,
119
+ "learning_rate": 2.4464831804281346e-05,
120
+ "loss": 1.6482,
121
  "step": 160
122
  },
123
  {
124
  "epoch": 0.7771428571428571,
125
+ "grad_norm": 19.691490173339844,
126
+ "learning_rate": 2.5993883792048927e-05,
127
+ "loss": 1.5693,
128
  "step": 170
129
  },
130
  {
131
  "epoch": 0.8228571428571428,
132
+ "grad_norm": 12.245804786682129,
133
+ "learning_rate": 2.7522935779816515e-05,
134
+ "loss": 1.6267,
135
  "step": 180
136
  },
137
  {
138
  "epoch": 0.8685714285714285,
139
+ "grad_norm": 24.526466369628906,
140
+ "learning_rate": 2.90519877675841e-05,
141
+ "loss": 1.5964,
142
  "step": 190
143
  },
144
  {
145
  "epoch": 0.9142857142857143,
146
+ "grad_norm": 14.706289291381836,
147
+ "learning_rate": 3.0581039755351684e-05,
148
+ "loss": 1.6017,
149
  "step": 200
150
  },
151
  {
152
  "epoch": 0.96,
153
+ "grad_norm": 12.513092041015625,
154
+ "learning_rate": 3.211009174311927e-05,
155
+ "loss": 1.5891,
156
  "step": 210
157
  },
158
  {
159
  "epoch": 0.9965714285714286,
160
+ "eval_accuracy": 0.5723019338739863,
161
+ "eval_loss": 1.383291244506836,
162
+ "eval_runtime": 74.6009,
163
+ "eval_samples_per_second": 107.438,
164
+ "eval_steps_per_second": 3.365,
165
  "step": 218
166
  },
167
  {
168
  "epoch": 1.0057142857142858,
169
+ "grad_norm": 9.570089340209961,
170
+ "learning_rate": 3.363914373088685e-05,
171
+ "loss": 1.4337,
172
  "step": 220
173
  },
174
  {
175
  "epoch": 1.0514285714285714,
176
+ "grad_norm": 29.601459503173828,
177
+ "learning_rate": 3.516819571865443e-05,
178
+ "loss": 1.4987,
179
  "step": 230
180
  },
181
  {
182
  "epoch": 1.0971428571428572,
183
+ "grad_norm": 14.568526268005371,
184
+ "learning_rate": 3.669724770642202e-05,
185
+ "loss": 1.4594,
186
  "step": 240
187
  },
188
  {
189
  "epoch": 1.1428571428571428,
190
+ "grad_norm": 9.901498794555664,
191
+ "learning_rate": 3.822629969418961e-05,
192
+ "loss": 1.4964,
193
  "step": 250
194
  },
195
  {
196
  "epoch": 1.1885714285714286,
197
+ "grad_norm": 11.662745475769043,
198
+ "learning_rate": 3.9755351681957185e-05,
199
+ "loss": 1.4872,
200
  "step": 260
201
  },
202
  {
203
  "epoch": 1.2342857142857142,
204
+ "grad_norm": 8.338129043579102,
205
+ "learning_rate": 4.1284403669724776e-05,
206
+ "loss": 1.3592,
207
  "step": 270
208
  },
209
  {
210
  "epoch": 1.28,
211
+ "grad_norm": 12.640501022338867,
212
+ "learning_rate": 4.281345565749236e-05,
213
+ "loss": 1.4165,
214
  "step": 280
215
  },
216
  {
217
  "epoch": 1.3257142857142856,
218
+ "grad_norm": 9.851750373840332,
219
+ "learning_rate": 4.434250764525994e-05,
220
+ "loss": 1.4671,
221
  "step": 290
222
  },
223
  {
224
  "epoch": 1.3714285714285714,
225
+ "grad_norm": 9.80663013458252,
226
+ "learning_rate": 4.587155963302753e-05,
227
+ "loss": 1.4069,
228
  "step": 300
229
  },
230
  {
231
  "epoch": 1.4171428571428573,
232
+ "grad_norm": 10.75167179107666,
233
+ "learning_rate": 4.740061162079511e-05,
234
+ "loss": 1.3966,
235
  "step": 310
236
  },
237
  {
238
  "epoch": 1.4628571428571429,
239
+ "grad_norm": 9.546366691589355,
240
+ "learning_rate": 4.892966360856269e-05,
241
+ "loss": 1.3365,
242
  "step": 320
243
  },
244
  {
245
  "epoch": 1.5085714285714285,
246
+ "grad_norm": 11.726640701293945,
247
+ "learning_rate": 4.994903160040775e-05,
248
+ "loss": 1.4017,
249
  "step": 330
250
  },
251
  {
252
  "epoch": 1.5542857142857143,
253
+ "grad_norm": 9.12558650970459,
254
+ "learning_rate": 4.977913693510024e-05,
255
+ "loss": 1.3402,
256
  "step": 340
257
  },
258
  {
259
  "epoch": 1.6,
260
+ "grad_norm": 7.8366804122924805,
261
+ "learning_rate": 4.9609242269792734e-05,
262
+ "loss": 1.3675,
263
  "step": 350
264
  },
265
  {
266
  "epoch": 1.6457142857142857,
267
+ "grad_norm": 7.4768805503845215,
268
+ "learning_rate": 4.943934760448522e-05,
269
+ "loss": 1.3559,
270
  "step": 360
271
  },
272
  {
273
  "epoch": 1.6914285714285713,
274
+ "grad_norm": 8.65304183959961,
275
+ "learning_rate": 4.926945293917771e-05,
276
+ "loss": 1.3973,
277
  "step": 370
278
  },
279
  {
280
  "epoch": 1.737142857142857,
281
+ "grad_norm": 8.800814628601074,
282
+ "learning_rate": 4.9099558273870206e-05,
283
+ "loss": 1.2501,
284
  "step": 380
285
  },
286
  {
287
  "epoch": 1.782857142857143,
288
+ "grad_norm": 14.658060073852539,
289
+ "learning_rate": 4.892966360856269e-05,
290
+ "loss": 1.287,
291
  "step": 390
292
  },
293
  {
294
  "epoch": 1.8285714285714287,
295
+ "grad_norm": 9.586373329162598,
296
+ "learning_rate": 4.8759768943255185e-05,
297
+ "loss": 1.3249,
298
  "step": 400
299
  },
300
  {
301
  "epoch": 1.8742857142857143,
302
+ "grad_norm": 12.57496166229248,
303
+ "learning_rate": 4.858987427794767e-05,
304
+ "loss": 1.3564,
305
  "step": 410
306
  },
307
  {
308
  "epoch": 1.92,
309
+ "grad_norm": 9.330473899841309,
310
+ "learning_rate": 4.8419979612640164e-05,
311
+ "loss": 1.2967,
312
  "step": 420
313
  },
314
  {
315
  "epoch": 1.9657142857142857,
316
+ "grad_norm": 8.545197486877441,
317
+ "learning_rate": 4.8250084947332657e-05,
318
+ "loss": 1.2997,
319
  "step": 430
320
  },
321
  {
322
  "epoch": 1.9977142857142858,
323
+ "eval_accuracy": 0.6699937616968185,
324
+ "eval_loss": 1.0830751657485962,
325
+ "eval_runtime": 74.7614,
326
+ "eval_samples_per_second": 107.208,
327
+ "eval_steps_per_second": 3.357,
328
  "step": 437
329
  },
330
  {
331
  "epoch": 2.0114285714285716,
332
+ "grad_norm": 7.490045547485352,
333
+ "learning_rate": 4.808019028202514e-05,
334
+ "loss": 1.258,
335
  "step": 440
336
  },
337
  {
338
  "epoch": 2.057142857142857,
339
+ "grad_norm": 8.006925582885742,
340
+ "learning_rate": 4.7910295616717635e-05,
341
+ "loss": 1.168,
342
  "step": 450
343
  },
344
  {
345
  "epoch": 2.1028571428571428,
346
+ "grad_norm": 12.528056144714355,
347
+ "learning_rate": 4.774040095141013e-05,
348
+ "loss": 1.2195,
349
  "step": 460
350
  },
351
  {
352
  "epoch": 2.1485714285714286,
353
+ "grad_norm": 9.448918342590332,
354
+ "learning_rate": 4.7570506286102614e-05,
355
+ "loss": 1.2242,
356
  "step": 470
357
  },
358
  {
359
  "epoch": 2.1942857142857144,
360
+ "grad_norm": 7.972462177276611,
361
+ "learning_rate": 4.740061162079511e-05,
362
+ "loss": 1.1925,
363
  "step": 480
364
  },
365
  {
366
  "epoch": 2.24,
367
+ "grad_norm": 10.068849563598633,
368
+ "learning_rate": 4.72307169554876e-05,
369
+ "loss": 1.2521,
370
  "step": 490
371
  },
372
  {
373
  "epoch": 2.2857142857142856,
374
+ "grad_norm": 9.536931037902832,
375
+ "learning_rate": 4.706082229018009e-05,
376
+ "loss": 1.2375,
377
  "step": 500
378
  },
379
  {
380
  "epoch": 2.3314285714285714,
381
+ "grad_norm": 7.058177471160889,
382
+ "learning_rate": 4.6890927624872586e-05,
383
+ "loss": 1.1762,
384
  "step": 510
385
  },
386
  {
387
  "epoch": 2.3771428571428572,
388
+ "grad_norm": 8.200982093811035,
389
+ "learning_rate": 4.672103295956507e-05,
390
+ "loss": 1.259,
391
  "step": 520
392
  },
393
  {
394
  "epoch": 2.422857142857143,
395
+ "grad_norm": 6.11261510848999,
396
+ "learning_rate": 4.6551138294257565e-05,
397
+ "loss": 1.2291,
398
  "step": 530
399
  },
400
  {
401
  "epoch": 2.4685714285714284,
402
+ "grad_norm": 8.771502494812012,
403
+ "learning_rate": 4.638124362895006e-05,
404
+ "loss": 1.2237,
405
  "step": 540
406
  },
407
  {
408
  "epoch": 2.5142857142857142,
409
+ "grad_norm": 11.995628356933594,
410
+ "learning_rate": 4.6211348963642544e-05,
411
+ "loss": 1.2349,
412
  "step": 550
413
  },
414
  {
415
  "epoch": 2.56,
416
+ "grad_norm": 8.15999698638916,
417
+ "learning_rate": 4.604145429833504e-05,
418
+ "loss": 1.1786,
419
  "step": 560
420
  },
421
  {
422
  "epoch": 2.605714285714286,
423
+ "grad_norm": 7.252849578857422,
424
+ "learning_rate": 4.587155963302753e-05,
425
+ "loss": 1.1285,
426
  "step": 570
427
  },
428
  {
429
  "epoch": 2.6514285714285712,
430
+ "grad_norm": 8.760148048400879,
431
+ "learning_rate": 4.5701664967720016e-05,
432
+ "loss": 1.074,
433
  "step": 580
434
  },
435
  {
436
  "epoch": 2.697142857142857,
437
+ "grad_norm": 5.591124057769775,
438
+ "learning_rate": 4.553177030241251e-05,
439
+ "loss": 1.104,
440
  "step": 590
441
  },
442
  {
443
  "epoch": 2.742857142857143,
444
+ "grad_norm": 7.468674659729004,
445
+ "learning_rate": 4.5361875637104995e-05,
446
+ "loss": 1.1936,
447
  "step": 600
448
  },
449
  {
450
  "epoch": 2.7885714285714287,
451
+ "grad_norm": 7.683737754821777,
452
+ "learning_rate": 4.519198097179749e-05,
453
+ "loss": 1.1574,
454
  "step": 610
455
  },
456
  {
457
  "epoch": 2.8342857142857145,
458
+ "grad_norm": 12.986687660217285,
459
+ "learning_rate": 4.502208630648998e-05,
460
+ "loss": 1.1513,
461
  "step": 620
462
  },
463
  {
464
  "epoch": 2.88,
465
+ "grad_norm": 8.037307739257812,
466
+ "learning_rate": 4.4852191641182466e-05,
467
+ "loss": 1.1537,
468
  "step": 630
469
  },
470
  {
471
  "epoch": 2.9257142857142857,
472
+ "grad_norm": 8.727031707763672,
473
+ "learning_rate": 4.468229697587496e-05,
474
+ "loss": 1.1823,
475
  "step": 640
476
  },
477
  {
478
  "epoch": 2.9714285714285715,
479
+ "grad_norm": 7.474994659423828,
480
+ "learning_rate": 4.451240231056745e-05,
481
+ "loss": 1.1166,
482
  "step": 650
483
  },
484
  {
485
  "epoch": 2.998857142857143,
486
+ "eval_accuracy": 0.6958203368683719,
487
+ "eval_loss": 0.9937148094177246,
488
+ "eval_runtime": 75.7204,
489
+ "eval_samples_per_second": 105.85,
490
+ "eval_steps_per_second": 3.315,
491
  "step": 656
492
  },
493
  {
494
  "epoch": 3.0171428571428573,
495
+ "grad_norm": 5.9896016120910645,
496
+ "learning_rate": 4.434250764525994e-05,
497
+ "loss": 1.1378,
498
  "step": 660
499
  },
500
  {
501
  "epoch": 3.0628571428571427,
502
+ "grad_norm": 5.852768898010254,
503
+ "learning_rate": 4.417261297995243e-05,
504
+ "loss": 1.0542,
505
  "step": 670
506
  },
507
  {
508
  "epoch": 3.1085714285714285,
509
+ "grad_norm": 7.916086673736572,
510
+ "learning_rate": 4.400271831464492e-05,
511
+ "loss": 1.0331,
512
  "step": 680
513
  },
514
  {
515
  "epoch": 3.1542857142857144,
516
+ "grad_norm": 8.959929466247559,
517
+ "learning_rate": 4.383282364933741e-05,
518
+ "loss": 1.0943,
519
  "step": 690
520
  },
521
  {
522
  "epoch": 3.2,
523
+ "grad_norm": 6.69697904586792,
524
+ "learning_rate": 4.36629289840299e-05,
525
+ "loss": 1.0978,
526
  "step": 700
527
  },
528
  {
529
  "epoch": 3.2457142857142856,
530
+ "grad_norm": 7.420085430145264,
531
+ "learning_rate": 4.349303431872239e-05,
532
+ "loss": 1.0588,
533
  "step": 710
534
  },
535
  {
536
  "epoch": 3.2914285714285714,
537
+ "grad_norm": 10.829694747924805,
538
+ "learning_rate": 4.332313965341488e-05,
539
+ "loss": 1.0838,
540
  "step": 720
541
  },
542
  {
543
  "epoch": 3.337142857142857,
544
+ "grad_norm": 6.954169273376465,
545
+ "learning_rate": 4.3153244988107375e-05,
546
+ "loss": 1.1126,
547
  "step": 730
548
  },
549
  {
550
  "epoch": 3.382857142857143,
551
+ "grad_norm": 6.538149833679199,
552
+ "learning_rate": 4.298335032279987e-05,
553
+ "loss": 1.1049,
554
  "step": 740
555
  },
556
  {
557
  "epoch": 3.4285714285714284,
558
+ "grad_norm": 11.640555381774902,
559
+ "learning_rate": 4.281345565749236e-05,
560
+ "loss": 1.0577,
561
  "step": 750
562
  },
563
  {
564
  "epoch": 3.474285714285714,
565
+ "grad_norm": 7.022554397583008,
566
+ "learning_rate": 4.264356099218485e-05,
567
+ "loss": 1.1454,
568
  "step": 760
569
  },
570
  {
571
  "epoch": 3.52,
572
+ "grad_norm": 9.619643211364746,
573
+ "learning_rate": 4.247366632687734e-05,
574
+ "loss": 1.0374,
575
  "step": 770
576
  },
577
  {
578
  "epoch": 3.565714285714286,
579
+ "grad_norm": 8.229997634887695,
580
+ "learning_rate": 4.230377166156983e-05,
581
+ "loss": 1.0354,
582
  "step": 780
583
  },
584
  {
585
  "epoch": 3.611428571428571,
586
+ "grad_norm": 8.94089126586914,
587
+ "learning_rate": 4.213387699626232e-05,
588
+ "loss": 1.1144,
589
  "step": 790
590
  },
591
  {
592
  "epoch": 3.657142857142857,
593
+ "grad_norm": 7.949928283691406,
594
+ "learning_rate": 4.196398233095481e-05,
595
+ "loss": 1.0678,
596
  "step": 800
597
  },
598
  {
599
  "epoch": 3.702857142857143,
600
+ "grad_norm": 6.979115009307861,
601
+ "learning_rate": 4.1794087665647304e-05,
602
+ "loss": 1.0945,
603
  "step": 810
604
  },
605
  {
606
  "epoch": 3.7485714285714287,
607
+ "grad_norm": 8.669636726379395,
608
+ "learning_rate": 4.162419300033979e-05,
609
+ "loss": 1.0521,
610
  "step": 820
611
  },
612
  {
613
  "epoch": 3.7942857142857145,
614
+ "grad_norm": 6.849328994750977,
615
+ "learning_rate": 4.145429833503228e-05,
616
+ "loss": 1.0513,
617
  "step": 830
618
  },
619
  {
620
  "epoch": 3.84,
621
+ "grad_norm": 7.107924461364746,
622
+ "learning_rate": 4.1284403669724776e-05,
623
+ "loss": 1.0477,
624
  "step": 840
625
  },
626
  {
627
  "epoch": 3.8857142857142857,
628
+ "grad_norm": 6.533900737762451,
629
+ "learning_rate": 4.111450900441726e-05,
630
+ "loss": 1.0513,
631
  "step": 850
632
  },
633
  {
634
  "epoch": 3.9314285714285715,
635
+ "grad_norm": 6.826801776885986,
636
+ "learning_rate": 4.0944614339109755e-05,
637
+ "loss": 1.0932,
638
  "step": 860
639
  },
640
  {
641
  "epoch": 3.977142857142857,
642
+ "grad_norm": 6.449113845825195,
643
+ "learning_rate": 4.077471967380224e-05,
644
+ "loss": 1.0464,
645
  "step": 870
646
  },
647
  {
648
  "epoch": 4.0,
649
+ "eval_accuracy": 0.7231441048034934,
650
+ "eval_loss": 0.9180014729499817,
651
+ "eval_runtime": 74.9581,
652
+ "eval_samples_per_second": 106.926,
653
+ "eval_steps_per_second": 3.349,
654
  "step": 875
655
  },
656
  {
657
  "epoch": 4.022857142857143,
658
+ "grad_norm": 6.743358135223389,
659
+ "learning_rate": 4.0604825008494734e-05,
660
+ "loss": 1.0845,
661
  "step": 880
662
  },
663
  {
664
  "epoch": 4.0685714285714285,
665
+ "grad_norm": 6.779079437255859,
666
+ "learning_rate": 4.043493034318723e-05,
667
+ "loss": 1.0415,
668
  "step": 890
669
  },
670
  {
671
  "epoch": 4.114285714285714,
672
+ "grad_norm": 8.849162101745605,
673
+ "learning_rate": 4.026503567787971e-05,
674
+ "loss": 1.0487,
675
  "step": 900
676
  },
677
  {
678
  "epoch": 4.16,
679
+ "grad_norm": 7.157181739807129,
680
+ "learning_rate": 4.0095141012572206e-05,
681
+ "loss": 1.0647,
682
  "step": 910
683
  },
684
  {
685
  "epoch": 4.2057142857142855,
686
+ "grad_norm": 8.016735076904297,
687
+ "learning_rate": 3.99252463472647e-05,
688
+ "loss": 1.0493,
689
  "step": 920
690
  },
691
  {
692
  "epoch": 4.251428571428572,
693
+ "grad_norm": 8.14920711517334,
694
+ "learning_rate": 3.9755351681957185e-05,
695
+ "loss": 1.0228,
696
  "step": 930
697
  },
698
  {
699
  "epoch": 4.297142857142857,
700
+ "grad_norm": 6.980301856994629,
701
+ "learning_rate": 3.958545701664968e-05,
702
+ "loss": 0.9977,
703
  "step": 940
704
  },
705
  {
706
  "epoch": 4.3428571428571425,
707
+ "grad_norm": 6.754297256469727,
708
+ "learning_rate": 3.941556235134217e-05,
709
+ "loss": 1.025,
710
  "step": 950
711
  },
712
  {
713
  "epoch": 4.388571428571429,
714
+ "grad_norm": 6.082598686218262,
715
+ "learning_rate": 3.9245667686034656e-05,
716
+ "loss": 0.9734,
717
  "step": 960
718
  },
719
  {
720
  "epoch": 4.434285714285714,
721
+ "grad_norm": 9.236204147338867,
722
+ "learning_rate": 3.907577302072715e-05,
723
+ "loss": 0.9897,
724
  "step": 970
725
  },
726
  {
727
  "epoch": 4.48,
728
+ "grad_norm": 6.07050085067749,
729
+ "learning_rate": 3.890587835541964e-05,
730
+ "loss": 1.0549,
731
  "step": 980
732
  },
733
  {
734
  "epoch": 4.525714285714286,
735
+ "grad_norm": 8.898210525512695,
736
+ "learning_rate": 3.8735983690112135e-05,
737
+ "loss": 1.0703,
738
  "step": 990
739
  },
740
  {
741
  "epoch": 4.571428571428571,
742
+ "grad_norm": 6.365227699279785,
743
+ "learning_rate": 3.856608902480463e-05,
744
+ "loss": 0.9674,
745
  "step": 1000
746
  },
747
  {
748
  "epoch": 4.617142857142857,
749
+ "grad_norm": 6.180821895599365,
750
+ "learning_rate": 3.8396194359497114e-05,
751
+ "loss": 0.9661,
752
  "step": 1010
753
  },
754
  {
755
  "epoch": 4.662857142857143,
756
+ "grad_norm": 7.927034854888916,
757
+ "learning_rate": 3.822629969418961e-05,
758
+ "loss": 1.025,
759
  "step": 1020
760
  },
761
  {
762
  "epoch": 4.708571428571428,
763
+ "grad_norm": 6.064193248748779,
764
+ "learning_rate": 3.80564050288821e-05,
765
+ "loss": 1.0494,
766
  "step": 1030
767
  },
768
  {
769
  "epoch": 4.7542857142857144,
770
+ "grad_norm": 7.2729668617248535,
771
+ "learning_rate": 3.7886510363574586e-05,
772
+ "loss": 1.0002,
773
  "step": 1040
774
  },
775
  {
776
  "epoch": 4.8,
777
+ "grad_norm": 8.703302383422852,
778
+ "learning_rate": 3.771661569826708e-05,
779
+ "loss": 1.001,
780
  "step": 1050
781
  },
782
  {
783
  "epoch": 4.845714285714286,
784
+ "grad_norm": 5.9366583824157715,
785
+ "learning_rate": 3.7546721032959565e-05,
786
+ "loss": 0.8903,
787
  "step": 1060
788
  },
789
  {
790
  "epoch": 4.8914285714285715,
791
+ "grad_norm": 10.52004337310791,
792
+ "learning_rate": 3.737682636765206e-05,
793
+ "loss": 1.0021,
794
  "step": 1070
795
  },
796
  {
797
  "epoch": 4.937142857142857,
798
+ "grad_norm": 6.183305740356445,
799
+ "learning_rate": 3.720693170234455e-05,
800
+ "loss": 0.9574,
801
  "step": 1080
802
  },
803
  {
804
  "epoch": 4.982857142857143,
805
+ "grad_norm": 7.366205215454102,
806
+ "learning_rate": 3.7037037037037037e-05,
807
+ "loss": 0.982,
808
  "step": 1090
809
  },
810
  {
811
+ "epoch": 4.996571428571428,
812
+ "eval_accuracy": 0.743231441048035,
813
+ "eval_loss": 0.8399370312690735,
814
+ "eval_runtime": 75.6387,
815
+ "eval_samples_per_second": 105.964,
816
+ "eval_steps_per_second": 3.318,
817
+ "step": 1093
818
  },
819
  {
820
+ "epoch": 5.0285714285714285,
821
+ "grad_norm": 6.932504177093506,
822
+ "learning_rate": 3.686714237172953e-05,
823
+ "loss": 1.0264,
824
+ "step": 1100
825
+ },
826
+ {
827
+ "epoch": 5.074285714285715,
828
+ "grad_norm": 5.84490966796875,
829
+ "learning_rate": 3.669724770642202e-05,
830
+ "loss": 0.9594,
831
+ "step": 1110
832
+ },
833
+ {
834
+ "epoch": 5.12,
835
+ "grad_norm": 6.8914475440979,
836
+ "learning_rate": 3.652735304111451e-05,
837
+ "loss": 0.918,
838
+ "step": 1120
839
+ },
840
+ {
841
+ "epoch": 5.1657142857142855,
842
+ "grad_norm": 6.1007161140441895,
843
+ "learning_rate": 3.6357458375807e-05,
844
+ "loss": 1.0036,
845
+ "step": 1130
846
+ },
847
+ {
848
+ "epoch": 5.211428571428572,
849
+ "grad_norm": 8.02859115600586,
850
+ "learning_rate": 3.6187563710499494e-05,
851
+ "loss": 0.9655,
852
+ "step": 1140
853
+ },
854
+ {
855
+ "epoch": 5.257142857142857,
856
+ "grad_norm": 7.857163906097412,
857
+ "learning_rate": 3.601766904519198e-05,
858
+ "loss": 0.9567,
859
+ "step": 1150
860
+ },
861
+ {
862
+ "epoch": 5.3028571428571425,
863
+ "grad_norm": 7.495018005371094,
864
+ "learning_rate": 3.584777437988447e-05,
865
+ "loss": 0.9226,
866
+ "step": 1160
867
+ },
868
+ {
869
+ "epoch": 5.348571428571429,
870
+ "grad_norm": 8.65245532989502,
871
+ "learning_rate": 3.567787971457696e-05,
872
+ "loss": 0.8992,
873
+ "step": 1170
874
+ },
875
+ {
876
+ "epoch": 5.394285714285714,
877
+ "grad_norm": 8.458373069763184,
878
+ "learning_rate": 3.550798504926945e-05,
879
+ "loss": 0.9539,
880
+ "step": 1180
881
+ },
882
+ {
883
+ "epoch": 5.44,
884
+ "grad_norm": 6.742293357849121,
885
+ "learning_rate": 3.5338090383961945e-05,
886
+ "loss": 0.9382,
887
+ "step": 1190
888
+ },
889
+ {
890
+ "epoch": 5.485714285714286,
891
+ "grad_norm": 6.219183921813965,
892
+ "learning_rate": 3.516819571865443e-05,
893
+ "loss": 0.9673,
894
+ "step": 1200
895
+ },
896
+ {
897
+ "epoch": 5.531428571428571,
898
+ "grad_norm": 6.372159004211426,
899
+ "learning_rate": 3.4998301053346924e-05,
900
+ "loss": 0.9558,
901
+ "step": 1210
902
+ },
903
+ {
904
+ "epoch": 5.577142857142857,
905
+ "grad_norm": 9.132979393005371,
906
+ "learning_rate": 3.482840638803942e-05,
907
+ "loss": 0.9738,
908
+ "step": 1220
909
+ },
910
+ {
911
+ "epoch": 5.622857142857143,
912
+ "grad_norm": 10.370519638061523,
913
+ "learning_rate": 3.465851172273191e-05,
914
+ "loss": 0.9576,
915
+ "step": 1230
916
+ },
917
+ {
918
+ "epoch": 5.668571428571429,
919
+ "grad_norm": 6.819110870361328,
920
+ "learning_rate": 3.44886170574244e-05,
921
+ "loss": 0.9086,
922
+ "step": 1240
923
+ },
924
+ {
925
+ "epoch": 5.714285714285714,
926
+ "grad_norm": 5.857959747314453,
927
+ "learning_rate": 3.431872239211689e-05,
928
+ "loss": 0.8758,
929
+ "step": 1250
930
+ },
931
+ {
932
+ "epoch": 5.76,
933
+ "grad_norm": 8.476204872131348,
934
+ "learning_rate": 3.414882772680938e-05,
935
+ "loss": 0.9147,
936
+ "step": 1260
937
+ },
938
+ {
939
+ "epoch": 5.805714285714286,
940
+ "grad_norm": 6.7867865562438965,
941
+ "learning_rate": 3.3978933061501874e-05,
942
+ "loss": 0.9843,
943
+ "step": 1270
944
+ },
945
+ {
946
+ "epoch": 5.851428571428571,
947
+ "grad_norm": 8.020210266113281,
948
+ "learning_rate": 3.380903839619436e-05,
949
+ "loss": 1.0334,
950
+ "step": 1280
951
+ },
952
+ {
953
+ "epoch": 5.897142857142857,
954
+ "grad_norm": 7.1987199783325195,
955
+ "learning_rate": 3.363914373088685e-05,
956
+ "loss": 1.0116,
957
+ "step": 1290
958
+ },
959
+ {
960
+ "epoch": 5.942857142857143,
961
+ "grad_norm": 6.633023738861084,
962
+ "learning_rate": 3.3469249065579346e-05,
963
+ "loss": 0.9476,
964
+ "step": 1300
965
+ },
966
+ {
967
+ "epoch": 5.988571428571428,
968
+ "grad_norm": 7.582513332366943,
969
+ "learning_rate": 3.329935440027183e-05,
970
+ "loss": 0.9472,
971
+ "step": 1310
972
+ },
973
+ {
974
+ "epoch": 5.997714285714285,
975
+ "eval_accuracy": 0.7535870243293824,
976
+ "eval_loss": 0.8126731514930725,
977
+ "eval_runtime": 71.4321,
978
+ "eval_samples_per_second": 112.204,
979
+ "eval_steps_per_second": 3.514,
980
+ "step": 1312
981
+ },
982
+ {
983
+ "epoch": 6.034285714285715,
984
+ "grad_norm": 8.917913436889648,
985
+ "learning_rate": 3.3129459734964325e-05,
986
+ "loss": 0.8892,
987
+ "step": 1320
988
+ },
989
+ {
990
+ "epoch": 6.08,
991
+ "grad_norm": 11.498839378356934,
992
+ "learning_rate": 3.295956506965682e-05,
993
+ "loss": 0.8989,
994
+ "step": 1330
995
+ },
996
+ {
997
+ "epoch": 6.1257142857142854,
998
+ "grad_norm": 6.905866622924805,
999
+ "learning_rate": 3.2789670404349304e-05,
1000
+ "loss": 0.8923,
1001
+ "step": 1340
1002
+ },
1003
+ {
1004
+ "epoch": 6.171428571428572,
1005
+ "grad_norm": 5.6410417556762695,
1006
+ "learning_rate": 3.26197757390418e-05,
1007
+ "loss": 0.9276,
1008
+ "step": 1350
1009
+ },
1010
+ {
1011
+ "epoch": 6.217142857142857,
1012
+ "grad_norm": 8.219051361083984,
1013
+ "learning_rate": 3.244988107373428e-05,
1014
+ "loss": 0.9223,
1015
+ "step": 1360
1016
+ },
1017
+ {
1018
+ "epoch": 6.2628571428571425,
1019
+ "grad_norm": 6.403746128082275,
1020
+ "learning_rate": 3.2279986408426776e-05,
1021
+ "loss": 0.9082,
1022
+ "step": 1370
1023
+ },
1024
+ {
1025
+ "epoch": 6.308571428571429,
1026
+ "grad_norm": 5.092130184173584,
1027
+ "learning_rate": 3.211009174311927e-05,
1028
+ "loss": 0.8609,
1029
+ "step": 1380
1030
+ },
1031
+ {
1032
+ "epoch": 6.354285714285714,
1033
+ "grad_norm": 7.074675559997559,
1034
+ "learning_rate": 3.1940197077811755e-05,
1035
+ "loss": 0.908,
1036
+ "step": 1390
1037
+ },
1038
+ {
1039
+ "epoch": 6.4,
1040
+ "grad_norm": 7.3398332595825195,
1041
+ "learning_rate": 3.177030241250425e-05,
1042
+ "loss": 0.9756,
1043
+ "step": 1400
1044
+ },
1045
+ {
1046
+ "epoch": 6.445714285714286,
1047
+ "grad_norm": 8.28463363647461,
1048
+ "learning_rate": 3.160040774719674e-05,
1049
+ "loss": 0.9251,
1050
+ "step": 1410
1051
+ },
1052
+ {
1053
+ "epoch": 6.491428571428571,
1054
+ "grad_norm": 11.502705574035645,
1055
+ "learning_rate": 3.1430513081889227e-05,
1056
+ "loss": 0.8992,
1057
+ "step": 1420
1058
+ },
1059
+ {
1060
+ "epoch": 6.537142857142857,
1061
+ "grad_norm": 6.942375659942627,
1062
+ "learning_rate": 3.126061841658172e-05,
1063
+ "loss": 0.878,
1064
+ "step": 1430
1065
+ },
1066
+ {
1067
+ "epoch": 6.582857142857143,
1068
+ "grad_norm": 6.975459098815918,
1069
+ "learning_rate": 3.1090723751274206e-05,
1070
+ "loss": 0.928,
1071
+ "step": 1440
1072
+ },
1073
+ {
1074
+ "epoch": 6.628571428571428,
1075
+ "grad_norm": 6.6826372146606445,
1076
+ "learning_rate": 3.09208290859667e-05,
1077
+ "loss": 0.9126,
1078
+ "step": 1450
1079
+ },
1080
+ {
1081
+ "epoch": 6.674285714285714,
1082
+ "grad_norm": 6.843193054199219,
1083
+ "learning_rate": 3.075093442065919e-05,
1084
+ "loss": 0.94,
1085
+ "step": 1460
1086
+ },
1087
+ {
1088
+ "epoch": 6.72,
1089
+ "grad_norm": 9.00674819946289,
1090
+ "learning_rate": 3.0581039755351684e-05,
1091
+ "loss": 0.9406,
1092
+ "step": 1470
1093
+ },
1094
+ {
1095
+ "epoch": 6.765714285714286,
1096
+ "grad_norm": 7.362587928771973,
1097
+ "learning_rate": 3.0411145090044174e-05,
1098
+ "loss": 0.9414,
1099
+ "step": 1480
1100
+ },
1101
+ {
1102
+ "epoch": 6.811428571428571,
1103
+ "grad_norm": 10.040757179260254,
1104
+ "learning_rate": 3.0241250424736666e-05,
1105
+ "loss": 0.9372,
1106
+ "step": 1490
1107
+ },
1108
+ {
1109
+ "epoch": 6.857142857142857,
1110
+ "grad_norm": 6.630922794342041,
1111
+ "learning_rate": 3.0071355759429153e-05,
1112
+ "loss": 0.918,
1113
+ "step": 1500
1114
+ },
1115
+ {
1116
+ "epoch": 6.902857142857143,
1117
+ "grad_norm": 6.996939182281494,
1118
+ "learning_rate": 2.9901461094121645e-05,
1119
+ "loss": 0.8634,
1120
+ "step": 1510
1121
+ },
1122
+ {
1123
+ "epoch": 6.948571428571428,
1124
+ "grad_norm": 6.364753723144531,
1125
+ "learning_rate": 2.9731566428814138e-05,
1126
+ "loss": 0.94,
1127
+ "step": 1520
1128
+ },
1129
+ {
1130
+ "epoch": 6.994285714285715,
1131
+ "grad_norm": 7.604902267456055,
1132
+ "learning_rate": 2.9561671763506628e-05,
1133
+ "loss": 0.8751,
1134
+ "step": 1530
1135
+ },
1136
+ {
1137
+ "epoch": 6.998857142857143,
1138
+ "eval_accuracy": 0.7639426076107299,
1139
+ "eval_loss": 0.7851645350456238,
1140
+ "eval_runtime": 71.3545,
1141
+ "eval_samples_per_second": 112.327,
1142
+ "eval_steps_per_second": 3.518,
1143
+ "step": 1531
1144
+ },
1145
+ {
1146
+ "epoch": 7.04,
1147
+ "grad_norm": 6.300110816955566,
1148
+ "learning_rate": 2.939177709819912e-05,
1149
+ "loss": 0.9023,
1150
+ "step": 1540
1151
+ },
1152
+ {
1153
+ "epoch": 7.085714285714285,
1154
+ "grad_norm": 6.873245716094971,
1155
+ "learning_rate": 2.9221882432891607e-05,
1156
+ "loss": 0.8445,
1157
+ "step": 1550
1158
+ },
1159
+ {
1160
+ "epoch": 7.131428571428572,
1161
+ "grad_norm": 6.225979328155518,
1162
+ "learning_rate": 2.90519877675841e-05,
1163
+ "loss": 0.8429,
1164
+ "step": 1560
1165
+ },
1166
+ {
1167
+ "epoch": 7.177142857142857,
1168
+ "grad_norm": 9.389466285705566,
1169
+ "learning_rate": 2.8882093102276592e-05,
1170
+ "loss": 0.8686,
1171
+ "step": 1570
1172
+ },
1173
+ {
1174
+ "epoch": 7.222857142857142,
1175
+ "grad_norm": 6.56587028503418,
1176
+ "learning_rate": 2.871219843696908e-05,
1177
+ "loss": 0.9168,
1178
+ "step": 1580
1179
+ },
1180
+ {
1181
+ "epoch": 7.268571428571429,
1182
+ "grad_norm": 7.2763447761535645,
1183
+ "learning_rate": 2.854230377166157e-05,
1184
+ "loss": 0.9107,
1185
+ "step": 1590
1186
+ },
1187
+ {
1188
+ "epoch": 7.314285714285714,
1189
+ "grad_norm": 10.647456169128418,
1190
+ "learning_rate": 2.8372409106354064e-05,
1191
+ "loss": 0.8785,
1192
+ "step": 1600
1193
+ },
1194
+ {
1195
+ "epoch": 7.36,
1196
+ "grad_norm": 9.847870826721191,
1197
+ "learning_rate": 2.820251444104655e-05,
1198
+ "loss": 0.8282,
1199
+ "step": 1610
1200
+ },
1201
+ {
1202
+ "epoch": 7.405714285714286,
1203
+ "grad_norm": 6.836136341094971,
1204
+ "learning_rate": 2.8032619775739043e-05,
1205
+ "loss": 0.881,
1206
+ "step": 1620
1207
+ },
1208
+ {
1209
+ "epoch": 7.451428571428571,
1210
+ "grad_norm": 7.8010687828063965,
1211
+ "learning_rate": 2.7862725110431533e-05,
1212
+ "loss": 0.8969,
1213
+ "step": 1630
1214
+ },
1215
+ {
1216
+ "epoch": 7.497142857142857,
1217
+ "grad_norm": 8.551609992980957,
1218
+ "learning_rate": 2.7692830445124026e-05,
1219
+ "loss": 0.9146,
1220
+ "step": 1640
1221
+ },
1222
+ {
1223
+ "epoch": 7.542857142857143,
1224
+ "grad_norm": 6.829668998718262,
1225
+ "learning_rate": 2.7522935779816515e-05,
1226
+ "loss": 0.8377,
1227
+ "step": 1650
1228
+ },
1229
+ {
1230
+ "epoch": 7.588571428571429,
1231
+ "grad_norm": 5.782020568847656,
1232
+ "learning_rate": 2.7353041114509004e-05,
1233
+ "loss": 0.8956,
1234
+ "step": 1660
1235
+ },
1236
+ {
1237
+ "epoch": 7.634285714285714,
1238
+ "grad_norm": 7.081970691680908,
1239
+ "learning_rate": 2.7183146449201497e-05,
1240
+ "loss": 0.8707,
1241
+ "step": 1670
1242
+ },
1243
+ {
1244
+ "epoch": 7.68,
1245
+ "grad_norm": 7.2627739906311035,
1246
+ "learning_rate": 2.701325178389399e-05,
1247
+ "loss": 0.8865,
1248
+ "step": 1680
1249
+ },
1250
+ {
1251
+ "epoch": 7.725714285714286,
1252
+ "grad_norm": 6.740649700164795,
1253
+ "learning_rate": 2.6843357118586476e-05,
1254
+ "loss": 0.8183,
1255
+ "step": 1690
1256
+ },
1257
+ {
1258
+ "epoch": 7.771428571428571,
1259
+ "grad_norm": 6.93267822265625,
1260
+ "learning_rate": 2.667346245327897e-05,
1261
+ "loss": 0.8898,
1262
+ "step": 1700
1263
+ },
1264
+ {
1265
+ "epoch": 7.817142857142857,
1266
+ "grad_norm": 8.19253921508789,
1267
+ "learning_rate": 2.6503567787971462e-05,
1268
+ "loss": 0.8982,
1269
+ "step": 1710
1270
+ },
1271
+ {
1272
+ "epoch": 7.862857142857143,
1273
+ "grad_norm": 5.707729816436768,
1274
+ "learning_rate": 2.6333673122663948e-05,
1275
+ "loss": 0.888,
1276
+ "step": 1720
1277
+ },
1278
+ {
1279
+ "epoch": 7.908571428571428,
1280
+ "grad_norm": 6.924159049987793,
1281
+ "learning_rate": 2.616377845735644e-05,
1282
+ "loss": 0.8658,
1283
+ "step": 1730
1284
+ },
1285
+ {
1286
+ "epoch": 7.954285714285715,
1287
+ "grad_norm": 8.230816841125488,
1288
+ "learning_rate": 2.5993883792048927e-05,
1289
+ "loss": 0.8772,
1290
+ "step": 1740
1291
+ },
1292
+ {
1293
+ "epoch": 8.0,
1294
+ "grad_norm": 7.3947906494140625,
1295
+ "learning_rate": 2.582398912674142e-05,
1296
+ "loss": 0.9107,
1297
+ "step": 1750
1298
+ },
1299
+ {
1300
+ "epoch": 8.0,
1301
+ "eval_accuracy": 0.7713038053649407,
1302
+ "eval_loss": 0.7643583416938782,
1303
+ "eval_runtime": 72.4816,
1304
+ "eval_samples_per_second": 110.58,
1305
+ "eval_steps_per_second": 3.463,
1306
+ "step": 1750
1307
+ },
1308
+ {
1309
+ "epoch": 8.045714285714286,
1310
+ "grad_norm": 6.424936294555664,
1311
+ "learning_rate": 2.5654094461433913e-05,
1312
+ "loss": 0.8515,
1313
+ "step": 1760
1314
+ },
1315
+ {
1316
+ "epoch": 8.09142857142857,
1317
+ "grad_norm": 7.372068881988525,
1318
+ "learning_rate": 2.5484199796126402e-05,
1319
+ "loss": 0.7997,
1320
+ "step": 1770
1321
+ },
1322
+ {
1323
+ "epoch": 8.137142857142857,
1324
+ "grad_norm": 6.683503150939941,
1325
+ "learning_rate": 2.5314305130818895e-05,
1326
+ "loss": 0.8021,
1327
+ "step": 1780
1328
+ },
1329
+ {
1330
+ "epoch": 8.182857142857143,
1331
+ "grad_norm": 7.226657390594482,
1332
+ "learning_rate": 2.5144410465511388e-05,
1333
+ "loss": 0.8367,
1334
+ "step": 1790
1335
+ },
1336
+ {
1337
+ "epoch": 8.228571428571428,
1338
+ "grad_norm": 6.432008743286133,
1339
+ "learning_rate": 2.4974515800203874e-05,
1340
+ "loss": 0.8582,
1341
+ "step": 1800
1342
+ },
1343
+ {
1344
+ "epoch": 8.274285714285714,
1345
+ "grad_norm": 6.051323890686035,
1346
+ "learning_rate": 2.4804621134896367e-05,
1347
+ "loss": 0.8166,
1348
+ "step": 1810
1349
+ },
1350
+ {
1351
+ "epoch": 8.32,
1352
+ "grad_norm": 10.92369270324707,
1353
+ "learning_rate": 2.4634726469588856e-05,
1354
+ "loss": 0.8797,
1355
+ "step": 1820
1356
+ },
1357
+ {
1358
+ "epoch": 8.365714285714287,
1359
+ "grad_norm": 9.526762962341309,
1360
+ "learning_rate": 2.4464831804281346e-05,
1361
+ "loss": 0.8032,
1362
+ "step": 1830
1363
+ },
1364
+ {
1365
+ "epoch": 8.411428571428571,
1366
+ "grad_norm": 7.965165138244629,
1367
+ "learning_rate": 2.4294937138973835e-05,
1368
+ "loss": 0.8453,
1369
+ "step": 1840
1370
+ },
1371
+ {
1372
+ "epoch": 8.457142857142857,
1373
+ "grad_norm": 7.171777248382568,
1374
+ "learning_rate": 2.4125042473666328e-05,
1375
+ "loss": 0.8668,
1376
+ "step": 1850
1377
+ },
1378
+ {
1379
+ "epoch": 8.502857142857144,
1380
+ "grad_norm": 7.443463325500488,
1381
+ "learning_rate": 2.3955147808358818e-05,
1382
+ "loss": 0.8412,
1383
+ "step": 1860
1384
+ },
1385
+ {
1386
+ "epoch": 8.548571428571428,
1387
+ "grad_norm": 5.963488578796387,
1388
+ "learning_rate": 2.3785253143051307e-05,
1389
+ "loss": 0.8126,
1390
+ "step": 1870
1391
+ },
1392
+ {
1393
+ "epoch": 8.594285714285714,
1394
+ "grad_norm": 7.679189682006836,
1395
+ "learning_rate": 2.36153584777438e-05,
1396
+ "loss": 0.8548,
1397
+ "step": 1880
1398
+ },
1399
+ {
1400
+ "epoch": 8.64,
1401
+ "grad_norm": 7.266505718231201,
1402
+ "learning_rate": 2.3445463812436293e-05,
1403
+ "loss": 0.8217,
1404
+ "step": 1890
1405
+ },
1406
+ {
1407
+ "epoch": 8.685714285714285,
1408
+ "grad_norm": 6.643305778503418,
1409
+ "learning_rate": 2.3275569147128782e-05,
1410
+ "loss": 0.8284,
1411
+ "step": 1900
1412
+ },
1413
+ {
1414
+ "epoch": 8.731428571428571,
1415
+ "grad_norm": 6.7400922775268555,
1416
+ "learning_rate": 2.3105674481821272e-05,
1417
+ "loss": 0.8595,
1418
+ "step": 1910
1419
+ },
1420
+ {
1421
+ "epoch": 8.777142857142858,
1422
+ "grad_norm": 9.218864440917969,
1423
+ "learning_rate": 2.2935779816513765e-05,
1424
+ "loss": 0.8636,
1425
+ "step": 1920
1426
+ },
1427
+ {
1428
+ "epoch": 8.822857142857142,
1429
+ "grad_norm": 6.2901434898376465,
1430
+ "learning_rate": 2.2765885151206254e-05,
1431
+ "loss": 0.8293,
1432
+ "step": 1930
1433
+ },
1434
+ {
1435
+ "epoch": 8.868571428571428,
1436
+ "grad_norm": 7.57029390335083,
1437
+ "learning_rate": 2.2595990485898744e-05,
1438
+ "loss": 0.8275,
1439
+ "step": 1940
1440
+ },
1441
+ {
1442
+ "epoch": 8.914285714285715,
1443
+ "grad_norm": 6.863813400268555,
1444
+ "learning_rate": 2.2426095820591233e-05,
1445
+ "loss": 0.7866,
1446
+ "step": 1950
1447
+ },
1448
+ {
1449
+ "epoch": 8.96,
1450
+ "grad_norm": 7.338642597198486,
1451
+ "learning_rate": 2.2256201155283726e-05,
1452
+ "loss": 0.8464,
1453
+ "step": 1960
1454
+ },
1455
+ {
1456
+ "epoch": 8.996571428571428,
1457
+ "eval_accuracy": 0.7830318153462258,
1458
+ "eval_loss": 0.7322039604187012,
1459
+ "eval_runtime": 69.8506,
1460
+ "eval_samples_per_second": 114.745,
1461
+ "eval_steps_per_second": 3.593,
1462
+ "step": 1968
1463
+ },
1464
+ {
1465
+ "epoch": 9.005714285714285,
1466
+ "grad_norm": 5.92184591293335,
1467
+ "learning_rate": 2.2086306489976216e-05,
1468
+ "loss": 0.7986,
1469
+ "step": 1970
1470
+ },
1471
+ {
1472
+ "epoch": 9.051428571428572,
1473
+ "grad_norm": 8.04157829284668,
1474
+ "learning_rate": 2.1916411824668705e-05,
1475
+ "loss": 0.8227,
1476
+ "step": 1980
1477
+ },
1478
+ {
1479
+ "epoch": 9.097142857142858,
1480
+ "grad_norm": 6.019360065460205,
1481
+ "learning_rate": 2.1746517159361194e-05,
1482
+ "loss": 0.7563,
1483
+ "step": 1990
1484
+ },
1485
+ {
1486
+ "epoch": 9.142857142857142,
1487
+ "grad_norm": 7.545748233795166,
1488
+ "learning_rate": 2.1576622494053687e-05,
1489
+ "loss": 0.8087,
1490
+ "step": 2000
1491
+ },
1492
+ {
1493
+ "epoch": 9.188571428571429,
1494
+ "grad_norm": 7.692215442657471,
1495
+ "learning_rate": 2.140672782874618e-05,
1496
+ "loss": 0.7899,
1497
+ "step": 2010
1498
+ },
1499
+ {
1500
+ "epoch": 9.234285714285715,
1501
+ "grad_norm": 7.4185566902160645,
1502
+ "learning_rate": 2.123683316343867e-05,
1503
+ "loss": 0.7823,
1504
+ "step": 2020
1505
+ },
1506
+ {
1507
+ "epoch": 9.28,
1508
+ "grad_norm": 8.05014705657959,
1509
+ "learning_rate": 2.106693849813116e-05,
1510
+ "loss": 0.767,
1511
+ "step": 2030
1512
+ },
1513
+ {
1514
+ "epoch": 9.325714285714286,
1515
+ "grad_norm": 7.103221893310547,
1516
+ "learning_rate": 2.0897043832823652e-05,
1517
+ "loss": 0.7722,
1518
+ "step": 2040
1519
+ },
1520
+ {
1521
+ "epoch": 9.371428571428572,
1522
+ "grad_norm": 6.289785861968994,
1523
+ "learning_rate": 2.072714916751614e-05,
1524
+ "loss": 0.7829,
1525
+ "step": 2050
1526
+ },
1527
+ {
1528
+ "epoch": 9.417142857142856,
1529
+ "grad_norm": 5.672107696533203,
1530
+ "learning_rate": 2.055725450220863e-05,
1531
+ "loss": 0.7582,
1532
+ "step": 2060
1533
+ },
1534
+ {
1535
+ "epoch": 9.462857142857143,
1536
+ "grad_norm": 7.584166049957275,
1537
+ "learning_rate": 2.038735983690112e-05,
1538
+ "loss": 0.8066,
1539
+ "step": 2070
1540
+ },
1541
+ {
1542
+ "epoch": 9.508571428571429,
1543
+ "grad_norm": 6.826247215270996,
1544
+ "learning_rate": 2.0217465171593613e-05,
1545
+ "loss": 0.8539,
1546
+ "step": 2080
1547
+ },
1548
+ {
1549
+ "epoch": 9.554285714285715,
1550
+ "grad_norm": 7.450297832489014,
1551
+ "learning_rate": 2.0047570506286103e-05,
1552
+ "loss": 0.8866,
1553
+ "step": 2090
1554
+ },
1555
+ {
1556
+ "epoch": 9.6,
1557
+ "grad_norm": 7.801323890686035,
1558
+ "learning_rate": 1.9877675840978592e-05,
1559
+ "loss": 0.7857,
1560
+ "step": 2100
1561
+ },
1562
+ {
1563
+ "epoch": 9.645714285714286,
1564
+ "grad_norm": 5.812144756317139,
1565
+ "learning_rate": 1.9707781175671085e-05,
1566
+ "loss": 0.7891,
1567
+ "step": 2110
1568
+ },
1569
+ {
1570
+ "epoch": 9.691428571428572,
1571
+ "grad_norm": 6.948761940002441,
1572
+ "learning_rate": 1.9537886510363575e-05,
1573
+ "loss": 0.8282,
1574
+ "step": 2120
1575
+ },
1576
+ {
1577
+ "epoch": 9.737142857142857,
1578
+ "grad_norm": 7.478163719177246,
1579
+ "learning_rate": 1.9367991845056068e-05,
1580
+ "loss": 0.7716,
1581
+ "step": 2130
1582
+ },
1583
+ {
1584
+ "epoch": 9.782857142857143,
1585
+ "grad_norm": 7.083802700042725,
1586
+ "learning_rate": 1.9198097179748557e-05,
1587
+ "loss": 0.8072,
1588
+ "step": 2140
1589
+ },
1590
+ {
1591
+ "epoch": 9.82857142857143,
1592
+ "grad_norm": 8.924737930297852,
1593
+ "learning_rate": 1.902820251444105e-05,
1594
+ "loss": 0.795,
1595
+ "step": 2150
1596
+ },
1597
+ {
1598
+ "epoch": 9.874285714285714,
1599
+ "grad_norm": 6.02655553817749,
1600
+ "learning_rate": 1.885830784913354e-05,
1601
+ "loss": 0.8785,
1602
+ "step": 2160
1603
+ },
1604
+ {
1605
+ "epoch": 9.92,
1606
+ "grad_norm": 6.204999923706055,
1607
+ "learning_rate": 1.868841318382603e-05,
1608
+ "loss": 0.8836,
1609
+ "step": 2170
1610
+ },
1611
+ {
1612
+ "epoch": 9.965714285714286,
1613
+ "grad_norm": 9.425383567810059,
1614
+ "learning_rate": 1.8518518518518518e-05,
1615
+ "loss": 0.8398,
1616
+ "step": 2180
1617
+ },
1618
+ {
1619
+ "epoch": 9.997714285714286,
1620
+ "eval_accuracy": 0.7797878976918278,
1621
+ "eval_loss": 0.7243014574050903,
1622
+ "eval_runtime": 70.1724,
1623
+ "eval_samples_per_second": 114.219,
1624
+ "eval_steps_per_second": 3.577,
1625
+ "step": 2187
1626
+ },
1627
+ {
1628
+ "epoch": 10.01142857142857,
1629
+ "grad_norm": 6.657647132873535,
1630
+ "learning_rate": 1.834862385321101e-05,
1631
+ "loss": 0.7659,
1632
+ "step": 2190
1633
+ },
1634
+ {
1635
+ "epoch": 10.057142857142857,
1636
+ "grad_norm": 6.205779075622559,
1637
+ "learning_rate": 1.81787291879035e-05,
1638
+ "loss": 0.7612,
1639
+ "step": 2200
1640
+ },
1641
+ {
1642
+ "epoch": 10.102857142857143,
1643
+ "grad_norm": 4.940152168273926,
1644
+ "learning_rate": 1.800883452259599e-05,
1645
+ "loss": 0.7277,
1646
+ "step": 2210
1647
+ },
1648
+ {
1649
+ "epoch": 10.14857142857143,
1650
+ "grad_norm": 6.750416278839111,
1651
+ "learning_rate": 1.783893985728848e-05,
1652
+ "loss": 0.7334,
1653
+ "step": 2220
1654
+ },
1655
+ {
1656
+ "epoch": 10.194285714285714,
1657
+ "grad_norm": 8.511019706726074,
1658
+ "learning_rate": 1.7669045191980972e-05,
1659
+ "loss": 0.846,
1660
+ "step": 2230
1661
+ },
1662
+ {
1663
+ "epoch": 10.24,
1664
+ "grad_norm": 6.814949989318848,
1665
+ "learning_rate": 1.7499150526673462e-05,
1666
+ "loss": 0.7721,
1667
+ "step": 2240
1668
+ },
1669
+ {
1670
+ "epoch": 10.285714285714286,
1671
+ "grad_norm": 8.27193546295166,
1672
+ "learning_rate": 1.7329255861365955e-05,
1673
+ "loss": 0.7794,
1674
+ "step": 2250
1675
+ },
1676
+ {
1677
+ "epoch": 10.331428571428571,
1678
+ "grad_norm": 6.475657939910889,
1679
+ "learning_rate": 1.7159361196058444e-05,
1680
+ "loss": 0.7965,
1681
+ "step": 2260
1682
+ },
1683
+ {
1684
+ "epoch": 10.377142857142857,
1685
+ "grad_norm": 7.63599967956543,
1686
+ "learning_rate": 1.6989466530750937e-05,
1687
+ "loss": 0.768,
1688
+ "step": 2270
1689
+ },
1690
+ {
1691
+ "epoch": 10.422857142857143,
1692
+ "grad_norm": 6.7202043533325195,
1693
+ "learning_rate": 1.6819571865443427e-05,
1694
+ "loss": 0.7671,
1695
+ "step": 2280
1696
+ },
1697
+ {
1698
+ "epoch": 10.468571428571428,
1699
+ "grad_norm": 11.612401962280273,
1700
+ "learning_rate": 1.6649677200135916e-05,
1701
+ "loss": 0.7752,
1702
+ "step": 2290
1703
+ },
1704
+ {
1705
+ "epoch": 10.514285714285714,
1706
+ "grad_norm": 10.456415176391602,
1707
+ "learning_rate": 1.647978253482841e-05,
1708
+ "loss": 0.8036,
1709
+ "step": 2300
1710
+ },
1711
+ {
1712
+ "epoch": 10.56,
1713
+ "grad_norm": 5.8782782554626465,
1714
+ "learning_rate": 1.63098878695209e-05,
1715
+ "loss": 0.7652,
1716
+ "step": 2310
1717
+ },
1718
+ {
1719
+ "epoch": 10.605714285714285,
1720
+ "grad_norm": 8.532390594482422,
1721
+ "learning_rate": 1.6139993204213388e-05,
1722
+ "loss": 0.8296,
1723
+ "step": 2320
1724
+ },
1725
+ {
1726
+ "epoch": 10.651428571428571,
1727
+ "grad_norm": 8.59847354888916,
1728
+ "learning_rate": 1.5970098538905877e-05,
1729
+ "loss": 0.8239,
1730
+ "step": 2330
1731
+ },
1732
+ {
1733
+ "epoch": 10.697142857142858,
1734
+ "grad_norm": 7.728725910186768,
1735
+ "learning_rate": 1.580020387359837e-05,
1736
+ "loss": 0.7657,
1737
+ "step": 2340
1738
+ },
1739
+ {
1740
+ "epoch": 10.742857142857144,
1741
+ "grad_norm": 7.599369049072266,
1742
+ "learning_rate": 1.563030920829086e-05,
1743
+ "loss": 0.734,
1744
+ "step": 2350
1745
+ },
1746
+ {
1747
+ "epoch": 10.788571428571428,
1748
+ "grad_norm": 7.236623287200928,
1749
+ "learning_rate": 1.546041454298335e-05,
1750
+ "loss": 0.7992,
1751
+ "step": 2360
1752
+ },
1753
+ {
1754
+ "epoch": 10.834285714285715,
1755
+ "grad_norm": 9.501078605651855,
1756
+ "learning_rate": 1.5290519877675842e-05,
1757
+ "loss": 0.7977,
1758
+ "step": 2370
1759
+ },
1760
+ {
1761
+ "epoch": 10.88,
1762
+ "grad_norm": 7.179370403289795,
1763
+ "learning_rate": 1.5120625212368333e-05,
1764
+ "loss": 0.8284,
1765
+ "step": 2380
1766
+ },
1767
+ {
1768
+ "epoch": 10.925714285714285,
1769
+ "grad_norm": 6.750663757324219,
1770
+ "learning_rate": 1.4950730547060823e-05,
1771
+ "loss": 0.8197,
1772
+ "step": 2390
1773
+ },
1774
+ {
1775
+ "epoch": 10.971428571428572,
1776
+ "grad_norm": 8.519887924194336,
1777
+ "learning_rate": 1.4780835881753314e-05,
1778
+ "loss": 0.7534,
1779
+ "step": 2400
1780
+ },
1781
+ {
1782
+ "epoch": 10.998857142857142,
1783
+ "eval_accuracy": 0.7845290081097941,
1784
+ "eval_loss": 0.708789050579071,
1785
+ "eval_runtime": 70.0816,
1786
+ "eval_samples_per_second": 114.367,
1787
+ "eval_steps_per_second": 3.582,
1788
+ "step": 2406
1789
+ },
1790
+ {
1791
+ "epoch": 11.017142857142858,
1792
+ "grad_norm": 8.514420509338379,
1793
+ "learning_rate": 1.4610941216445803e-05,
1794
+ "loss": 0.804,
1795
+ "step": 2410
1796
+ },
1797
+ {
1798
+ "epoch": 11.062857142857142,
1799
+ "grad_norm": 7.491757869720459,
1800
+ "learning_rate": 1.4441046551138296e-05,
1801
+ "loss": 0.8208,
1802
+ "step": 2420
1803
+ },
1804
+ {
1805
+ "epoch": 11.108571428571429,
1806
+ "grad_norm": 8.859854698181152,
1807
+ "learning_rate": 1.4271151885830786e-05,
1808
+ "loss": 0.7703,
1809
+ "step": 2430
1810
+ },
1811
+ {
1812
+ "epoch": 11.154285714285715,
1813
+ "grad_norm": 7.596364498138428,
1814
+ "learning_rate": 1.4101257220523275e-05,
1815
+ "loss": 0.7777,
1816
+ "step": 2440
1817
+ },
1818
+ {
1819
+ "epoch": 11.2,
1820
+ "grad_norm": 4.977694988250732,
1821
+ "learning_rate": 1.3931362555215766e-05,
1822
+ "loss": 0.7842,
1823
+ "step": 2450
1824
+ },
1825
+ {
1826
+ "epoch": 11.245714285714286,
1827
+ "grad_norm": 8.050721168518066,
1828
+ "learning_rate": 1.3761467889908258e-05,
1829
+ "loss": 0.7335,
1830
+ "step": 2460
1831
+ },
1832
+ {
1833
+ "epoch": 11.291428571428572,
1834
+ "grad_norm": 6.315252304077148,
1835
+ "learning_rate": 1.3591573224600749e-05,
1836
+ "loss": 0.7898,
1837
+ "step": 2470
1838
+ },
1839
+ {
1840
+ "epoch": 11.337142857142856,
1841
+ "grad_norm": 6.380728721618652,
1842
+ "learning_rate": 1.3421678559293238e-05,
1843
+ "loss": 0.7531,
1844
+ "step": 2480
1845
+ },
1846
+ {
1847
+ "epoch": 11.382857142857143,
1848
+ "grad_norm": 6.871670246124268,
1849
+ "learning_rate": 1.3251783893985731e-05,
1850
+ "loss": 0.7604,
1851
+ "step": 2490
1852
+ },
1853
+ {
1854
+ "epoch": 11.428571428571429,
1855
+ "grad_norm": 7.661756992340088,
1856
+ "learning_rate": 1.308188922867822e-05,
1857
+ "loss": 0.7772,
1858
+ "step": 2500
1859
+ },
1860
+ {
1861
+ "epoch": 11.474285714285715,
1862
+ "grad_norm": 8.346705436706543,
1863
+ "learning_rate": 1.291199456337071e-05,
1864
+ "loss": 0.7372,
1865
+ "step": 2510
1866
+ },
1867
+ {
1868
+ "epoch": 11.52,
1869
+ "grad_norm": 5.545016288757324,
1870
+ "learning_rate": 1.2742099898063201e-05,
1871
+ "loss": 0.7428,
1872
+ "step": 2520
1873
+ },
1874
+ {
1875
+ "epoch": 11.565714285714286,
1876
+ "grad_norm": 8.73310661315918,
1877
+ "learning_rate": 1.2572205232755694e-05,
1878
+ "loss": 0.7566,
1879
+ "step": 2530
1880
+ },
1881
+ {
1882
+ "epoch": 11.611428571428572,
1883
+ "grad_norm": 7.3945794105529785,
1884
+ "learning_rate": 1.2402310567448183e-05,
1885
+ "loss": 0.7988,
1886
+ "step": 2540
1887
+ },
1888
+ {
1889
+ "epoch": 11.657142857142857,
1890
+ "grad_norm": 5.604186534881592,
1891
+ "learning_rate": 1.2232415902140673e-05,
1892
+ "loss": 0.6912,
1893
+ "step": 2550
1894
+ },
1895
+ {
1896
+ "epoch": 11.702857142857143,
1897
+ "grad_norm": 6.901523590087891,
1898
+ "learning_rate": 1.2062521236833164e-05,
1899
+ "loss": 0.7438,
1900
+ "step": 2560
1901
+ },
1902
+ {
1903
+ "epoch": 11.748571428571429,
1904
+ "grad_norm": 7.090272426605225,
1905
+ "learning_rate": 1.1892626571525654e-05,
1906
+ "loss": 0.7502,
1907
+ "step": 2570
1908
+ },
1909
+ {
1910
+ "epoch": 11.794285714285714,
1911
+ "grad_norm": 9.939922332763672,
1912
+ "learning_rate": 1.1722731906218146e-05,
1913
+ "loss": 0.7574,
1914
+ "step": 2580
1915
+ },
1916
+ {
1917
+ "epoch": 11.84,
1918
+ "grad_norm": 7.832808017730713,
1919
+ "learning_rate": 1.1552837240910636e-05,
1920
+ "loss": 0.7763,
1921
+ "step": 2590
1922
+ },
1923
+ {
1924
+ "epoch": 11.885714285714286,
1925
+ "grad_norm": 7.921093940734863,
1926
+ "learning_rate": 1.1382942575603127e-05,
1927
+ "loss": 0.8136,
1928
+ "step": 2600
1929
+ },
1930
+ {
1931
+ "epoch": 11.93142857142857,
1932
+ "grad_norm": 8.13971996307373,
1933
+ "learning_rate": 1.1213047910295617e-05,
1934
+ "loss": 0.7735,
1935
+ "step": 2610
1936
+ },
1937
+ {
1938
+ "epoch": 11.977142857142857,
1939
+ "grad_norm": 5.870953559875488,
1940
+ "learning_rate": 1.1043153244988108e-05,
1941
+ "loss": 0.7051,
1942
+ "step": 2620
1943
+ },
1944
+ {
1945
+ "epoch": 12.0,
1946
+ "eval_accuracy": 0.793512164691204,
1947
+ "eval_loss": 0.6982392072677612,
1948
+ "eval_runtime": 74.4508,
1949
+ "eval_samples_per_second": 107.655,
1950
+ "eval_steps_per_second": 3.371,
1951
+ "step": 2625
1952
+ },
1953
+ {
1954
+ "epoch": 12.022857142857143,
1955
+ "grad_norm": 9.655831336975098,
1956
+ "learning_rate": 1.0873258579680597e-05,
1957
+ "loss": 0.7839,
1958
+ "step": 2630
1959
+ },
1960
+ {
1961
+ "epoch": 12.06857142857143,
1962
+ "grad_norm": 6.195824146270752,
1963
+ "learning_rate": 1.070336391437309e-05,
1964
+ "loss": 0.7916,
1965
+ "step": 2640
1966
+ },
1967
+ {
1968
+ "epoch": 12.114285714285714,
1969
+ "grad_norm": 7.92185115814209,
1970
+ "learning_rate": 1.053346924906558e-05,
1971
+ "loss": 0.7016,
1972
+ "step": 2650
1973
+ },
1974
+ {
1975
+ "epoch": 12.16,
1976
+ "grad_norm": 5.990954875946045,
1977
+ "learning_rate": 1.036357458375807e-05,
1978
+ "loss": 0.6903,
1979
+ "step": 2660
1980
+ },
1981
+ {
1982
+ "epoch": 12.205714285714286,
1983
+ "grad_norm": 5.883810520172119,
1984
+ "learning_rate": 1.019367991845056e-05,
1985
+ "loss": 0.7879,
1986
+ "step": 2670
1987
+ },
1988
+ {
1989
+ "epoch": 12.251428571428571,
1990
+ "grad_norm": 6.014761447906494,
1991
+ "learning_rate": 1.0023785253143051e-05,
1992
+ "loss": 0.7569,
1993
+ "step": 2680
1994
+ },
1995
+ {
1996
+ "epoch": 12.297142857142857,
1997
+ "grad_norm": 6.2539191246032715,
1998
+ "learning_rate": 9.853890587835543e-06,
1999
+ "loss": 0.7547,
2000
+ "step": 2690
2001
+ },
2002
+ {
2003
+ "epoch": 12.342857142857143,
2004
+ "grad_norm": 8.04623031616211,
2005
+ "learning_rate": 9.683995922528034e-06,
2006
+ "loss": 0.7636,
2007
+ "step": 2700
2008
+ },
2009
+ {
2010
+ "epoch": 12.388571428571428,
2011
+ "grad_norm": 7.5707106590271,
2012
+ "learning_rate": 9.514101257220525e-06,
2013
+ "loss": 0.7976,
2014
+ "step": 2710
2015
+ },
2016
+ {
2017
+ "epoch": 12.434285714285714,
2018
+ "grad_norm": 7.271738529205322,
2019
+ "learning_rate": 9.344206591913014e-06,
2020
+ "loss": 0.6919,
2021
+ "step": 2720
2022
+ },
2023
+ {
2024
+ "epoch": 12.48,
2025
+ "grad_norm": 6.238006591796875,
2026
+ "learning_rate": 9.174311926605506e-06,
2027
+ "loss": 0.7343,
2028
+ "step": 2730
2029
+ },
2030
+ {
2031
+ "epoch": 12.525714285714285,
2032
+ "grad_norm": 6.735348701477051,
2033
+ "learning_rate": 9.004417261297995e-06,
2034
+ "loss": 0.6853,
2035
+ "step": 2740
2036
+ },
2037
+ {
2038
+ "epoch": 12.571428571428571,
2039
+ "grad_norm": 7.480915069580078,
2040
+ "learning_rate": 8.834522595990486e-06,
2041
+ "loss": 0.7797,
2042
+ "step": 2750
2043
+ },
2044
+ {
2045
+ "epoch": 12.617142857142857,
2046
+ "grad_norm": 7.131129741668701,
2047
+ "learning_rate": 8.664627930682977e-06,
2048
+ "loss": 0.7023,
2049
+ "step": 2760
2050
+ },
2051
+ {
2052
+ "epoch": 12.662857142857142,
2053
+ "grad_norm": 6.145063400268555,
2054
+ "learning_rate": 8.494733265375469e-06,
2055
+ "loss": 0.801,
2056
+ "step": 2770
2057
+ },
2058
+ {
2059
+ "epoch": 12.708571428571428,
2060
+ "grad_norm": 8.46693229675293,
2061
+ "learning_rate": 8.324838600067958e-06,
2062
+ "loss": 0.7693,
2063
+ "step": 2780
2064
+ },
2065
+ {
2066
+ "epoch": 12.754285714285714,
2067
+ "grad_norm": 6.422353744506836,
2068
+ "learning_rate": 8.15494393476045e-06,
2069
+ "loss": 0.7248,
2070
+ "step": 2790
2071
+ },
2072
+ {
2073
+ "epoch": 12.8,
2074
+ "grad_norm": 10.295066833496094,
2075
+ "learning_rate": 7.985049269452939e-06,
2076
+ "loss": 0.8283,
2077
+ "step": 2800
2078
+ },
2079
+ {
2080
+ "epoch": 12.845714285714285,
2081
+ "grad_norm": 7.5840959548950195,
2082
+ "learning_rate": 7.81515460414543e-06,
2083
+ "loss": 0.7081,
2084
+ "step": 2810
2085
+ },
2086
+ {
2087
+ "epoch": 12.891428571428571,
2088
+ "grad_norm": 6.741255760192871,
2089
+ "learning_rate": 7.645259938837921e-06,
2090
+ "loss": 0.7079,
2091
+ "step": 2820
2092
+ },
2093
+ {
2094
+ "epoch": 12.937142857142858,
2095
+ "grad_norm": 7.525110721588135,
2096
+ "learning_rate": 7.475365273530411e-06,
2097
+ "loss": 0.7374,
2098
+ "step": 2830
2099
+ },
2100
+ {
2101
+ "epoch": 12.982857142857142,
2102
+ "grad_norm": 7.784526348114014,
2103
+ "learning_rate": 7.305470608222902e-06,
2104
+ "loss": 0.7359,
2105
+ "step": 2840
2106
+ },
2107
+ {
2108
+ "epoch": 12.996571428571428,
2109
+ "eval_accuracy": 0.7916406737367436,
2110
+ "eval_loss": 0.6984859704971313,
2111
+ "eval_runtime": 73.0055,
2112
+ "eval_samples_per_second": 109.786,
2113
+ "eval_steps_per_second": 3.438,
2114
+ "step": 2843
2115
+ },
2116
+ {
2117
+ "epoch": 13.028571428571428,
2118
+ "grad_norm": 7.309814929962158,
2119
+ "learning_rate": 7.135575942915393e-06,
2120
+ "loss": 0.709,
2121
+ "step": 2850
2122
+ },
2123
+ {
2124
+ "epoch": 13.074285714285715,
2125
+ "grad_norm": 10.355072021484375,
2126
+ "learning_rate": 6.965681277607883e-06,
2127
+ "loss": 0.7387,
2128
+ "step": 2860
2129
+ },
2130
+ {
2131
+ "epoch": 13.12,
2132
+ "grad_norm": 7.303361415863037,
2133
+ "learning_rate": 6.795786612300374e-06,
2134
+ "loss": 0.6999,
2135
+ "step": 2870
2136
+ },
2137
+ {
2138
+ "epoch": 13.165714285714285,
2139
+ "grad_norm": 8.104413032531738,
2140
+ "learning_rate": 6.6258919469928655e-06,
2141
+ "loss": 0.7602,
2142
+ "step": 2880
2143
+ },
2144
+ {
2145
+ "epoch": 13.211428571428572,
2146
+ "grad_norm": 6.764256000518799,
2147
+ "learning_rate": 6.455997281685355e-06,
2148
+ "loss": 0.7195,
2149
+ "step": 2890
2150
+ },
2151
+ {
2152
+ "epoch": 13.257142857142856,
2153
+ "grad_norm": 6.039324760437012,
2154
+ "learning_rate": 6.286102616377847e-06,
2155
+ "loss": 0.6927,
2156
+ "step": 2900
2157
+ },
2158
+ {
2159
+ "epoch": 13.302857142857142,
2160
+ "grad_norm": 5.957076549530029,
2161
+ "learning_rate": 6.1162079510703365e-06,
2162
+ "loss": 0.7369,
2163
+ "step": 2910
2164
+ },
2165
+ {
2166
+ "epoch": 13.348571428571429,
2167
+ "grad_norm": 6.061888694763184,
2168
+ "learning_rate": 5.946313285762827e-06,
2169
+ "loss": 0.734,
2170
+ "step": 2920
2171
+ },
2172
+ {
2173
+ "epoch": 13.394285714285715,
2174
+ "grad_norm": 8.270124435424805,
2175
+ "learning_rate": 5.776418620455318e-06,
2176
+ "loss": 0.7389,
2177
+ "step": 2930
2178
+ },
2179
+ {
2180
+ "epoch": 13.44,
2181
+ "grad_norm": 5.897438049316406,
2182
+ "learning_rate": 5.606523955147808e-06,
2183
+ "loss": 0.772,
2184
+ "step": 2940
2185
+ },
2186
+ {
2187
+ "epoch": 13.485714285714286,
2188
+ "grad_norm": 10.048014640808105,
2189
+ "learning_rate": 5.436629289840299e-06,
2190
+ "loss": 0.7129,
2191
+ "step": 2950
2192
+ },
2193
+ {
2194
+ "epoch": 13.531428571428572,
2195
+ "grad_norm": 6.532159805297852,
2196
+ "learning_rate": 5.26673462453279e-06,
2197
+ "loss": 0.7329,
2198
+ "step": 2960
2199
+ },
2200
+ {
2201
+ "epoch": 13.577142857142857,
2202
+ "grad_norm": 7.861076831817627,
2203
+ "learning_rate": 5.09683995922528e-06,
2204
+ "loss": 0.659,
2205
+ "step": 2970
2206
+ },
2207
+ {
2208
+ "epoch": 13.622857142857143,
2209
+ "grad_norm": 7.23004150390625,
2210
+ "learning_rate": 4.926945293917771e-06,
2211
+ "loss": 0.7383,
2212
+ "step": 2980
2213
+ },
2214
+ {
2215
+ "epoch": 13.668571428571429,
2216
+ "grad_norm": 7.737166881561279,
2217
+ "learning_rate": 4.7570506286102625e-06,
2218
+ "loss": 0.7243,
2219
+ "step": 2990
2220
+ },
2221
+ {
2222
+ "epoch": 13.714285714285714,
2223
+ "grad_norm": 6.211805820465088,
2224
+ "learning_rate": 4.587155963302753e-06,
2225
+ "loss": 0.748,
2226
+ "step": 3000
2227
+ },
2228
+ {
2229
+ "epoch": 13.76,
2230
+ "grad_norm": 6.718061447143555,
2231
+ "learning_rate": 4.417261297995243e-06,
2232
+ "loss": 0.7005,
2233
+ "step": 3010
2234
+ },
2235
+ {
2236
+ "epoch": 13.805714285714286,
2237
+ "grad_norm": 6.454185485839844,
2238
+ "learning_rate": 4.247366632687734e-06,
2239
+ "loss": 0.806,
2240
+ "step": 3020
2241
+ },
2242
+ {
2243
+ "epoch": 13.85142857142857,
2244
+ "grad_norm": 8.608590126037598,
2245
+ "learning_rate": 4.077471967380225e-06,
2246
+ "loss": 0.7342,
2247
+ "step": 3030
2248
+ },
2249
+ {
2250
+ "epoch": 13.897142857142857,
2251
+ "grad_norm": 6.505426406860352,
2252
+ "learning_rate": 3.907577302072715e-06,
2253
+ "loss": 0.7148,
2254
+ "step": 3040
2255
+ },
2256
+ {
2257
+ "epoch": 13.942857142857143,
2258
+ "grad_norm": 7.760762691497803,
2259
+ "learning_rate": 3.7376826367652057e-06,
2260
+ "loss": 0.7208,
2261
+ "step": 3050
2262
+ },
2263
+ {
2264
+ "epoch": 13.98857142857143,
2265
+ "grad_norm": 6.736624240875244,
2266
+ "learning_rate": 3.5677879714576964e-06,
2267
+ "loss": 0.7641,
2268
+ "step": 3060
2269
+ },
2270
+ {
2271
+ "epoch": 13.997714285714286,
2272
+ "eval_accuracy": 0.796756082345602,
2273
+ "eval_loss": 0.6838445663452148,
2274
+ "eval_runtime": 74.6047,
2275
+ "eval_samples_per_second": 107.433,
2276
+ "eval_steps_per_second": 3.364,
2277
+ "step": 3062
2278
+ },
2279
+ {
2280
+ "epoch": 14.034285714285714,
2281
+ "grad_norm": 6.665014266967773,
2282
+ "learning_rate": 3.397893306150187e-06,
2283
+ "loss": 0.6983,
2284
+ "step": 3070
2285
+ },
2286
+ {
2287
+ "epoch": 14.08,
2288
+ "grad_norm": 6.425662994384766,
2289
+ "learning_rate": 3.2279986408426775e-06,
2290
+ "loss": 0.6563,
2291
+ "step": 3080
2292
+ },
2293
+ {
2294
+ "epoch": 14.125714285714286,
2295
+ "grad_norm": 6.079023838043213,
2296
+ "learning_rate": 3.0581039755351682e-06,
2297
+ "loss": 0.6838,
2298
+ "step": 3090
2299
+ },
2300
+ {
2301
+ "epoch": 14.17142857142857,
2302
+ "grad_norm": 6.835630416870117,
2303
+ "learning_rate": 2.888209310227659e-06,
2304
+ "loss": 0.6923,
2305
+ "step": 3100
2306
+ },
2307
+ {
2308
+ "epoch": 14.217142857142857,
2309
+ "grad_norm": 7.853431224822998,
2310
+ "learning_rate": 2.7183146449201493e-06,
2311
+ "loss": 0.7237,
2312
+ "step": 3110
2313
+ },
2314
+ {
2315
+ "epoch": 14.262857142857143,
2316
+ "grad_norm": 6.308788299560547,
2317
+ "learning_rate": 2.54841997961264e-06,
2318
+ "loss": 0.7469,
2319
+ "step": 3120
2320
+ },
2321
+ {
2322
+ "epoch": 14.308571428571428,
2323
+ "grad_norm": 7.8088202476501465,
2324
+ "learning_rate": 2.3785253143051312e-06,
2325
+ "loss": 0.73,
2326
+ "step": 3130
2327
+ },
2328
+ {
2329
+ "epoch": 14.354285714285714,
2330
+ "grad_norm": 6.700632095336914,
2331
+ "learning_rate": 2.2086306489976216e-06,
2332
+ "loss": 0.7349,
2333
+ "step": 3140
2334
+ },
2335
+ {
2336
+ "epoch": 14.4,
2337
+ "grad_norm": 7.2199320793151855,
2338
+ "learning_rate": 2.0387359836901123e-06,
2339
+ "loss": 0.7093,
2340
+ "step": 3150
2341
+ },
2342
+ {
2343
+ "epoch": 14.445714285714285,
2344
+ "grad_norm": 5.401243686676025,
2345
+ "learning_rate": 1.8688413183826028e-06,
2346
+ "loss": 0.6662,
2347
+ "step": 3160
2348
+ },
2349
+ {
2350
+ "epoch": 14.491428571428571,
2351
+ "grad_norm": 6.991000652313232,
2352
+ "learning_rate": 1.6989466530750936e-06,
2353
+ "loss": 0.6859,
2354
+ "step": 3170
2355
+ },
2356
+ {
2357
+ "epoch": 14.537142857142857,
2358
+ "grad_norm": 5.750977993011475,
2359
+ "learning_rate": 1.5290519877675841e-06,
2360
+ "loss": 0.6917,
2361
+ "step": 3180
2362
+ },
2363
+ {
2364
+ "epoch": 14.582857142857144,
2365
+ "grad_norm": 8.291132926940918,
2366
+ "learning_rate": 1.3591573224600747e-06,
2367
+ "loss": 0.7237,
2368
+ "step": 3190
2369
+ },
2370
+ {
2371
+ "epoch": 14.628571428571428,
2372
+ "grad_norm": 8.148652076721191,
2373
+ "learning_rate": 1.1892626571525656e-06,
2374
+ "loss": 0.795,
2375
+ "step": 3200
2376
+ },
2377
+ {
2378
+ "epoch": 14.674285714285714,
2379
+ "grad_norm": 8.365762710571289,
2380
+ "learning_rate": 1.0193679918450562e-06,
2381
+ "loss": 0.7038,
2382
+ "step": 3210
2383
+ },
2384
+ {
2385
+ "epoch": 14.72,
2386
+ "grad_norm": 8.118247032165527,
2387
+ "learning_rate": 8.494733265375468e-07,
2388
+ "loss": 0.7334,
2389
+ "step": 3220
2390
+ },
2391
+ {
2392
+ "epoch": 14.765714285714285,
2393
+ "grad_norm": 7.585115432739258,
2394
+ "learning_rate": 6.795786612300373e-07,
2395
+ "loss": 0.7761,
2396
+ "step": 3230
2397
+ },
2398
+ {
2399
+ "epoch": 14.811428571428571,
2400
+ "grad_norm": 7.77222204208374,
2401
+ "learning_rate": 5.096839959225281e-07,
2402
+ "loss": 0.6876,
2403
+ "step": 3240
2404
+ },
2405
+ {
2406
+ "epoch": 14.857142857142858,
2407
+ "grad_norm": 7.689059734344482,
2408
+ "learning_rate": 3.3978933061501866e-07,
2409
+ "loss": 0.6827,
2410
+ "step": 3250
2411
+ },
2412
+ {
2413
+ "epoch": 14.902857142857142,
2414
+ "grad_norm": 7.31766414642334,
2415
+ "learning_rate": 1.6989466530750933e-07,
2416
+ "loss": 0.7361,
2417
+ "step": 3260
2418
+ },
2419
+ {
2420
+ "epoch": 14.948571428571428,
2421
+ "grad_norm": 6.6061015129089355,
2422
+ "learning_rate": 0.0,
2423
+ "loss": 0.7372,
2424
+ "step": 3270
2425
+ },
2426
+ {
2427
+ "epoch": 14.948571428571428,
2428
+ "eval_accuracy": 0.796756082345602,
2429
+ "eval_loss": 0.6781123280525208,
2430
+ "eval_runtime": 74.4538,
2431
+ "eval_samples_per_second": 107.651,
2432
+ "eval_steps_per_second": 3.371,
2433
+ "step": 3270
+ },
+ {
+ "epoch": 14.948571428571428,
+ "step": 3270,
+ "total_flos": 1.0403963301155365e+19,
+ "train_loss": 0.9863547705729073,
+ "train_runtime": 7901.2254,
+ "train_samples_per_second": 53.135,
  "train_steps_per_second": 0.414
  }
  ],
  "logging_steps": 10,
+ "max_steps": 3270,
  "num_input_tokens_seen": 0,
+ "num_train_epochs": 15,
  "save_steps": 500,
  "stateful_callbacks": {
  "TrainerControl": {
 
  "attributes": {}
  }
  },
+ "total_flos": 1.0403963301155365e+19,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null