{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1506,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 41.35739895402257,
      "learning_rate": 7.193423539345941e-06,
      "loss": 0.5141,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 10.443694874625423,
      "learning_rate": 9.358859796204429e-06,
      "loss": 0.4195,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 5.720021591145773,
      "learning_rate": 1.0625558804168632e-05,
      "loss": 0.2851,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 7.59250257002781,
      "learning_rate": 1.1524296053062918e-05,
      "loss": 0.2174,
      "step": 40
    },
    {
      "epoch": 0.03,
      "grad_norm": 11.521550428114184,
      "learning_rate": 1.2221410821833392e-05,
      "loss": 0.1817,
      "step": 50
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.027998448017936,
      "learning_rate": 1.2790995061027121e-05,
      "loss": 0.1886,
      "step": 60
    },
    {
      "epoch": 0.05,
      "grad_norm": 4.929816146115093,
      "learning_rate": 1.3272571673439616e-05,
      "loss": 0.1553,
      "step": 70
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.3881515971871345,
      "learning_rate": 1.3689732309921406e-05,
      "loss": 0.129,
      "step": 80
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.441118636680569,
      "learning_rate": 1.4057694068991321e-05,
      "loss": 0.1433,
      "step": 90
    },
    {
      "epoch": 0.07,
      "grad_norm": 6.085213496930297,
      "learning_rate": 1.4386847078691883e-05,
      "loss": 0.1092,
      "step": 100
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.713361075579101,
      "learning_rate": 1.4684602194465794e-05,
      "loss": 0.1231,
      "step": 110
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.8259500358417924,
      "learning_rate": 1.495643131788561e-05,
      "loss": 0.0697,
      "step": 120
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.67920682727699,
      "learning_rate": 1.5206489871327869e-05,
      "loss": 0.084,
      "step": 130
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.189491553913532,
      "learning_rate": 1.54380079302981e-05,
      "loss": 0.1023,
      "step": 140
    },
    {
      "epoch": 0.1,
      "grad_norm": 12.000369384166694,
      "learning_rate": 1.5653546086656083e-05,
      "loss": 0.0972,
      "step": 150
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.3726942012239953,
      "learning_rate": 1.5855168566779895e-05,
      "loss": 0.1036,
      "step": 160
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.0242117206977044,
      "learning_rate": 1.604456377435124e-05,
      "loss": 0.1081,
      "step": 170
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.2261934744219967,
      "learning_rate": 1.6223130325849813e-05,
      "loss": 0.082,
      "step": 180
    },
    {
      "epoch": 0.13,
      "grad_norm": 4.70421073268978,
      "learning_rate": 1.6392039793463407e-05,
      "loss": 0.0744,
      "step": 190
    },
    {
      "epoch": 0.13,
      "grad_norm": 12.064244277626111,
      "learning_rate": 1.6552283335550368e-05,
      "loss": 0.0934,
      "step": 200
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.150990470911233,
      "learning_rate": 1.67047069382623e-05,
      "loss": 0.0737,
      "step": 210
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.074258308547766,
      "learning_rate": 1.6850038451324284e-05,
      "loss": 0.0841,
      "step": 220
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.6764310841274237,
      "learning_rate": 1.6988908609137504e-05,
      "loss": 0.0821,
      "step": 230
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.1391283710987063,
      "learning_rate": 1.71218675747441e-05,
      "loss": 0.0747,
      "step": 240
    },
    {
      "epoch": 0.17,
      "grad_norm": 3.1103728720325754,
      "learning_rate": 1.7249398104320845e-05,
      "loss": 0.0907,
      "step": 250
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.354173276825422,
      "learning_rate": 1.7371926128186358e-05,
      "loss": 0.0765,
      "step": 260
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.8677808267225005,
      "learning_rate": 1.7489829333814013e-05,
      "loss": 0.0777,
      "step": 270
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.6124301773304874,
      "learning_rate": 1.760344418715659e-05,
      "loss": 0.0897,
      "step": 280
    },
    {
      "epoch": 0.19,
      "grad_norm": 2.915238130503912,
      "learning_rate": 1.7713071721324668e-05,
      "loss": 0.0866,
      "step": 290
    },
    {
      "epoch": 0.2,
      "grad_norm": 4.061775729833908,
      "learning_rate": 1.781898234351457e-05,
      "loss": 0.0804,
      "step": 300
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.5648704857906242,
      "learning_rate": 1.7921419853452233e-05,
      "loss": 0.0834,
      "step": 310
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.225837991191409,
      "learning_rate": 1.8020604823638384e-05,
      "loss": 0.0646,
      "step": 320
    },
    {
      "epoch": 0.22,
      "grad_norm": 4.041622096074851,
      "learning_rate": 1.811673745928848e-05,
      "loss": 0.0636,
      "step": 330
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.21100008000919218,
      "learning_rate": 1.821000003120973e-05,
      "loss": 0.0656,
      "step": 340
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.2923304434569443,
      "learning_rate": 1.8300558955927067e-05,
      "loss": 0.0756,
      "step": 350
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.6524729986322013,
      "learning_rate": 1.83885665827083e-05,
      "loss": 0.0466,
      "step": 360
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.1793660025907284,
      "learning_rate": 1.847416273569235e-05,
      "loss": 0.0704,
      "step": 370
    },
    {
      "epoch": 0.25,
      "grad_norm": 4.363503091034068,
      "learning_rate": 1.8557476050321896e-05,
      "loss": 0.0712,
      "step": 380
    },
    {
      "epoch": 0.26,
      "grad_norm": 3.0942807753734476,
      "learning_rate": 1.863862513615056e-05,
      "loss": 0.0831,
      "step": 390
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.789230871814937,
      "learning_rate": 1.8717719592408857e-05,
      "loss": 0.078,
      "step": 400
    },
    {
      "epoch": 0.27,
      "grad_norm": 3.4361028432853677,
      "learning_rate": 1.879486089815082e-05,
      "loss": 0.0663,
      "step": 410
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.690439378170989,
      "learning_rate": 1.8870143195120794e-05,
      "loss": 0.0738,
      "step": 420
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.159404934400987,
      "learning_rate": 1.8943653978491198e-05,
      "loss": 0.0768,
      "step": 430
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.8042070009727055,
      "learning_rate": 1.901547470818277e-05,
      "loss": 0.0777,
      "step": 440
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.70758347995601,
      "learning_rate": 1.9085681351478775e-05,
      "loss": 0.05,
      "step": 450
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2113560599237037,
      "learning_rate": 1.9154344865995993e-05,
      "loss": 0.0715,
      "step": 460
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.5816071085486527,
      "learning_rate": 1.9221531630710657e-05,
      "loss": 0.0688,
      "step": 470
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.4447841193953872,
      "learning_rate": 1.9287303831602588e-05,
      "loss": 0.0659,
      "step": 480
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2668557011358101,
      "learning_rate": 1.9351719807533285e-05,
      "loss": 0.0515,
      "step": 490
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.0353893651728865,
      "learning_rate": 1.9414834361179333e-05,
      "loss": 0.0687,
      "step": 500
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.005863891715338,
      "learning_rate": 1.947669903917393e-05,
      "loss": 0.0459,
      "step": 510
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.4330303529877615,
      "learning_rate": 1.9537362385044847e-05,
      "loss": 0.0557,
      "step": 520
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.8683959370360874,
      "learning_rate": 1.959687016805845e-05,
      "loss": 0.0656,
      "step": 530
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.5963373312962135,
      "learning_rate": 1.9655265590672502e-05,
      "loss": 0.0519,
      "step": 540
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.824110595633705,
      "learning_rate": 1.9712589476953243e-05,
      "loss": 0.0557,
      "step": 550
    },
    {
      "epoch": 0.37,
      "grad_norm": 2.024299349665376,
      "learning_rate": 1.976888044401508e-05,
      "loss": 0.0617,
      "step": 560
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.0490238284158413,
      "learning_rate": 1.98241750582861e-05,
      "loss": 0.0529,
      "step": 570
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.2602675746455845,
      "learning_rate": 1.9878507978183157e-05,
      "loss": 0.0617,
      "step": 580
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.7353304044327778,
      "learning_rate": 1.9931912084590654e-05,
      "loss": 0.0464,
      "step": 590
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.043608410331547,
      "learning_rate": 1.998441860037306e-05,
      "loss": 0.0716,
      "step": 600
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.9522735357022469,
      "learning_rate": 1.997786386275595e-05,
      "loss": 0.0496,
      "step": 610
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.3740017908881452,
      "learning_rate": 1.9940970300682533e-05,
      "loss": 0.0686,
      "step": 620
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.1533525092583308,
      "learning_rate": 1.9904076738609114e-05,
      "loss": 0.0562,
      "step": 630
    },
    {
      "epoch": 0.42,
      "grad_norm": 1.564671647601224,
      "learning_rate": 1.9867183176535695e-05,
      "loss": 0.0553,
      "step": 640
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.8996693997352794,
      "learning_rate": 1.9830289614462276e-05,
      "loss": 0.0524,
      "step": 650
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.9340857910489522,
      "learning_rate": 1.979339605238886e-05,
      "loss": 0.0534,
      "step": 660
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.892682744806536,
      "learning_rate": 1.9756502490315442e-05,
      "loss": 0.0625,
      "step": 670
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.2063853192894494,
      "learning_rate": 1.9719608928242023e-05,
      "loss": 0.0519,
      "step": 680
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.984312822464147,
      "learning_rate": 1.9682715366168604e-05,
      "loss": 0.0494,
      "step": 690
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.6705446030280595,
      "learning_rate": 1.9645821804095185e-05,
      "loss": 0.059,
      "step": 700
    },
    {
      "epoch": 0.47,
      "grad_norm": 1.7422620817223426,
      "learning_rate": 1.960892824202177e-05,
      "loss": 0.0454,
      "step": 710
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.3182961229762868,
      "learning_rate": 1.957203467994835e-05,
      "loss": 0.0581,
      "step": 720
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.330360819481426,
      "learning_rate": 1.9535141117874932e-05,
      "loss": 0.0575,
      "step": 730
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.9840741868184866,
      "learning_rate": 1.9498247555801517e-05,
      "loss": 0.063,
      "step": 740
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.6018064731760029,
      "learning_rate": 1.9461353993728094e-05,
      "loss": 0.0519,
      "step": 750
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.391561342963203,
      "learning_rate": 1.9424460431654675e-05,
      "loss": 0.0572,
      "step": 760
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.0319629863193043,
      "learning_rate": 1.938756686958126e-05,
      "loss": 0.0619,
      "step": 770
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.7398111190442345,
      "learning_rate": 1.935067330750784e-05,
      "loss": 0.05,
      "step": 780
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.675180749962208,
      "learning_rate": 1.9313779745434422e-05,
      "loss": 0.0654,
      "step": 790
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.486943722740635,
      "learning_rate": 1.9276886183361007e-05,
      "loss": 0.0463,
      "step": 800
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.1444156835752686,
      "learning_rate": 1.9239992621287588e-05,
      "loss": 0.0598,
      "step": 810
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.1260777714975718,
      "learning_rate": 1.920309905921417e-05,
      "loss": 0.039,
      "step": 820
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.328313971146618,
      "learning_rate": 1.916620549714075e-05,
      "loss": 0.0591,
      "step": 830
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.8805517771854091,
      "learning_rate": 1.912931193506733e-05,
      "loss": 0.0392,
      "step": 840
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.9704226543415952,
      "learning_rate": 1.9092418372993916e-05,
      "loss": 0.056,
      "step": 850
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.4328262810995938,
      "learning_rate": 1.9055524810920497e-05,
      "loss": 0.0602,
      "step": 860
    },
    {
      "epoch": 0.58,
      "grad_norm": 2.6133255890647167,
      "learning_rate": 1.9018631248847078e-05,
      "loss": 0.0507,
      "step": 870
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.4574352859865667,
      "learning_rate": 1.898173768677366e-05,
      "loss": 0.0667,
      "step": 880
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.9116869451062006,
      "learning_rate": 1.894484412470024e-05,
      "loss": 0.0533,
      "step": 890
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.8152499611885836,
      "learning_rate": 1.890795056262682e-05,
      "loss": 0.0534,
      "step": 900
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.9440345318883696,
      "learning_rate": 1.8871057000553406e-05,
      "loss": 0.0419,
      "step": 910
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.7588896552457074,
      "learning_rate": 1.8834163438479987e-05,
      "loss": 0.0418,
      "step": 920
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.8886319208504987,
      "learning_rate": 1.8797269876406568e-05,
      "loss": 0.0397,
      "step": 930
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.8820615671771377,
      "learning_rate": 1.876037631433315e-05,
      "loss": 0.0454,
      "step": 940
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.2868214865586614,
      "learning_rate": 1.872348275225973e-05,
      "loss": 0.0373,
      "step": 950
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.016876778284122,
      "learning_rate": 1.8686589190186315e-05,
      "loss": 0.0619,
      "step": 960
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.8992764743601639,
      "learning_rate": 1.8649695628112896e-05,
      "loss": 0.0403,
      "step": 970
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.42416507318131186,
      "learning_rate": 1.8612802066039477e-05,
      "loss": 0.067,
      "step": 980
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.8601423331120188,
      "learning_rate": 1.8575908503966062e-05,
      "loss": 0.066,
      "step": 990
    },
    {
      "epoch": 0.66,
      "grad_norm": 1.3893250379275068,
      "learning_rate": 1.8539014941892643e-05,
      "loss": 0.0437,
      "step": 1000
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.3240219930504376,
      "learning_rate": 1.850212137981922e-05,
      "loss": 0.0488,
      "step": 1010
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.8849896953543195,
      "learning_rate": 1.8465227817745805e-05,
      "loss": 0.0472,
      "step": 1020
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.8655212477880712,
      "learning_rate": 1.8428334255672386e-05,
      "loss": 0.0546,
      "step": 1030
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.2016665548799828,
      "learning_rate": 1.8391440693598967e-05,
      "loss": 0.0956,
      "step": 1040
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.492659569010543,
      "learning_rate": 1.8354547131525552e-05,
      "loss": 0.0449,
      "step": 1050
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.4224837643985645,
      "learning_rate": 1.8317653569452133e-05,
      "loss": 0.0545,
      "step": 1060
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.8405573730480247,
      "learning_rate": 1.8280760007378714e-05,
      "loss": 0.0328,
      "step": 1070
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.3988661237020132,
      "learning_rate": 1.8243866445305295e-05,
      "loss": 0.0451,
      "step": 1080
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.5573396543240564,
      "learning_rate": 1.8206972883231876e-05,
      "loss": 0.0502,
      "step": 1090
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.7855566721285819,
      "learning_rate": 1.817007932115846e-05,
      "loss": 0.0341,
      "step": 1100
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.497005175064917,
      "learning_rate": 1.8133185759085042e-05,
      "loss": 0.0513,
      "step": 1110
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.650975953086994,
      "learning_rate": 1.8096292197011623e-05,
      "loss": 0.0473,
      "step": 1120
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8418054071334755,
      "learning_rate": 1.8059398634938204e-05,
      "loss": 0.0444,
      "step": 1130
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.33651471077315,
      "learning_rate": 1.8022505072864785e-05,
      "loss": 0.0476,
      "step": 1140
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.1529220423121023,
      "learning_rate": 1.7985611510791367e-05,
      "loss": 0.0379,
      "step": 1150
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.6912013035706749,
      "learning_rate": 1.794871794871795e-05,
      "loss": 0.0526,
      "step": 1160
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.806793867054328,
      "learning_rate": 1.7911824386644532e-05,
      "loss": 0.047,
      "step": 1170
    },
    {
      "epoch": 0.78,
      "grad_norm": 3.323409487413899,
      "learning_rate": 1.7874930824571113e-05,
      "loss": 0.0455,
      "step": 1180
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.0877783561592873,
      "learning_rate": 1.7838037262497695e-05,
      "loss": 0.0555,
      "step": 1190
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.9158165193439103,
      "learning_rate": 1.7801143700424276e-05,
      "loss": 0.0458,
      "step": 1200
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.0863638939536142,
      "learning_rate": 1.776425013835086e-05,
      "loss": 0.0462,
      "step": 1210
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.3398031954044978,
      "learning_rate": 1.772735657627744e-05,
      "loss": 0.032,
      "step": 1220
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.9272917966743286,
      "learning_rate": 1.7690463014204022e-05,
      "loss": 0.0464,
      "step": 1230
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.4608044079217914,
      "learning_rate": 1.7653569452130607e-05,
      "loss": 0.0544,
      "step": 1240
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.0799767984416713,
      "learning_rate": 1.7616675890057188e-05,
      "loss": 0.0629,
      "step": 1250
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.438242481318271,
      "learning_rate": 1.757978232798377e-05,
      "loss": 0.0586,
      "step": 1260
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.22666963149041416,
      "learning_rate": 1.754288876591035e-05,
      "loss": 0.0419,
      "step": 1270
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.6906855856145777,
      "learning_rate": 1.750599520383693e-05,
      "loss": 0.0417,
      "step": 1280
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.8417294291584847,
      "learning_rate": 1.7469101641763513e-05,
      "loss": 0.0496,
      "step": 1290
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.384648750559231,
      "learning_rate": 1.7432208079690097e-05,
      "loss": 0.037,
      "step": 1300
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.0969098360012755,
      "learning_rate": 1.7395314517616678e-05,
      "loss": 0.0392,
      "step": 1310
    },
    {
      "epoch": 0.88,
      "grad_norm": 4.265947825753472,
      "learning_rate": 1.735842095554326e-05,
      "loss": 0.0434,
      "step": 1320
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.0043877066833349,
      "learning_rate": 1.732152739346984e-05,
      "loss": 0.0423,
      "step": 1330
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.3834891081373628,
      "learning_rate": 1.728463383139642e-05,
      "loss": 0.0423,
      "step": 1340
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.8917610552257311,
      "learning_rate": 1.7247740269323006e-05,
      "loss": 0.0491,
      "step": 1350
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.7777935423085822,
      "learning_rate": 1.7210846707249587e-05,
      "loss": 0.0465,
      "step": 1360
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.882686457165806,
      "learning_rate": 1.717395314517617e-05,
      "loss": 0.0366,
      "step": 1370
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.4070944899475468,
      "learning_rate": 1.713705958310275e-05,
      "loss": 0.0364,
      "step": 1380
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.368387839772382,
      "learning_rate": 1.710016602102933e-05,
      "loss": 0.0302,
      "step": 1390
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.2202288141019488,
      "learning_rate": 1.7063272458955912e-05,
      "loss": 0.0455,
      "step": 1400
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.7419694841079256,
      "learning_rate": 1.7026378896882496e-05,
      "loss": 0.0402,
      "step": 1410
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.5792094366611027,
      "learning_rate": 1.6989485334809077e-05,
      "loss": 0.0364,
      "step": 1420
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.0092569099059323,
      "learning_rate": 1.695259177273566e-05,
      "loss": 0.0335,
      "step": 1430
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.5374731387141227,
      "learning_rate": 1.691569821066224e-05,
      "loss": 0.0428,
      "step": 1440
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.6268956058587998,
      "learning_rate": 1.687880464858882e-05,
      "loss": 0.0483,
      "step": 1450
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.6628348966888566,
      "learning_rate": 1.6841911086515402e-05,
      "loss": 0.0487,
      "step": 1460
    },
    {
      "epoch": 0.98,
      "grad_norm": 8.037103442054311,
      "learning_rate": 1.6805017524441987e-05,
      "loss": 0.0679,
      "step": 1470
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.620270154467724,
      "learning_rate": 1.6768123962368568e-05,
      "loss": 0.0437,
      "step": 1480
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.0436875513091504,
      "learning_rate": 1.6731230400295152e-05,
      "loss": 0.0346,
      "step": 1490
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.8374706362800531,
      "learning_rate": 1.6694336838221733e-05,
      "loss": 0.0517,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 6024,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "total_flos": 0.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}