{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.815689149560117,
  "eval_steps": 500,
  "global_step": 79326,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.036656891495601175,
      "grad_norm": 3.9739036560058594,
      "learning_rate": 9.936968963517636e-05,
      "loss": 7.911,
      "step": 500
    },
    {
      "epoch": 0.07331378299120235,
      "grad_norm": 3.5818135738372803,
      "learning_rate": 9.873937927035273e-05,
      "loss": 7.4132,
      "step": 1000
    },
    {
      "epoch": 0.10997067448680352,
      "grad_norm": 3.6124236583709717,
      "learning_rate": 9.810906890552909e-05,
      "loss": 7.2685,
      "step": 1500
    },
    {
      "epoch": 0.1466275659824047,
      "grad_norm": 3.609344720840454,
      "learning_rate": 9.747875854070544e-05,
      "loss": 7.1646,
      "step": 2000
    },
    {
      "epoch": 0.18328445747800587,
      "grad_norm": 3.878251075744629,
      "learning_rate": 9.684844817588181e-05,
      "loss": 7.112,
      "step": 2500
    },
    {
      "epoch": 0.21994134897360704,
      "grad_norm": 3.640087604522705,
      "learning_rate": 9.621813781105817e-05,
      "loss": 6.9647,
      "step": 3000
    },
    {
      "epoch": 0.2565982404692082,
      "grad_norm": 4.396575927734375,
      "learning_rate": 9.558782744623454e-05,
      "loss": 6.9285,
      "step": 3500
    },
    {
      "epoch": 0.2932551319648094,
      "grad_norm": 4.27196741104126,
      "learning_rate": 9.49575170814109e-05,
      "loss": 6.8817,
      "step": 4000
    },
    {
      "epoch": 0.32991202346041054,
      "grad_norm": 5.039891719818115,
      "learning_rate": 9.432720671658725e-05,
      "loss": 6.8308,
      "step": 4500
    },
    {
      "epoch": 0.36656891495601174,
      "grad_norm": 6.211440563201904,
      "learning_rate": 9.369689635176361e-05,
      "loss": 6.7133,
      "step": 5000
    },
    {
      "epoch": 0.4032258064516129,
      "grad_norm": 5.089609622955322,
      "learning_rate": 9.306658598693997e-05,
      "loss": 6.596,
      "step": 5500
    },
    {
      "epoch": 0.4398826979472141,
      "grad_norm": 7.917068004608154,
      "learning_rate": 9.243627562211632e-05,
      "loss": 6.5651,
      "step": 6000
    },
    {
      "epoch": 0.47653958944281527,
      "grad_norm": 4.634731292724609,
      "learning_rate": 9.18059652572927e-05,
      "loss": 6.4742,
      "step": 6500
    },
    {
      "epoch": 0.5131964809384164,
      "grad_norm": 5.389685153961182,
      "learning_rate": 9.117565489246905e-05,
      "loss": 6.3593,
      "step": 7000
    },
    {
      "epoch": 0.5498533724340176,
      "grad_norm": 6.881243705749512,
      "learning_rate": 9.054534452764542e-05,
      "loss": 6.2916,
      "step": 7500
    },
    {
      "epoch": 0.5865102639296188,
      "grad_norm": 6.123986721038818,
      "learning_rate": 8.991503416282178e-05,
      "loss": 6.2029,
      "step": 8000
    },
    {
      "epoch": 0.6231671554252199,
      "grad_norm": 6.262916088104248,
      "learning_rate": 8.928472379799813e-05,
      "loss": 6.1005,
      "step": 8500
    },
    {
      "epoch": 0.6598240469208211,
      "grad_norm": 7.570896625518799,
      "learning_rate": 8.86544134331745e-05,
      "loss": 6.0054,
      "step": 9000
    },
    {
      "epoch": 0.6964809384164223,
      "grad_norm": 6.367081642150879,
      "learning_rate": 8.802410306835086e-05,
      "loss": 5.8283,
      "step": 9500
    },
    {
      "epoch": 0.7331378299120235,
      "grad_norm": 6.622028350830078,
      "learning_rate": 8.739379270352722e-05,
      "loss": 5.7653,
      "step": 10000
    },
    {
      "epoch": 0.7697947214076246,
      "grad_norm": 6.616864204406738,
      "learning_rate": 8.676348233870359e-05,
      "loss": 5.6764,
      "step": 10500
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 7.048656463623047,
      "learning_rate": 8.613317197387994e-05,
      "loss": 5.5562,
      "step": 11000
    },
    {
      "epoch": 0.843108504398827,
      "grad_norm": 6.860437870025635,
      "learning_rate": 8.55028616090563e-05,
      "loss": 5.4657,
      "step": 11500
    },
    {
      "epoch": 0.8797653958944281,
      "grad_norm": 6.002134799957275,
      "learning_rate": 8.487255124423267e-05,
      "loss": 5.3649,
      "step": 12000
    },
    {
      "epoch": 0.9164222873900293,
      "grad_norm": 7.97310209274292,
      "learning_rate": 8.424224087940903e-05,
      "loss": 5.3526,
      "step": 12500
    },
    {
      "epoch": 0.9530791788856305,
      "grad_norm": 5.980320930480957,
      "learning_rate": 8.361193051458538e-05,
      "loss": 5.2898,
      "step": 13000
    },
    {
      "epoch": 0.9897360703812317,
      "grad_norm": 7.705748558044434,
      "learning_rate": 8.298162014976175e-05,
      "loss": 5.1842,
      "step": 13500
    },
    {
      "epoch": 1.0,
      "eval_loss": 5.10765266418457,
      "eval_runtime": 51.2669,
      "eval_samples_per_second": 751.187,
      "eval_steps_per_second": 46.95,
      "step": 13640
    },
    {
      "epoch": 1.0263929618768328,
      "grad_norm": 7.542203426361084,
      "learning_rate": 8.23513097849381e-05,
      "loss": 5.1511,
      "step": 14000
    },
    {
      "epoch": 1.063049853372434,
      "grad_norm": 6.65974235534668,
      "learning_rate": 8.172099942011447e-05,
      "loss": 5.1524,
      "step": 14500
    },
    {
      "epoch": 1.099706744868035,
      "grad_norm": 6.591290473937988,
      "learning_rate": 8.109068905529082e-05,
      "loss": 5.0419,
      "step": 15000
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 6.578639507293701,
      "learning_rate": 8.046037869046718e-05,
      "loss": 4.9851,
      "step": 15500
    },
    {
      "epoch": 1.1730205278592376,
      "grad_norm": 6.83113956451416,
      "learning_rate": 7.983006832564355e-05,
      "loss": 4.9795,
      "step": 16000
    },
    {
      "epoch": 1.2096774193548387,
      "grad_norm": 6.948678493499756,
      "learning_rate": 7.919975796081991e-05,
      "loss": 4.9811,
      "step": 16500
    },
    {
      "epoch": 1.2463343108504399,
      "grad_norm": 6.949957847595215,
      "learning_rate": 7.856944759599628e-05,
      "loss": 4.9127,
      "step": 17000
    },
    {
      "epoch": 1.282991202346041,
      "grad_norm": 6.94000768661499,
      "learning_rate": 7.793913723117263e-05,
      "loss": 4.9336,
      "step": 17500
    },
    {
      "epoch": 1.3196480938416422,
      "grad_norm": 6.532993316650391,
      "learning_rate": 7.730882686634899e-05,
      "loss": 4.8546,
      "step": 18000
    },
    {
      "epoch": 1.3563049853372435,
      "grad_norm": 7.553442001342773,
      "learning_rate": 7.667851650152536e-05,
      "loss": 4.7791,
      "step": 18500
    },
    {
      "epoch": 1.3929618768328447,
      "grad_norm": 6.4063720703125,
      "learning_rate": 7.604820613670172e-05,
      "loss": 4.8218,
      "step": 19000
    },
    {
      "epoch": 1.4296187683284458,
      "grad_norm": 8.8884916305542,
      "learning_rate": 7.541789577187807e-05,
      "loss": 4.756,
      "step": 19500
    },
    {
      "epoch": 1.466275659824047,
      "grad_norm": 7.412301540374756,
      "learning_rate": 7.478758540705444e-05,
      "loss": 4.7164,
      "step": 20000
    },
    {
      "epoch": 1.502932551319648,
      "grad_norm": 5.796535491943359,
      "learning_rate": 7.41572750422308e-05,
      "loss": 4.7209,
      "step": 20500
    },
    {
      "epoch": 1.5395894428152492,
      "grad_norm": 6.591666221618652,
      "learning_rate": 7.352696467740716e-05,
      "loss": 4.6933,
      "step": 21000
    },
    {
      "epoch": 1.5762463343108504,
      "grad_norm": 8.342963218688965,
      "learning_rate": 7.289665431258353e-05,
      "loss": 4.6505,
      "step": 21500
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 7.765003681182861,
      "learning_rate": 7.226634394775988e-05,
      "loss": 4.5958,
      "step": 22000
    },
    {
      "epoch": 1.6495601173020527,
      "grad_norm": 8.44519329071045,
      "learning_rate": 7.163603358293624e-05,
      "loss": 4.5874,
      "step": 22500
    },
    {
      "epoch": 1.6862170087976538,
      "grad_norm": 10.166460037231445,
      "learning_rate": 7.100572321811261e-05,
      "loss": 4.588,
      "step": 23000
    },
    {
      "epoch": 1.7228739002932552,
      "grad_norm": 6.965490341186523,
      "learning_rate": 7.037541285328895e-05,
      "loss": 4.6057,
      "step": 23500
    },
    {
      "epoch": 1.7595307917888563,
      "grad_norm": 7.495148181915283,
      "learning_rate": 6.974510248846532e-05,
      "loss": 4.5788,
      "step": 24000
    },
    {
      "epoch": 1.7961876832844574,
      "grad_norm": 8.987942695617676,
      "learning_rate": 6.911479212364168e-05,
      "loss": 4.5289,
      "step": 24500
    },
    {
      "epoch": 1.8328445747800588,
      "grad_norm": 6.871776580810547,
      "learning_rate": 6.848448175881804e-05,
      "loss": 4.5326,
      "step": 25000
    },
    {
      "epoch": 1.86950146627566,
      "grad_norm": 6.7767744064331055,
      "learning_rate": 6.78541713939944e-05,
      "loss": 4.4945,
      "step": 25500
    },
    {
      "epoch": 1.906158357771261,
      "grad_norm": 10.17478084564209,
      "learning_rate": 6.722386102917076e-05,
      "loss": 4.4387,
      "step": 26000
    },
    {
      "epoch": 1.9428152492668622,
      "grad_norm": 8.18852424621582,
      "learning_rate": 6.659355066434713e-05,
      "loss": 4.4369,
      "step": 26500
    },
    {
      "epoch": 1.9794721407624634,
      "grad_norm": 10.012794494628906,
      "learning_rate": 6.596324029952349e-05,
      "loss": 4.3956,
      "step": 27000
    },
    {
      "epoch": 2.0,
      "eval_loss": 4.3589653968811035,
      "eval_runtime": 52.0275,
      "eval_samples_per_second": 740.205,
      "eval_steps_per_second": 46.264,
      "step": 27280
    },
    {
      "epoch": 2.0161290322580645,
      "grad_norm": 7.973904132843018,
      "learning_rate": 6.533292993469985e-05,
      "loss": 4.4279,
      "step": 27500
    },
    {
      "epoch": 2.0527859237536656,
      "grad_norm": 7.848902225494385,
      "learning_rate": 6.470261956987622e-05,
      "loss": 4.3801,
      "step": 28000
    },
    {
      "epoch": 2.089442815249267,
      "grad_norm": 7.455059051513672,
      "learning_rate": 6.407230920505257e-05,
      "loss": 4.3868,
      "step": 28500
    },
    {
      "epoch": 2.126099706744868,
      "grad_norm": 8.987055778503418,
      "learning_rate": 6.344199884022893e-05,
      "loss": 4.3451,
      "step": 29000
    },
    {
      "epoch": 2.162756598240469,
      "grad_norm": 7.139368534088135,
      "learning_rate": 6.28116884754053e-05,
      "loss": 4.3601,
      "step": 29500
    },
    {
      "epoch": 2.19941348973607,
      "grad_norm": 7.4019317626953125,
      "learning_rate": 6.218137811058166e-05,
      "loss": 4.3565,
      "step": 30000
    },
    {
      "epoch": 2.236070381231672,
      "grad_norm": 8.461463928222656,
      "learning_rate": 6.155106774575801e-05,
      "loss": 4.2893,
      "step": 30500
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 8.040552139282227,
      "learning_rate": 6.0920757380934376e-05,
      "loss": 4.2938,
      "step": 31000
    },
    {
      "epoch": 2.309384164222874,
      "grad_norm": 8.279471397399902,
      "learning_rate": 6.029044701611074e-05,
      "loss": 4.2922,
      "step": 31500
    },
    {
      "epoch": 2.346041055718475,
      "grad_norm": 7.4184699058532715,
      "learning_rate": 5.96601366512871e-05,
      "loss": 4.2655,
      "step": 32000
    },
    {
      "epoch": 2.3826979472140764,
      "grad_norm": 9.337892532348633,
      "learning_rate": 5.902982628646345e-05,
      "loss": 4.2428,
      "step": 32500
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 8.708230018615723,
      "learning_rate": 5.8399515921639816e-05,
      "loss": 4.2081,
      "step": 33000
    },
    {
      "epoch": 2.4560117302052786,
      "grad_norm": 7.0168938636779785,
      "learning_rate": 5.776920555681617e-05,
      "loss": 4.2089,
      "step": 33500
    },
    {
      "epoch": 2.4926686217008798,
      "grad_norm": 9.225556373596191,
      "learning_rate": 5.7138895191992536e-05,
      "loss": 4.226,
      "step": 34000
    },
    {
      "epoch": 2.529325513196481,
      "grad_norm": 7.094930648803711,
      "learning_rate": 5.65085848271689e-05,
      "loss": 4.1786,
      "step": 34500
    },
    {
      "epoch": 2.565982404692082,
      "grad_norm": 7.8396711349487305,
      "learning_rate": 5.587827446234526e-05,
      "loss": 4.2071,
      "step": 35000
    },
    {
      "epoch": 2.602639296187683,
      "grad_norm": 8.818264961242676,
      "learning_rate": 5.524796409752162e-05,
      "loss": 4.1563,
      "step": 35500
    },
    {
      "epoch": 2.6392961876832843,
      "grad_norm": 7.717746734619141,
      "learning_rate": 5.461765373269798e-05,
      "loss": 4.165,
      "step": 36000
    },
    {
      "epoch": 2.6759530791788855,
      "grad_norm": 6.068331718444824,
      "learning_rate": 5.3987343367874346e-05,
      "loss": 4.1111,
      "step": 36500
    },
    {
      "epoch": 2.712609970674487,
      "grad_norm": 8.796777725219727,
      "learning_rate": 5.33570330030507e-05,
      "loss": 4.1574,
      "step": 37000
    },
    {
      "epoch": 2.7492668621700878,
      "grad_norm": 8.890911102294922,
      "learning_rate": 5.2726722638227066e-05,
      "loss": 4.1259,
      "step": 37500
    },
    {
      "epoch": 2.7859237536656893,
      "grad_norm": 8.333062171936035,
      "learning_rate": 5.209641227340343e-05,
      "loss": 4.1107,
      "step": 38000
    },
    {
      "epoch": 2.8225806451612905,
      "grad_norm": 8.116930961608887,
      "learning_rate": 5.146610190857979e-05,
      "loss": 4.1155,
      "step": 38500
    },
    {
      "epoch": 2.8592375366568916,
      "grad_norm": 11.292548179626465,
      "learning_rate": 5.083579154375615e-05,
      "loss": 4.0813,
      "step": 39000
    },
    {
      "epoch": 2.8958944281524928,
      "grad_norm": 8.076095581054688,
      "learning_rate": 5.020548117893251e-05,
      "loss": 4.1091,
      "step": 39500
    },
    {
      "epoch": 2.932551319648094,
      "grad_norm": 6.882072925567627,
      "learning_rate": 4.957517081410887e-05,
      "loss": 4.082,
      "step": 40000
    },
    {
      "epoch": 2.969208211143695,
      "grad_norm": 8.762887954711914,
      "learning_rate": 4.894486044928523e-05,
      "loss": 4.0442,
      "step": 40500
    },
    {
      "epoch": 3.0,
      "eval_loss": 3.9874393939971924,
      "eval_runtime": 51.3732,
      "eval_samples_per_second": 749.632,
      "eval_steps_per_second": 46.853,
      "step": 40920
    },
    {
      "epoch": 3.005865102639296,
      "grad_norm": 6.891060829162598,
      "learning_rate": 4.831455008446159e-05,
      "loss": 4.0661,
      "step": 41000
    },
    {
      "epoch": 3.0425219941348973,
      "grad_norm": 9.079097747802734,
      "learning_rate": 4.768423971963795e-05,
      "loss": 3.999,
      "step": 41500
    },
    {
      "epoch": 3.0791788856304985,
      "grad_norm": 9.445289611816406,
      "learning_rate": 4.7053929354814315e-05,
      "loss": 4.045,
      "step": 42000
    },
    {
      "epoch": 3.1158357771260996,
      "grad_norm": 7.992264270782471,
      "learning_rate": 4.642361898999067e-05,
      "loss": 4.0385,
      "step": 42500
    },
    {
      "epoch": 3.1524926686217007,
      "grad_norm": 8.101459503173828,
      "learning_rate": 4.5793308625167035e-05,
      "loss": 3.9818,
      "step": 43000
    },
    {
      "epoch": 3.189149560117302,
      "grad_norm": 7.8059492111206055,
      "learning_rate": 4.51629982603434e-05,
      "loss": 3.9905,
      "step": 43500
    },
    {
      "epoch": 3.225806451612903,
      "grad_norm": 10.241538047790527,
      "learning_rate": 4.4532687895519755e-05,
      "loss": 3.9456,
      "step": 44000
    },
    {
      "epoch": 3.2624633431085046,
      "grad_norm": 8.502156257629395,
      "learning_rate": 4.390237753069611e-05,
      "loss": 3.9347,
      "step": 44500
    },
    {
      "epoch": 3.2991202346041058,
      "grad_norm": 7.6501641273498535,
      "learning_rate": 4.3272067165872475e-05,
      "loss": 3.9469,
      "step": 45000
    },
    {
      "epoch": 3.335777126099707,
      "grad_norm": 8.457347869873047,
      "learning_rate": 4.264175680104884e-05,
      "loss": 3.9397,
      "step": 45500
    },
    {
      "epoch": 3.372434017595308,
      "grad_norm": 8.348608016967773,
      "learning_rate": 4.20114464362252e-05,
      "loss": 3.9413,
      "step": 46000
    },
    {
      "epoch": 3.409090909090909,
      "grad_norm": 10.004755020141602,
      "learning_rate": 4.138113607140156e-05,
      "loss": 3.9057,
      "step": 46500
    },
    {
      "epoch": 3.4457478005865103,
      "grad_norm": 7.643105983734131,
      "learning_rate": 4.075082570657792e-05,
      "loss": 3.9165,
      "step": 47000
    },
    {
      "epoch": 3.4824046920821115,
      "grad_norm": 9.214630126953125,
      "learning_rate": 4.0120515341754285e-05,
      "loss": 3.8871,
      "step": 47500
    },
    {
      "epoch": 3.5190615835777126,
      "grad_norm": 7.3644700050354,
      "learning_rate": 3.949020497693065e-05,
      "loss": 3.9115,
      "step": 48000
    },
    {
      "epoch": 3.5557184750733137,
      "grad_norm": 8.032934188842773,
      "learning_rate": 3.8859894612107e-05,
      "loss": 3.8775,
      "step": 48500
    },
    {
      "epoch": 3.592375366568915,
      "grad_norm": 8.48763370513916,
      "learning_rate": 3.822958424728336e-05,
      "loss": 3.8719,
      "step": 49000
    },
    {
      "epoch": 3.629032258064516,
      "grad_norm": 7.765323162078857,
      "learning_rate": 3.7599273882459725e-05,
      "loss": 3.8727,
      "step": 49500
    },
    {
      "epoch": 3.665689149560117,
      "grad_norm": 9.104835510253906,
      "learning_rate": 3.696896351763609e-05,
      "loss": 3.847,
      "step": 50000
    },
    {
      "epoch": 3.7023460410557183,
      "grad_norm": 7.518016815185547,
      "learning_rate": 3.6338653152812445e-05,
      "loss": 3.8503,
      "step": 50500
    },
    {
      "epoch": 3.73900293255132,
      "grad_norm": 9.076459884643555,
      "learning_rate": 3.570834278798881e-05,
      "loss": 3.8496,
      "step": 51000
    },
    {
      "epoch": 3.7756598240469206,
      "grad_norm": 9.251144409179688,
      "learning_rate": 3.507803242316517e-05,
      "loss": 3.8394,
      "step": 51500
    },
    {
      "epoch": 3.812316715542522,
      "grad_norm": 9.594878196716309,
      "learning_rate": 3.444772205834153e-05,
      "loss": 3.8104,
      "step": 52000
    },
    {
      "epoch": 3.8489736070381233,
      "grad_norm": 7.103595733642578,
      "learning_rate": 3.381741169351789e-05,
      "loss": 3.8329,
      "step": 52500
    },
    {
      "epoch": 3.8856304985337244,
      "grad_norm": 7.397706985473633,
      "learning_rate": 3.3187101328694254e-05,
      "loss": 3.8172,
      "step": 53000
    },
    {
      "epoch": 3.9222873900293256,
      "grad_norm": 8.312359809875488,
      "learning_rate": 3.255679096387061e-05,
      "loss": 3.8246,
      "step": 53500
    },
    {
      "epoch": 3.9589442815249267,
      "grad_norm": 7.361260414123535,
      "learning_rate": 3.192648059904697e-05,
      "loss": 3.8078,
      "step": 54000
    },
    {
      "epoch": 3.995601173020528,
      "grad_norm": 9.614564895629883,
      "learning_rate": 3.129617023422333e-05,
      "loss": 3.7865,
      "step": 54500
    },
    {
      "epoch": 4.0,
      "eval_loss": 3.7747433185577393,
      "eval_runtime": 51.2384,
      "eval_samples_per_second": 751.604,
      "eval_steps_per_second": 46.976,
      "step": 54560
    },
    {
      "epoch": 4.032258064516129,
      "grad_norm": 9.029661178588867,
      "learning_rate": 3.0665859869399694e-05,
      "loss": 3.7978,
      "step": 55000
    },
    {
      "epoch": 4.068914956011731,
      "grad_norm": 9.358321189880371,
      "learning_rate": 3.0035549504576054e-05,
      "loss": 3.821,
      "step": 55500
    },
    {
      "epoch": 4.105571847507331,
      "grad_norm": 9.089591979980469,
      "learning_rate": 2.9405239139752418e-05,
      "loss": 3.7474,
      "step": 56000
    },
    {
      "epoch": 4.142228739002933,
      "grad_norm": 9.139451026916504,
      "learning_rate": 2.8774928774928778e-05,
      "loss": 3.7571,
      "step": 56500
    },
    {
      "epoch": 4.178885630498534,
      "grad_norm": 9.728781700134277,
      "learning_rate": 2.8144618410105137e-05,
      "loss": 3.8133,
      "step": 57000
    },
    {
      "epoch": 4.215542521994135,
      "grad_norm": 8.330384254455566,
      "learning_rate": 2.75143080452815e-05,
      "loss": 3.7412,
      "step": 57500
    },
    {
      "epoch": 4.252199413489736,
      "grad_norm": 9.436174392700195,
      "learning_rate": 2.6883997680457857e-05,
      "loss": 3.7338,
      "step": 58000
    },
    {
      "epoch": 4.288856304985337,
      "grad_norm": 12.170429229736328,
      "learning_rate": 2.6253687315634217e-05,
      "loss": 3.7215,
      "step": 58500
    },
    {
      "epoch": 4.325513196480938,
      "grad_norm": 8.019417762756348,
      "learning_rate": 2.562337695081058e-05,
      "loss": 3.7096,
      "step": 59000
    },
    {
      "epoch": 4.36217008797654,
      "grad_norm": 7.6509480476379395,
      "learning_rate": 2.499306658598694e-05,
      "loss": 3.7279,
      "step": 59500
    },
    {
      "epoch": 4.39882697947214,
      "grad_norm": 8.255581855773926,
      "learning_rate": 2.43627562211633e-05,
      "loss": 3.749,
      "step": 60000
    },
    {
      "epoch": 4.435483870967742,
      "grad_norm": 9.91395378112793,
      "learning_rate": 2.3732445856339664e-05,
      "loss": 3.6773,
      "step": 60500
    },
    {
      "epoch": 4.472140762463344,
      "grad_norm": 9.325223922729492,
      "learning_rate": 2.3102135491516024e-05,
      "loss": 3.7204,
      "step": 61000
    },
    {
      "epoch": 4.508797653958944,
      "grad_norm": 9.393867492675781,
      "learning_rate": 2.2471825126692384e-05,
      "loss": 3.6893,
      "step": 61500
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 8.97889232635498,
      "learning_rate": 2.1841514761868744e-05,
      "loss": 3.6765,
      "step": 62000
    },
    {
      "epoch": 4.5821114369501466,
      "grad_norm": 7.196798324584961,
      "learning_rate": 2.1211204397045107e-05,
      "loss": 3.6679,
      "step": 62500
    },
    {
      "epoch": 4.618768328445748,
      "grad_norm": 9.22948932647705,
      "learning_rate": 2.0580894032221467e-05,
      "loss": 3.6591,
      "step": 63000
    },
    {
      "epoch": 4.655425219941349,
      "grad_norm": 8.780097961425781,
      "learning_rate": 1.9950583667397827e-05,
      "loss": 3.6391,
      "step": 63500
    },
    {
      "epoch": 4.69208211143695,
      "grad_norm": 10.625210762023926,
      "learning_rate": 1.9320273302574187e-05,
      "loss": 3.6819,
      "step": 64000
    },
    {
      "epoch": 4.728739002932551,
      "grad_norm": 7.736958980560303,
      "learning_rate": 1.868996293775055e-05,
      "loss": 3.6486,
      "step": 64500
    },
    {
      "epoch": 4.765395894428153,
      "grad_norm": 8.629966735839844,
      "learning_rate": 1.805965257292691e-05,
      "loss": 3.6629,
      "step": 65000
    },
    {
      "epoch": 4.802052785923753,
      "grad_norm": 7.5463762283325195,
      "learning_rate": 1.7429342208103274e-05,
      "loss": 3.6398,
      "step": 65500
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 7.877403736114502,
      "learning_rate": 1.679903184327963e-05,
      "loss": 3.6593,
      "step": 66000
    },
    {
      "epoch": 4.875366568914956,
      "grad_norm": 8.22919750213623,
      "learning_rate": 1.6168721478455993e-05,
      "loss": 3.6139,
      "step": 66500
    },
    {
      "epoch": 4.912023460410557,
      "grad_norm": 9.631176948547363,
      "learning_rate": 1.5538411113632353e-05,
      "loss": 3.6452,
      "step": 67000
    },
    {
      "epoch": 4.948680351906159,
      "grad_norm": 8.381847381591797,
      "learning_rate": 1.4908100748808715e-05,
      "loss": 3.6273,
      "step": 67500
    },
    {
      "epoch": 4.9853372434017595,
      "grad_norm": 9.712096214294434,
      "learning_rate": 1.4277790383985077e-05,
      "loss": 3.5952,
      "step": 68000
    },
    {
      "epoch": 5.0,
      "eval_loss": 3.599792003631592,
      "eval_runtime": 51.3354,
      "eval_samples_per_second": 750.184,
      "eval_steps_per_second": 46.888,
      "step": 68200
    },
    {
      "epoch": 5.021994134897361,
      "grad_norm": 9.814529418945312,
      "learning_rate": 1.3647480019161435e-05,
      "loss": 3.6124,
      "step": 68500
    },
    {
      "epoch": 5.058651026392962,
      "grad_norm": 9.232531547546387,
      "learning_rate": 1.3017169654337797e-05,
      "loss": 3.6207,
      "step": 69000
    },
    {
      "epoch": 5.095307917888563,
      "grad_norm": 9.92640495300293,
      "learning_rate": 1.2386859289514158e-05,
      "loss": 3.6176,
      "step": 69500
    },
    {
      "epoch": 5.131964809384164,
      "grad_norm": 9.3385591506958,
      "learning_rate": 1.1756548924690518e-05,
      "loss": 3.6151,
      "step": 70000
    },
    {
      "epoch": 5.168621700879766,
      "grad_norm": 8.617002487182617,
      "learning_rate": 1.112623855986688e-05,
      "loss": 3.621,
      "step": 70500
    },
    {
      "epoch": 5.205278592375366,
      "grad_norm": 8.497230529785156,
      "learning_rate": 1.049592819504324e-05,
      "loss": 3.6085,
      "step": 71000
    },
    {
      "epoch": 5.241935483870968,
      "grad_norm": 8.745635986328125,
      "learning_rate": 9.8656178302196e-06,
      "loss": 3.5636,
      "step": 71500
    },
    {
      "epoch": 5.278592375366569,
      "grad_norm": 7.370077610015869,
      "learning_rate": 9.235307465395961e-06,
      "loss": 3.5896,
      "step": 72000
    },
    {
      "epoch": 5.31524926686217,
      "grad_norm": 9.268915176391602,
      "learning_rate": 8.604997100572321e-06,
      "loss": 3.5571,
      "step": 72500
    },
    {
      "epoch": 5.351906158357771,
      "grad_norm": 8.88790225982666,
      "learning_rate": 7.974686735748683e-06,
      "loss": 3.5424,
      "step": 73000
    },
    {
      "epoch": 5.3885630498533725,
      "grad_norm": 6.400046348571777,
      "learning_rate": 7.344376370925044e-06,
      "loss": 3.5482,
      "step": 73500
    },
    {
      "epoch": 5.425219941348973,
      "grad_norm": 8.085587501525879,
      "learning_rate": 6.714066006101405e-06,
      "loss": 3.5732,
      "step": 74000
    },
    {
      "epoch": 5.461876832844575,
      "grad_norm": 8.79615306854248,
      "learning_rate": 6.083755641277765e-06,
      "loss": 3.5689,
      "step": 74500
    },
    {
      "epoch": 5.4985337243401755,
      "grad_norm": 9.852959632873535,
      "learning_rate": 5.453445276454126e-06,
      "loss": 3.542,
      "step": 75000
    },
    {
      "epoch": 5.535190615835777,
      "grad_norm": 9.936841011047363,
      "learning_rate": 4.823134911630487e-06,
      "loss": 3.5407,
      "step": 75500
    },
    {
      "epoch": 5.571847507331379,
      "grad_norm": 9.2828950881958,
      "learning_rate": 4.192824546806848e-06,
      "loss": 3.547,
      "step": 76000
    },
    {
      "epoch": 5.608504398826979,
      "grad_norm": 8.38242244720459,
      "learning_rate": 3.5625141819832086e-06,
      "loss": 3.5334,
      "step": 76500
    },
    {
      "epoch": 5.645161290322581,
      "grad_norm": 7.721536159515381,
      "learning_rate": 2.9322038171595694e-06,
      "loss": 3.5499,
      "step": 77000
    },
    {
      "epoch": 5.681818181818182,
      "grad_norm": 8.407939910888672,
      "learning_rate": 2.30189345233593e-06,
      "loss": 3.4661,
      "step": 77500
    },
    {
      "epoch": 5.718475073313783,
      "grad_norm": 9.764538764953613,
      "learning_rate": 1.671583087512291e-06,
      "loss": 3.5483,
      "step": 78000
    },
    {
      "epoch": 5.755131964809384,
      "grad_norm": 8.228140830993652,
      "learning_rate": 1.0412727226886518e-06,
      "loss": 3.5607,
      "step": 78500
    },
    {
      "epoch": 5.7917888563049855,
      "grad_norm": 11.577902793884277,
      "learning_rate": 4.1096235786501275e-07,
      "loss": 3.5589,
      "step": 79000
    }
  ],
  "logging_steps": 500,
  "max_steps": 79326,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 8192,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.5090344956154816e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}