{
  "best_metric": 0.19655267894268036,
  "best_model_checkpoint": "saves/Orca/SFT/checkpoint-2000",
  "epoch": 2.0,
  "eval_steps": 250,
  "global_step": 2062,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.004849660523763337, "grad_norm": 1.569652795791626, "learning_rate": 8.333333333333333e-07, "loss": 1.763, "step": 5},
    {"epoch": 0.009699321047526674, "grad_norm": 1.4889137744903564, "learning_rate": 1.6666666666666667e-06, "loss": 1.7673, "step": 10},
    {"epoch": 0.014548981571290009, "grad_norm": 1.3860485553741455, "learning_rate": 2.5e-06, "loss": 1.7696, "step": 15},
    {"epoch": 0.019398642095053348, "grad_norm": 1.4874159097671509, "learning_rate": 3.3333333333333333e-06, "loss": 1.7723, "step": 20},
    {"epoch": 0.02424830261881668, "grad_norm": 1.4718151092529297, "learning_rate": 4.166666666666667e-06, "loss": 1.7295, "step": 25},
    {"epoch": 0.029097963142580018, "grad_norm": 1.5262246131896973, "learning_rate": 5e-06, "loss": 1.7863, "step": 30},
    {"epoch": 0.03394762366634336, "grad_norm": 1.6720690727233887, "learning_rate": 5.833333333333334e-06, "loss": 1.709, "step": 35},
    {"epoch": 0.038797284190106696, "grad_norm": 1.462561845779419, "learning_rate": 6.666666666666667e-06, "loss": 1.6664, "step": 40},
    {"epoch": 0.04364694471387003, "grad_norm": 1.6730310916900635, "learning_rate": 7.5e-06, "loss": 1.6421, "step": 45},
    {"epoch": 0.04849660523763336, "grad_norm": 1.538442850112915, "learning_rate": 8.333333333333334e-06, "loss": 1.6169, "step": 50},
    {"epoch": 0.0533462657613967, "grad_norm": 1.7747137546539307, "learning_rate": 9.166666666666666e-06, "loss": 1.5235, "step": 55},
    {"epoch": 0.058195926285160036, "grad_norm": 1.600160837173462, "learning_rate": 1e-05, "loss": 1.4392, "step": 60},
    {"epoch": 0.06304558680892337, "grad_norm": 1.3897379636764526, "learning_rate": 1.0833333333333334e-05, "loss": 1.3295, "step": 65},
    {"epoch": 0.06789524733268672, "grad_norm": 1.3479865789413452, "learning_rate": 1.1666666666666668e-05, "loss": 1.1758, "step": 70},
    {"epoch": 0.07274490785645005, "grad_norm": 1.0993175506591797, "learning_rate": 1.25e-05, "loss": 1.1012, "step": 75},
    {"epoch": 0.07759456838021339, "grad_norm": 1.0077180862426758, "learning_rate": 1.3333333333333333e-05, "loss": 0.9917, "step": 80},
    {"epoch": 0.08244422890397672, "grad_norm": 0.8057171106338501, "learning_rate": 1.4166666666666668e-05, "loss": 0.8798, "step": 85},
    {"epoch": 0.08729388942774007, "grad_norm": 0.6777159571647644, "learning_rate": 1.5e-05, "loss": 0.7895, "step": 90},
    {"epoch": 0.0921435499515034, "grad_norm": 0.5001732707023621, "learning_rate": 1.5833333333333333e-05, "loss": 0.7364, "step": 95},
    {"epoch": 0.09699321047526673, "grad_norm": 0.4005896747112274, "learning_rate": 1.6666666666666667e-05, "loss": 0.6936, "step": 100},
    {"epoch": 0.10184287099903007, "grad_norm": 0.3276143968105316, "learning_rate": 1.75e-05, "loss": 0.6595, "step": 105},
    {"epoch": 0.1066925315227934, "grad_norm": 0.29122063517570496, "learning_rate": 1.8333333333333333e-05, "loss": 0.6316, "step": 110},
    {"epoch": 0.11154219204655674, "grad_norm": 0.2869010269641876, "learning_rate": 1.9166666666666667e-05, "loss": 0.6033, "step": 115},
    {"epoch": 0.11639185257032007, "grad_norm": 0.25405701994895935, "learning_rate": 2e-05, "loss": 0.5716, "step": 120},
    {"epoch": 0.12124151309408342, "grad_norm": 0.24002915620803833, "learning_rate": 2.0833333333333336e-05, "loss": 0.5691, "step": 125},
    {"epoch": 0.12609117361784675, "grad_norm": 0.2358996719121933, "learning_rate": 2.1666666666666667e-05, "loss": 0.554, "step": 130},
    {"epoch": 0.1309408341416101, "grad_norm": 0.21090734004974365, "learning_rate": 2.25e-05, "loss": 0.5194, "step": 135},
    {"epoch": 0.13579049466537343, "grad_norm": 0.20737610757350922, "learning_rate": 2.3333333333333336e-05, "loss": 0.4918, "step": 140},
    {"epoch": 0.14064015518913675, "grad_norm": 0.1901807337999344, "learning_rate": 2.4166666666666667e-05, "loss": 0.4705, "step": 145},
    {"epoch": 0.1454898157129001, "grad_norm": 0.19849984347820282, "learning_rate": 2.5e-05, "loss": 0.4595, "step": 150},
    {"epoch": 0.15033947623666344, "grad_norm": 0.18864648044109344, "learning_rate": 2.5833333333333336e-05, "loss": 0.4407, "step": 155},
    {"epoch": 0.15518913676042678, "grad_norm": 0.17015086114406586, "learning_rate": 2.6666666666666667e-05, "loss": 0.4214, "step": 160},
    {"epoch": 0.1600387972841901, "grad_norm": 0.17595945298671722, "learning_rate": 2.7500000000000004e-05, "loss": 0.4343, "step": 165},
    {"epoch": 0.16488845780795344, "grad_norm": 0.16629475355148315, "learning_rate": 2.8333333333333335e-05, "loss": 0.4045, "step": 170},
    {"epoch": 0.1697381183317168, "grad_norm": 0.16392391920089722, "learning_rate": 2.916666666666667e-05, "loss": 0.3803, "step": 175},
    {"epoch": 0.17458777885548013, "grad_norm": 0.1675371527671814, "learning_rate": 3e-05, "loss": 0.3762, "step": 180},
    {"epoch": 0.17943743937924345, "grad_norm": 0.15912921726703644, "learning_rate": 3.0833333333333335e-05, "loss": 0.3698, "step": 185},
    {"epoch": 0.1842870999030068, "grad_norm": 0.1546890288591385, "learning_rate": 3.1666666666666666e-05, "loss": 0.359, "step": 190},
    {"epoch": 0.18913676042677013, "grad_norm": 0.14289158582687378, "learning_rate": 3.2500000000000004e-05, "loss": 0.349, "step": 195},
    {"epoch": 0.19398642095053345, "grad_norm": 0.13693265616893768, "learning_rate": 3.3333333333333335e-05, "loss": 0.3145, "step": 200},
    {"epoch": 0.1988360814742968, "grad_norm": 0.15631736814975739, "learning_rate": 3.4166666666666666e-05, "loss": 0.3266, "step": 205},
    {"epoch": 0.20368574199806014, "grad_norm": 0.13729828596115112, "learning_rate": 3.5e-05, "loss": 0.3368, "step": 210},
    {"epoch": 0.20853540252182348, "grad_norm": 0.13479730486869812, "learning_rate": 3.5833333333333335e-05, "loss": 0.3025, "step": 215},
    {"epoch": 0.2133850630455868, "grad_norm": 0.13599981367588043, "learning_rate": 3.6666666666666666e-05, "loss": 0.3121, "step": 220},
    {"epoch": 0.21823472356935014, "grad_norm": 0.1292022317647934, "learning_rate": 3.7500000000000003e-05, "loss": 0.2982, "step": 225},
    {"epoch": 0.22308438409311349, "grad_norm": 0.13855475187301636, "learning_rate": 3.8333333333333334e-05, "loss": 0.2709, "step": 230},
    {"epoch": 0.22793404461687683, "grad_norm": 0.12373723834753036, "learning_rate": 3.9166666666666665e-05, "loss": 0.2903, "step": 235},
    {"epoch": 0.23278370514064015, "grad_norm": 0.1207897961139679, "learning_rate": 4e-05, "loss": 0.3064, "step": 240},
    {"epoch": 0.2376333656644035, "grad_norm": 0.12190013378858566, "learning_rate": 4.0833333333333334e-05, "loss": 0.3189, "step": 245},
    {"epoch": 0.24248302618816683, "grad_norm": 0.11036136001348495, "learning_rate": 4.166666666666667e-05, "loss": 0.2823, "step": 250},
    {"epoch": 0.24248302618816683, "eval_loss": 0.2783246636390686, "eval_runtime": 48.1034, "eval_samples_per_second": 14.011, "eval_steps_per_second": 0.894, "step": 250},
    {"epoch": 0.24733268671193018, "grad_norm": 0.11338551342487335, "learning_rate": 4.25e-05, "loss": 0.2865, "step": 255},
    {"epoch": 0.2521823472356935, "grad_norm": 0.1275234967470169, "learning_rate": 4.3333333333333334e-05, "loss": 0.279, "step": 260},
    {"epoch": 0.25703200775945684, "grad_norm": 0.1382416933774948, "learning_rate": 4.4166666666666665e-05, "loss": 0.2578, "step": 265},
    {"epoch": 0.2618816682832202, "grad_norm": 0.13280175626277924, "learning_rate": 4.5e-05, "loss": 0.3027, "step": 270},
    {"epoch": 0.2667313288069835, "grad_norm": 0.11413029581308365, "learning_rate": 4.5833333333333334e-05, "loss": 0.2647, "step": 275},
    {"epoch": 0.27158098933074687, "grad_norm": 0.12923401594161987, "learning_rate": 4.666666666666667e-05, "loss": 0.2984, "step": 280},
    {"epoch": 0.27643064985451016, "grad_norm": 0.11887092143297195, "learning_rate": 4.75e-05, "loss": 0.2683, "step": 285},
    {"epoch": 0.2812803103782735, "grad_norm": 0.12000833451747894, "learning_rate": 4.8333333333333334e-05, "loss": 0.2747, "step": 290},
    {"epoch": 0.28612997090203685, "grad_norm": 0.11771322786808014, "learning_rate": 4.9166666666666665e-05, "loss": 0.233, "step": 295},
    {"epoch": 0.2909796314258002, "grad_norm": 0.10835352540016174, "learning_rate": 5e-05, "loss": 0.2444, "step": 300},
    {"epoch": 0.29582929194956353, "grad_norm": 0.12001065164804459, "learning_rate": 4.999900657500545e-05, "loss": 0.2234, "step": 305},
    {"epoch": 0.3006789524733269, "grad_norm": 0.11877791583538055, "learning_rate": 4.9996026378973266e-05, "loss": 0.2467, "step": 310},
    {"epoch": 0.3055286129970902, "grad_norm": 0.12458127737045288, "learning_rate": 4.999105964875153e-05, "loss": 0.2559, "step": 315},
    {"epoch": 0.31037827352085356, "grad_norm": 0.11346752941608429, "learning_rate": 4.9984106779066174e-05, "loss": 0.2523, "step": 320},
    {"epoch": 0.31522793404461685, "grad_norm": 0.13425442576408386, "learning_rate": 4.9975168322489554e-05, "loss": 0.2697, "step": 325},
    {"epoch": 0.3200775945683802, "grad_norm": 0.11504397541284561, "learning_rate": 4.996424498939656e-05, "loss": 0.2553, "step": 330},
    {"epoch": 0.32492725509214354, "grad_norm": 0.12158407270908356, "learning_rate": 4.9951337647908165e-05, "loss": 0.2477, "step": 335},
    {"epoch": 0.3297769156159069, "grad_norm": 0.12656843662261963, "learning_rate": 4.9936447323822424e-05, "loss": 0.2328, "step": 340},
    {"epoch": 0.33462657613967023, "grad_norm": 0.1163104996085167, "learning_rate": 4.991957520053294e-05, "loss": 0.2509, "step": 345},
    {"epoch": 0.3394762366634336, "grad_norm": 0.12447489798069, "learning_rate": 4.990072261893484e-05, "loss": 0.2589, "step": 350},
    {"epoch": 0.3443258971871969, "grad_norm": 0.11267515271902084, "learning_rate": 4.9879891077318176e-05, "loss": 0.2446, "step": 355},
    {"epoch": 0.34917555771096026, "grad_norm": 0.13075344264507294, "learning_rate": 4.985708223124888e-05, "loss": 0.2587, "step": 360},
    {"epoch": 0.35402521823472355, "grad_norm": 0.10673461854457855, "learning_rate": 4.9832297893437186e-05, "loss": 0.2457, "step": 365},
    {"epoch": 0.3588748787584869, "grad_norm": 0.12786507606506348, "learning_rate": 4.980554003359354e-05, "loss": 0.2369, "step": 370},
    {"epoch": 0.36372453928225024, "grad_norm": 0.1437879055738449, "learning_rate": 4.9776810778272075e-05, "loss": 0.2736, "step": 375},
    {"epoch": 0.3685741998060136, "grad_norm": 0.12847022712230682, "learning_rate": 4.9746112410701625e-05, "loss": 0.2252, "step": 380},
    {"epoch": 0.3734238603297769, "grad_norm": 0.11457338184118271, "learning_rate": 4.9713447370604236e-05, "loss": 0.2508, "step": 385},
    {"epoch": 0.37827352085354027, "grad_norm": 0.11042597144842148, "learning_rate": 4.967881825400129e-05, "loss": 0.2306, "step": 390},
    {"epoch": 0.3831231813773036, "grad_norm": 0.12592245638370514, "learning_rate": 4.964222781300719e-05, "loss": 0.2599, "step": 395},
    {"epoch": 0.3879728419010669, "grad_norm": 0.11546192318201065, "learning_rate": 4.960367895561063e-05, "loss": 0.2225, "step": 400},
    {"epoch": 0.39282250242483024, "grad_norm": 0.12539242208003998, "learning_rate": 4.956317474544348e-05, "loss": 0.2432, "step": 405},
    {"epoch": 0.3976721629485936, "grad_norm": 0.10763729363679886, "learning_rate": 4.952071840153732e-05, "loss": 0.2307, "step": 410},
    {"epoch": 0.40252182347235693, "grad_norm": 0.11994805932044983, "learning_rate": 4.947631329806761e-05, "loss": 0.2282, "step": 415},
    {"epoch": 0.4073714839961203, "grad_norm": 0.13727760314941406, "learning_rate": 4.9429962964085517e-05, "loss": 0.2225, "step": 420},
    {"epoch": 0.4122211445198836, "grad_norm": 0.1300915628671646, "learning_rate": 4.9381671083237476e-05, "loss": 0.2393, "step": 425},
    {"epoch": 0.41707080504364696, "grad_norm": 0.12672416865825653, "learning_rate": 4.9331441493472395e-05, "loss": 0.2296, "step": 430},
    {"epoch": 0.4219204655674103, "grad_norm": 0.13302814960479736, "learning_rate": 4.9279278186736674e-05, "loss": 0.2242, "step": 435},
    {"epoch": 0.4267701260911736, "grad_norm": 0.12973704934120178, "learning_rate": 4.922518530865693e-05, "loss": 0.2329, "step": 440},
    {"epoch": 0.43161978661493694, "grad_norm": 0.12950606644153595, "learning_rate": 4.916916715821053e-05, "loss": 0.2132, "step": 445},
    {"epoch": 0.4364694471387003, "grad_norm": 0.12280883640050888, "learning_rate": 4.911122818738394e-05, "loss": 0.2255, "step": 450},
    {"epoch": 0.44131910766246363, "grad_norm": 0.12878793478012085, "learning_rate": 4.9051373000818887e-05, "loss": 0.2268, "step": 455},
    {"epoch": 0.44616876818622697, "grad_norm": 0.12943603098392487, "learning_rate": 4.898960635544647e-05, "loss": 0.2145, "step": 460},
    {"epoch": 0.4510184287099903, "grad_norm": 0.13643978536128998, "learning_rate": 4.8925933160109016e-05, "loss": 0.2197, "step": 465},
    {"epoch": 0.45586808923375366, "grad_norm": 0.1563604176044464, "learning_rate": 4.886035847517003e-05, "loss": 0.2449, "step": 470},
    {"epoch": 0.46071774975751695, "grad_norm": 0.13257759809494019, "learning_rate": 4.879288751211199e-05, "loss": 0.2307, "step": 475},
    {"epoch": 0.4655674102812803, "grad_norm": 0.13240696489810944, "learning_rate": 4.8723525633122193e-05, "loss": 0.2136, "step": 480},
    {"epoch": 0.47041707080504364, "grad_norm": 0.14693856239318848, "learning_rate": 4.8652278350666566e-05, "loss": 0.2388, "step": 485},
    {"epoch": 0.475266731328807, "grad_norm": 0.14097219705581665, "learning_rate": 4.857915132705162e-05, "loss": 0.2178, "step": 490},
    {"epoch": 0.4801163918525703, "grad_norm": 0.1185833290219307, "learning_rate": 4.850415037397439e-05, "loss": 0.2283, "step": 495},
    {"epoch": 0.48496605237633367, "grad_norm": 0.1279655396938324, "learning_rate": 4.8427281452060594e-05, "loss": 0.2186, "step": 500},
    {"epoch": 0.48496605237633367, "eval_loss": 0.2229408472776413, "eval_runtime": 48.0836, "eval_samples_per_second": 14.017, "eval_steps_per_second": 0.894, "step": 500},
    {"epoch": 0.489815712900097, "grad_norm": 0.14021804928779602, "learning_rate": 4.834855067039088e-05, "loss": 0.227, "step": 505},
    {"epoch": 0.49466537342386036, "grad_norm": 0.13102716207504272, "learning_rate": 4.826796428601538e-05, "loss": 0.2302, "step": 510},
    {"epoch": 0.49951503394762364, "grad_norm": 0.11888754367828369, "learning_rate": 4.818552870345635e-05, "loss": 0.2245, "step": 515},
    {"epoch": 0.504364694471387, "grad_norm": 0.13540378212928772, "learning_rate": 4.810125047419926e-05, "loss": 0.233, "step": 520},
    {"epoch": 0.5092143549951503, "grad_norm": 0.12920723855495453, "learning_rate": 4.8015136296172055e-05, "loss": 0.2225, "step": 525},
    {"epoch": 0.5140640155189137, "grad_norm": 0.12302406132221222, "learning_rate": 4.792719301321289e-05, "loss": 0.2155, "step": 530},
    {"epoch": 0.518913676042677, "grad_norm": 0.12846626341342926, "learning_rate": 4.783742761452619e-05, "loss": 0.2285, "step": 535},
    {"epoch": 0.5237633365664404, "grad_norm": 0.12521494925022125, "learning_rate": 4.7745847234127206e-05, "loss": 0.2112, "step": 540},
    {"epoch": 0.5286129970902037, "grad_norm": 0.12830866873264313, "learning_rate": 4.7652459150275055e-05, "loss": 0.2246, "step": 545},
    {"epoch": 0.533462657613967, "grad_norm": 0.14525972306728363, "learning_rate": 4.7557270784894276e-05, "loss": 0.2293, "step": 550},
    {"epoch": 0.5383123181377304, "grad_norm": 0.14833694696426392, "learning_rate": 4.746028970298497e-05, "loss": 0.2366, "step": 555},
    {"epoch": 0.5431619786614937, "grad_norm": 0.13993489742279053, "learning_rate": 4.7361523612021616e-05, "loss": 0.2456, "step": 560},
    {"epoch": 0.5480116391852571, "grad_norm": 0.125143900513649, "learning_rate": 4.726098036134046e-05, "loss": 0.2365, "step": 565},
    {"epoch": 0.5528612997090203, "grad_norm": 0.1390179991722107, "learning_rate": 4.715866794151578e-05, "loss": 0.233, "step": 570},
    {"epoch": 0.5577109602327837, "grad_norm": 0.1502588987350464, "learning_rate": 4.705459448372478e-05, "loss": 0.234, "step": 575},
    {"epoch": 0.562560620756547, "grad_norm": 0.1441139429807663, "learning_rate": 4.6948768259101394e-05, "loss": 0.2085, "step": 580},
    {"epoch": 0.5674102812803103, "grad_norm": 0.136013925075531, "learning_rate": 4.6841197678078965e-05, "loss": 0.1938, "step": 585},
    {"epoch": 0.5722599418040737, "grad_norm": 0.12127859890460968, "learning_rate": 4.6731891289721786e-05, "loss": 0.2009, "step": 590},
    {"epoch": 0.577109602327837, "grad_norm": 0.14969481527805328, "learning_rate": 4.6620857781045715e-05, "loss": 0.2071, "step": 595},
    {"epoch": 0.5819592628516004, "grad_norm": 0.13548845052719116, "learning_rate": 4.65081059763278e-05, "loss": 0.2265, "step": 600},
    {"epoch": 0.5868089233753637, "grad_norm": 0.12766653299331665, "learning_rate": 4.639364483640489e-05, "loss": 0.2212, "step": 605},
    {"epoch": 0.5916585838991271, "grad_norm": 0.13482384383678436, "learning_rate": 4.627748345796158e-05, "loss": 0.2145, "step": 610},
    {"epoch": 0.5965082444228904, "grad_norm": 0.13684765994548798, "learning_rate": 4.615963107280722e-05, "loss": 0.2097, "step": 615},
    {"epoch": 0.6013579049466538, "grad_norm": 0.1494571417570114, "learning_rate": 4.604009704714219e-05, "loss": 0.2282, "step": 620},
    {"epoch": 0.6062075654704171, "grad_norm": 0.14703498780727386, "learning_rate": 4.591889088081363e-05, "loss": 0.1963, "step": 625},
    {"epoch": 0.6110572259941804, "grad_norm": 0.151000514626503, "learning_rate": 4.579602220656032e-05, "loss": 0.2267, "step": 630},
    {"epoch": 0.6159068865179438, "grad_norm": 0.1505574882030487, "learning_rate": 4.567150078924723e-05, "loss": 0.2298, "step": 635},
    {"epoch": 0.6207565470417071, "grad_norm": 0.1409265249967575, "learning_rate": 4.5545336525089444e-05, "loss": 0.2051, "step": 640},
    {"epoch": 0.6256062075654704, "grad_norm": 0.14167818427085876, "learning_rate": 4.5417539440865616e-05, "loss": 0.2302, "step": 645},
    {"epoch": 0.6304558680892337, "grad_norm": 0.14084312319755554, "learning_rate": 4.528811969312117e-05, "loss": 0.2164, "step": 650},
    {"epoch": 0.635305528612997, "grad_norm": 0.14464280009269714, "learning_rate": 4.515708756736108e-05, "loss": 0.1948, "step": 655},
    {"epoch": 0.6401551891367604, "grad_norm": 0.141952782869339, "learning_rate": 4.5024453477232444e-05, "loss": 0.2193, "step": 660},
    {"epoch": 0.6450048496605237, "grad_norm": 0.1446189135313034, "learning_rate": 4.4890227963696895e-05, "loss": 0.2141, "step": 665},
    {"epoch": 0.6498545101842871, "grad_norm": 0.15042971074581146, "learning_rate": 4.4754421694192835e-05, "loss": 0.2125, "step": 670},
    {"epoch": 0.6547041707080504, "grad_norm": 0.15916042029857635, "learning_rate": 4.4617045461787665e-05, "loss": 0.2215, "step": 675},
    {"epoch": 0.6595538312318138, "grad_norm": 0.15135568380355835, "learning_rate": 4.447811018432002e-05, "loss": 0.1974, "step": 680},
    {"epoch": 0.6644034917555771, "grad_norm": 0.12838397920131683, "learning_rate": 4.4337626903532076e-05, "loss": 0.2231, "step": 685},
    {"epoch": 0.6692531522793405, "grad_norm": 0.145597442984581, "learning_rate": 4.419560678419203e-05, "loss": 0.2193, "step": 690},
    {"epoch": 0.6741028128031038, "grad_norm": 0.14090026915073395, "learning_rate": 4.40520611132068e-05, "loss": 0.2196, "step": 695},
    {"epoch": 0.6789524733268671, "grad_norm": 0.1343528777360916, "learning_rate": 4.390700129872497e-05, "loss": 0.1998, "step": 700},
    {"epoch": 0.6838021338506305, "grad_norm": 0.14349782466888428, "learning_rate": 4.376043886923015e-05, "loss": 0.2073, "step": 705},
    {"epoch": 0.6886517943743938, "grad_norm": 0.15432004630565643, "learning_rate": 4.361238547262484e-05, "loss": 0.2184, "step": 710},
    {"epoch": 0.6935014548981572, "grad_norm": 0.13481660187244415, "learning_rate": 4.346285287530458e-05, "loss": 0.2248, "step": 715},
    {"epoch": 0.6983511154219205, "grad_norm": 0.1398806869983673, "learning_rate": 4.3311852961222966e-05, "loss": 0.2384, "step": 720},
    {"epoch": 0.7032007759456838, "grad_norm": 0.13174192607402802, "learning_rate": 4.315939773094709e-05, "loss": 0.213, "step": 725},
    {"epoch": 0.7080504364694471, "grad_norm": 0.12232716381549835, "learning_rate": 4.300549930070387e-05, "loss": 0.2283, "step": 730},
    {"epoch": 0.7129000969932104, "grad_norm": 0.14730586111545563, "learning_rate": 4.2850169901417084e-05, "loss": 0.2098, "step": 735},
    {"epoch": 0.7177497575169738, "grad_norm": 0.13813212513923645, "learning_rate": 4.269342187773532e-05, "loss": 0.2062, "step": 740},
    {"epoch": 0.7225994180407371, "grad_norm": 0.13183937966823578, "learning_rate": 4.253526768705097e-05, "loss": 0.2034, "step": 745},
    {"epoch": 0.7274490785645005, "grad_norm": 0.14278775453567505, "learning_rate": 4.237571989851011e-05, "loss": 0.2227, "step": 750},
    {"epoch": 0.7274490785645005, "eval_loss": 0.21041834354400635, "eval_runtime": 48.0966, "eval_samples_per_second": 14.013, "eval_steps_per_second": 0.894, "step": 750},
    {"epoch": 0.7322987390882638, "grad_norm": 0.16159804165363312, "learning_rate": 4.221479119201362e-05, "loss": 0.2191, "step": 755},
    {"epoch": 0.7371483996120272, "grad_norm": 0.13423971831798553, "learning_rate": 4.205249435720943e-05, "loss": 0.1993, "step": 760},
    {"epoch": 0.7419980601357905, "grad_norm": 0.14568765461444855, "learning_rate": 4.1888842292476126e-05, "loss": 0.2023, "step": 765},
    {"epoch": 0.7468477206595538, "grad_norm": 0.13105235993862152, "learning_rate": 4.172384800389784e-05, "loss": 0.2048, "step": 770},
    {"epoch": 0.7516973811833172, "grad_norm": 0.14081120491027832, "learning_rate": 4.155752460423059e-05, "loss": 0.2078, "step": 775},
    {"epoch": 0.7565470417070805, "grad_norm": 0.13213010132312775, "learning_rate": 4.138988531186016e-05, "loss": 0.2183, "step": 780},
    {"epoch": 0.7613967022308439, "grad_norm": 0.1357748806476593, "learning_rate": 4.1220943449751606e-05, "loss": 0.2016, "step": 785},
    {"epoch": 0.7662463627546072, "grad_norm": 0.13532240688800812, "learning_rate": 4.105071244439039e-05, "loss": 0.2113, "step": 790},
    {"epoch": 0.7710960232783706, "grad_norm": 0.14828477799892426, "learning_rate": 4.0879205824715384e-05, "loss": 0.2159, "step": 795},
    {"epoch": 0.7759456838021338, "grad_norm": 0.1488686501979828, "learning_rate": 4.070643722104358e-05, "loss": 0.2124, "step": 800},
    {"epoch": 0.7807953443258971, "grad_norm": 0.1575794368982315, "learning_rate": 4.053242036398692e-05, "loss": 0.2065, "step": 805},
    {"epoch": 0.7856450048496605, "grad_norm": 0.13456198573112488, "learning_rate": 4.035716908336102e-05, "loss": 0.201, "step": 810},
    {"epoch": 0.7904946653734238, "grad_norm": 0.13958740234375, "learning_rate": 4.01806973070861e-05, "loss": 0.2093, "step": 815},
    {"epoch": 0.7953443258971872, "grad_norm": 0.14714553952217102, "learning_rate": 4.000301906008001e-05, "loss": 0.2082, "step": 820},
    {"epoch": 0.8001939864209505, "grad_norm": 0.1589927226305008, "learning_rate": 3.9824148463143685e-05, "loss": 0.2279, "step": 825},
    {"epoch": 0.8050436469447139, "grad_norm": 0.15132376551628113, "learning_rate": 3.964409973183886e-05, "loss": 0.204, "step": 830},
    {"epoch": 0.8098933074684772, "grad_norm": 0.156170055270195, "learning_rate": 3.946288717535833e-05, "loss": 0.2051, "step": 835},
    {"epoch": 0.8147429679922406, "grad_norm": 0.12180455029010773, "learning_rate": 3.928052519538874e-05, "loss": 0.2064, "step": 840},
    {"epoch": 0.8195926285160039, "grad_norm": 0.1337326467037201, "learning_rate": 3.9097028284966e-05, "loss": 0.2025, "step": 845},
    {"epoch": 0.8244422890397672, "grad_norm": 0.13768617808818817, "learning_rate": 3.891241102732348e-05, "loss": 0.2037, "step": 850},
    {"epoch": 0.8292919495635306, "grad_norm": 0.15049096941947937, "learning_rate": 3.872668809473304e-05, "loss": 0.2028, "step": 855},
    {"epoch": 0.8341416100872939, "grad_norm": 0.14802470803260803, "learning_rate": 3.8539874247338956e-05, "loss": 0.2137, "step": 860},
    {"epoch": 0.8389912706110573, "grad_norm": 0.13950683176517487, "learning_rate": 3.835198433198484e-05, "loss": 0.2375, "step": 865},
    {"epoch": 0.8438409311348206, "grad_norm": 0.13893653452396393, "learning_rate": 3.816303328103374e-05, "loss": 0.204, "step": 870},
    {"epoch": 0.8486905916585838, "grad_norm": 0.1808776557445526, "learning_rate": 3.7973036111181405e-05, "loss": 0.2043, "step": 875},
    {"epoch": 0.8535402521823472, "grad_norm": 0.1470966786146164, "learning_rate": 3.7782007922262827e-05, "loss": 0.2117, "step": 880},
    {"epoch": 0.8583899127061105, "grad_norm": 0.15209127962589264, "learning_rate": 3.758996389605222e-05, "loss": 0.2187, "step": 885},
    {"epoch": 0.8632395732298739, "grad_norm": 0.1539296954870224, "learning_rate": 3.739691929505641e-05, "loss": 0.2062, "step": 890},
    {"epoch": 0.8680892337536372, "grad_norm": 0.13484741747379303, "learning_rate": 3.720288946130197e-05, "loss": 0.1985, "step": 895},
    {"epoch": 0.8729388942774006, "grad_norm": 0.15806835889816284, "learning_rate": 3.7007889815115796e-05, "loss": 0.2144, "step": 900},
    {"epoch": 0.8777885548011639, "grad_norm": 0.16019117832183838, "learning_rate": 3.681193585389969e-05, "loss": 0.1912, "step": 905},
    {"epoch": 0.8826382153249273, "grad_norm": 0.1545192003250122, "learning_rate": 3.6615043150898674e-05, "loss": 0.2093, "step": 910},
    {"epoch": 0.8874878758486906, "grad_norm": 0.1420498490333557, "learning_rate": 3.641722735396336e-05, "loss": 0.205, "step": 915},
    {"epoch": 0.8923375363724539, "grad_norm": 0.15403462946414948, "learning_rate": 3.6218504184306295e-05, "loss": 0.1907, "step": 920},
    {"epoch": 0.8971871968962173, "grad_norm": 0.1699294149875641, "learning_rate": 3.6018889435252574e-05, "loss": 0.2399, "step": 925},
    {"epoch": 0.9020368574199806, "grad_norm": 0.1434181183576584, "learning_rate": 3.581839897098468e-05, "loss": 0.2033, "step": 930},
    {"epoch": 0.906886517943744, "grad_norm": 0.14518964290618896, "learning_rate": 3.561704872528169e-05, "loss": 0.1908, "step": 935},
    {"epoch": 0.9117361784675073, "grad_norm": 0.1436786949634552, "learning_rate": 3.5414854700252945e-05, "loss": 0.202, "step": 940},
    {"epoch": 0.9165858389912707, "grad_norm": 0.1517830491065979, "learning_rate": 3.521183296506628e-05, "loss": 0.2088, "step": 945},
    {"epoch": 0.9214354995150339, "grad_norm": 0.1423126757144928, "learning_rate": 3.5007999654671004e-05, "loss": 0.2026, "step": 950},
    {"epoch": 0.9262851600387972, "grad_norm": 0.13826879858970642, "learning_rate": 3.4803370968515535e-05, "loss": 0.2048, "step": 955},
    {"epoch": 0.9311348205625606, "grad_norm": 0.15552102029323578, "learning_rate": 3.4597963169259994e-05, "loss": 0.2074, "step": 960},
    {"epoch": 0.9359844810863239, "grad_norm": 0.1478404849767685, "learning_rate": 3.4391792581483724e-05, "loss": 0.2, "step": 965},
    {"epoch": 0.9408341416100873, "grad_norm": 0.15055781602859497, "learning_rate": 3.4184875590387924e-05, "loss": 0.2032, "step": 970},
    {"epoch": 0.9456838021338506, "grad_norm": 0.14816974103450775, "learning_rate": 3.397722864049347e-05, "loss": 0.2095, "step": 975},
    {"epoch": 0.950533462657614, "grad_norm": 0.15103183686733246, "learning_rate": 3.376886823433395e-05, "loss": 0.2133, "step": 980},
    {"epoch": 0.9553831231813773, "grad_norm": 0.15531527996063232, "learning_rate": 3.35598109311442e-05, "loss": 0.2125, "step": 985},
    {"epoch": 0.9602327837051406, "grad_norm": 0.13943400979042053, "learning_rate": 3.335007334554423e-05, "loss": 0.1981, "step": 990},
    {"epoch": 0.965082444228904, "grad_norm": 0.14304272830486298, "learning_rate": 3.3139672146218846e-05, "loss": 0.2037, "step": 995},
    {"epoch": 0.9699321047526673, "grad_norm": 0.16258351504802704, "learning_rate": 3.2928624054592873e-05, "loss": 0.1965, "step": 1000},
    {"epoch": 0.9699321047526673, "eval_loss": 0.20380738377571106, "eval_runtime": 48.08, "eval_samples_per_second": 14.018, "eval_steps_per_second": 0.894, "step": 1000},
    {"epoch": 0.9747817652764307, "grad_norm": 0.14396609365940094, "learning_rate": 3.2716945843502224e-05, "loss": 0.2143, "step": 1005},
    {"epoch": 0.979631425800194, "grad_norm": 0.1358533650636673, "learning_rate": 3.2504654335860986e-05, "loss": 0.2189, "step": 1010},
    {"epoch": 0.9844810863239574, "grad_norm": 0.18899835646152496, "learning_rate": 3.229176640332433e-05, "loss": 0.2089, "step": 1015},
    {"epoch": 0.9893307468477207, "grad_norm": 0.14681367576122284, "learning_rate": 3.2078298964947715e-05, "loss": 0.1817, "step": 1020},
    {"epoch": 0.9941804073714839, "grad_norm": 0.13952337205410004, "learning_rate": 3.186426898584225e-05, "loss": 0.2128, "step": 1025},
    {"epoch": 0.9990300678952473, "grad_norm": 0.14799629151821136, "learning_rate": 3.164969347582639e-05, "loss": 0.2014, "step": 1030},
    {"epoch": 1.0038797284190106, "grad_norm": 0.16220593452453613, "learning_rate": 3.143458948807414e-05, "loss": 0.22, "step": 1035},
    {"epoch": 1.008729388942774, "grad_norm": 0.1490844190120697, "learning_rate": 3.1218974117759714e-05, "loss": 0.2157, "step": 1040},
    {"epoch": 1.0135790494665373, "grad_norm": 0.1579083800315857, "learning_rate": 3.100286450069897e-05, "loss": 0.1989, "step": 1045},
    {"epoch": 1.0184287099903007, "grad_norm": 0.16225625574588776, "learning_rate": 3.0786277811987505e-05, "loss": 0.1976, "step": 1050},
    {"epoch": 1.023278370514064, "grad_norm": 0.14916008710861206, "learning_rate": 3.0569231264635756e-05, "loss": 0.2004, "step": 1055},
    {"epoch": 1.0281280310378274, "grad_norm": 0.14502666890621185, "learning_rate": 3.0351742108200888e-05, "loss": 0.2043, "step": 1060},
    {"epoch": 1.0329776915615907, "grad_norm": 0.13275770843029022, "learning_rate": 3.0133827627416057e-05, "loss": 0.1969, "step": 1065},
    {"epoch": 1.037827352085354, "grad_norm": 0.14670969545841217, "learning_rate": 2.9915505140816597e-05, "loss": 0.2154, "step": 1070},
    {"epoch": 1.0426770126091174, "grad_norm": 0.15070156753063202, "learning_rate": 2.9696791999363727e-05, "loss": 0.2084, "step": 1075},
    {"epoch": 1.0475266731328807, "grad_norm": 0.14457589387893677, "learning_rate": 2.9477705585065546e-05, "loss": 0.216, "step": 1080},
    {"epoch": 1.052376333656644, "grad_norm": 0.1555938869714737, "learning_rate": 2.925826330959564e-05, "loss": 0.2024, "step": 1085},
    {"epoch": 1.0572259941804074, "grad_norm": 0.15301556885242462, "learning_rate": 2.9038482612909335e-05, "loss": 0.2119, "step": 1090},
    {"epoch": 1.0620756547041708, "grad_norm": 0.14304795861244202, "learning_rate": 2.8818380961857615e-05, "loss": 0.1878, "step": 1095},
    {"epoch": 1.066925315227934, "grad_norm": 0.1455308496952057, "learning_rate": 2.8597975848798992e-05, "loss": 0.2073, "step": 1100},
    {"epoch": 1.0717749757516974, "grad_norm": 0.15965287387371063, "learning_rate": 2.837728479020933e-05, "loss": 0.2036, "step": 1105},
    {"epoch": 1.0766246362754608, "grad_norm": 0.1591757833957672, "learning_rate": 2.8156325325289717e-05, "loss": 0.1807, "step": 1110},
    {"epoch": 1.0814742967992241, "grad_norm": 0.14703893661499023, "learning_rate": 2.7935115014572593e-05, "loss": 0.2166, "step": 1115},
    {"epoch": 1.0863239573229875, "grad_norm": 0.14766725897789001, "learning_rate": 2.7713671438526073e-05, "loss": 0.2137, "step": 1120},
    {"epoch": 1.0911736178467508, "grad_norm": 0.16928435862064362, "learning_rate": 2.7492012196156835e-05, "loss": 0.2041, "step": 1125},
    {"epoch": 1.0960232783705142, "grad_norm": 0.16884630918502808, "learning_rate": 2.7270154903611393e-05, "loss": 0.1964, "step": 1130},
    {"epoch": 1.1008729388942773, "grad_norm": 0.15490330755710602, "learning_rate": 2.7048117192776146e-05, "loss": 0.2096, "step": 1135},
    {"epoch": 1.1057225994180406, "grad_norm": 0.15356089174747467, "learning_rate": 2.6825916709876013e-05, "loss": 0.2006, "step": 1140},
    {"epoch": 1.110572259941804, "grad_norm": 0.14378608763217926, "learning_rate": 2.6603571114072066e-05, "loss": 0.2045, "step": 1145},
    {"epoch": 1.1154219204655673, "grad_norm": 0.1541430503129959, "learning_rate": 2.638109807605811e-05, "loss": 0.2145, "step": 1150},
    {"epoch": 1.1202715809893307, "grad_norm": 0.14816121757030487, "learning_rate": 2.6158515276656253e-05, "loss": 0.197, "step": 1155},
    {"epoch": 1.125121241513094, "grad_norm": 0.15891754627227783, "learning_rate": 2.5935840405411804e-05, "loss": 0.2186, "step": 1160},
    {"epoch": 1.1299709020368573, "grad_norm": 0.16058792173862457, "learning_rate": 2.571309115918738e-05, "loss": 0.2147, "step": 1165},
    {"epoch": 1.1348205625606207, "grad_norm": 0.14306621253490448, "learning_rate": 2.549028524075649e-05, "loss": 0.2056, "step": 1170},
    {"epoch": 1.139670223084384, "grad_norm": 0.1553443819284439, "learning_rate": 2.5267440357396588e-05, "loss": 0.1925, "step": 1175},
    {"epoch": 1.1445198836081474, "grad_norm": 0.15235552191734314, "learning_rate": 2.5044574219481832e-05, "loss": 0.2173, "step": 1180},
    {"epoch": 1.1493695441319107, "grad_norm": 0.13989904522895813, "learning_rate": 2.4821704539075577e-05, "loss": 0.1786, "step": 1185},
    {"epoch": 1.154219204655674, "grad_norm": 0.1479206681251526, "learning_rate": 2.4598849028522713e-05, "loss": 0.2044, "step": 1190},
    {"epoch": 1.1590688651794374, "grad_norm": 0.17453940212726593, "learning_rate": 2.4376025399041985e-05, "loss": 0.2086, "step": 1195},
    {"epoch": 1.1639185257032008, "grad_norm": 0.15538154542446136, "learning_rate": 2.4153251359318404e-05, "loss": 0.1891, "step": 1200},
    {"epoch": 1.168768186226964, "grad_norm": 0.14030437171459198, "learning_rate": 2.3930544614095944e-05, "loss": 0.1813, "step": 1205},
    {"epoch": 1.1736178467507274, "grad_norm": 0.15048657357692719, "learning_rate": 2.370792286277035e-05, "loss": 0.2068, "step": 1210},
    {"epoch": 1.1784675072744908, "grad_norm": 0.14109192788600922, "learning_rate": 2.3485403797982586e-05, "loss": 0.1992, "step": 1215},
    {"epoch": 1.1833171677982541, "grad_norm": 0.1549970656633377, "learning_rate": 2.326300510421273e-05, "loss": 0.2067, "step": 1220},
    {"epoch": 1.1881668283220175, "grad_norm": 0.15575067698955536, "learning_rate": 2.3040744456374462e-05, "loss": 0.1806, "step": 1225},
    {"epoch": 1.1930164888457808, "grad_norm": 0.14550498127937317, "learning_rate": 2.2818639518410414e-05, "loss": 0.2088, "step": 1230},
    {"epoch": 1.1978661493695442, "grad_norm": 0.15763843059539795, "learning_rate": 2.259670794188833e-05, "loss": 0.2121, "step": 1235},
    {"epoch": 1.2027158098933075, "grad_norm": 0.17174339294433594, "learning_rate": 2.2374967364598222e-05, "loss": 0.1971, "step": 1240},
    {"epoch": 1.2075654704170709, "grad_norm": 0.13719911873340607, "learning_rate": 2.2153435409150635e-05, "loss": 0.1851, "step": 1245},
    {"epoch": 1.2124151309408342, "grad_norm": 0.1528056263923645, "learning_rate": 2.1932129681576105e-05, "loss": 0.2001, "step": 1250},
    {"epoch": 1.2124151309408342, "eval_loss": 0.20025967061519623, "eval_runtime": 48.0897, "eval_samples_per_second": 14.015, "eval_steps_per_second": 0.894, "step": 1250},
    {"epoch": 1.2172647914645975, "grad_norm": 0.16801750659942627, "learning_rate": 2.1711067769925914e-05, "loss": 0.2184, "step": 1255},
    {"epoch": 1.2221144519883609, "grad_norm": 0.1539103388786316, "learning_rate": 2.149026724287435e-05, "loss": 0.2176, "step": 1260},
    {"epoch": 1.2269641125121242, "grad_norm": 0.15657076239585876, "learning_rate": 2.126974564832239e-05, "loss": 0.1989, "step": 1265},
    {"epoch": 1.2318137730358876, "grad_norm": 0.15106332302093506, "learning_rate": 2.104952051200315e-05, "loss": 0.2045, "step": 1270},
    {"epoch": 1.236663433559651, "grad_norm": 0.14375294744968414, "learning_rate": 2.082960933608903e-05, "loss": 0.1917, "step": 1275},
    {"epoch": 1.2415130940834143, "grad_norm": 0.14703327417373657, "learning_rate": 2.061002959780069e-05, "loss": 0.1977, "step": 1280},
    {"epoch": 1.2463627546071776, "grad_norm": 0.1680404096841812, "learning_rate": 2.03907987480182e-05, "loss": 0.2127, "step": 1285},
    {"epoch": 1.251212415130941, "grad_norm": 0.14668302237987518, "learning_rate": 2.0171934209893994e-05, "loss": 0.2012, "step": 1290},
    {"epoch": 1.2560620756547043, "grad_norm": 0.14603851735591888, "learning_rate": 1.9953453377468282e-05, "loss": 0.1996, "step": 1295},
    {"epoch": 1.2609117361784676, "grad_norm": 0.1648787260055542, "learning_rate": 1.973537361428665e-05, "loss": 0.2164, "step": 1300},
    {"epoch": 1.265761396702231, "grad_norm": 0.16897331178188324, "learning_rate": 1.95177122520201e-05, "loss": 0.1999, "step": 1305},
    {"epoch": 1.270611057225994, "grad_norm": 0.16626134514808655, "learning_rate": 1.9300486589087645e-05, "loss": 0.1934, "step": 1310},
    {"epoch": 1.2754607177497574, "grad_norm": 0.15668809413909912, "learning_rate": 1.908371388928152e-05, "loss": 0.2078, "step": 1315},
    {"epoch": 1.2803103782735208, "grad_norm": 0.14479213953018188, "learning_rate": 1.8867411380395177e-05, "loss": 0.198, "step": 1320},
    {"epoch": 1.2851600387972841, "grad_norm": 0.1500495970249176, "learning_rate": 1.865159625285412e-05, "loss": 0.2103, "step": 1325},
    {"epoch": 1.2900096993210475, "grad_norm": 0.1683105230331421, "learning_rate": 1.8436285658349694e-05, "loss": 0.2045, "step": 1330},
    {"epoch": 1.2948593598448108, "grad_norm": 0.16197173297405243, "learning_rate": 1.822149670847599e-05, "loss": 0.189, "step": 1335},
    {"epoch": 1.2997090203685742, "grad_norm": 0.1493465155363083, "learning_rate": 1.800724647336992e-05, "loss": 0.1998, "step": 1340},
    {"epoch": 1.3045586808923375, "grad_norm": 0.15609502792358398, "learning_rate": 1.779355198035458e-05, "loss": 0.1944, "step": 1345},
    {"epoch": 1.3094083414161009, "grad_norm": 0.16641376912593842, "learning_rate": 1.758043021258601e-05, "loss": 0.2032, "step": 1350},
    {"epoch": 1.3142580019398642, "grad_norm": 0.16449637711048126, "learning_rate": 1.7367898107703497e-05, "loss": 0.2099, "step": 1355},
    {"epoch": 1.3191076624636275, "grad_norm": 0.14618897438049316, "learning_rate": 1.7155972556483424e-05, "loss": 0.1924, "step": 1360},
    {"epoch": 1.3239573229873909, "grad_norm": 0.1407703310251236, "learning_rate": 1.6944670401496997e-05, "loss": 0.1909, "step": 1365},
    {"epoch": 1.3288069835111542, "grad_norm": 0.17186439037322998, "learning_rate": 1.673400843577156e-05, "loss": 0.2035, "step": 1370},
    {"epoch": 1.3336566440349176, "grad_norm": 0.14018838107585907, "learning_rate": 1.652400340145609e-05, "loss": 0.1949, "step": 1375},
    {"epoch": 1.338506304558681, "grad_norm": 0.14712657034397125, "learning_rate": 1.6314671988490603e-05, "loss": 0.1878, "step": 1380},
    {"epoch": 1.3433559650824443, "grad_norm": 0.15526027977466583, "learning_rate": 1.610603083327971e-05, "loss": 0.1971, "step": 1385},
    {"epoch": 1.3482056256062076, "grad_norm": 0.14195865392684937, "learning_rate": 1.5898096517370498e-05, "loss": 0.196, "step": 1390},
    {"epoch": 1.353055286129971, "grad_norm": 0.16893810033798218, "learning_rate": 1.5690885566134696e-05, "loss": 0.1897, "step": 1395},
    {"epoch": 1.3579049466537343, "grad_norm": 0.16904611885547638, "learning_rate": 1.5484414447455348e-05, "loss": 0.1959, "step": 1400},
    {"epoch": 1.3627546071774976, "grad_norm": 0.16385570168495178, "learning_rate": 1.5278699570418056e-05, "loss": 0.1971, "step": 1405},
    {"epoch": 1.367604267701261, "grad_norm": 0.16677437722682953, "learning_rate": 1.5073757284006868e-05, "loss": 0.1823, "step": 1410},
    {"epoch": 1.3724539282250243, "grad_norm": 0.1567675769329071, "learning_rate": 1.4869603875804955e-05, "loss": 0.1941, "step": 1415},
    {"epoch": 1.3773035887487877, "grad_norm": 0.1708156019449234, "learning_rate": 1.4666255570700202e-05, "loss": 0.1997, "step": 1420},
    {"epoch": 1.3821532492725508, "grad_norm": 0.14904044568538666, "learning_rate": 1.4463728529595719e-05, "loss": 0.1885, "step": 1425},
    {"epoch": 1.3870029097963141, "grad_norm": 0.15045498311519623, "learning_rate": 1.4262038848125475e-05, "loss": 0.2168, "step": 1430},
    {"epoch": 1.3918525703200775, "grad_norm": 0.1507643461227417, "learning_rate": 1.4061202555375142e-05, "loss": 0.1959, "step": 1435},
    {"epoch": 1.3967022308438408, "grad_norm": 0.15534117817878723, "learning_rate": 1.3861235612608142e-05, "loss": 0.1976, "step": 1440},
    {"epoch": 1.4015518913676042, "grad_norm": 0.14476318657398224, "learning_rate": 1.366215391199721e-05, "loss": 0.1878, "step": 1445},
    {"epoch": 1.4064015518913675, "grad_norm": 0.144943967461586, "learning_rate": 1.346397327536134e-05, "loss": 0.1977, "step": 1450},
    {"epoch": 1.4112512124151309, "grad_norm": 0.1705586314201355, "learning_rate": 1.3266709452908348e-05, "loss": 0.2113, "step": 1455},
    {"epoch": 1.4161008729388942, "grad_norm": 0.14363010227680206, "learning_rate": 1.3070378121983174e-05, "loss": 0.1902, "step": 1460},
    {"epoch": 1.4209505334626575, "grad_norm": 0.17444470524787903, "learning_rate": 1.2874994885821933e-05, "loss": 0.1994, "step": 1465},
    {"epoch": 1.4258001939864209, "grad_norm": 0.15788498520851135, "learning_rate": 1.268057527231186e-05, "loss": 0.1999, "step": 1470},
    {"epoch": 1.4306498545101842, "grad_norm": 0.1490585207939148, "learning_rate": 1.2487134732757219e-05, "loss": 0.1904, "step": 1475},
    {"epoch": 1.4354995150339476, "grad_norm": 0.1542252153158188, "learning_rate": 1.2294688640651355e-05, "loss": 0.2078, "step": 1480},
    {"epoch": 1.440349175557711, "grad_norm": 0.1651746928691864, "learning_rate": 1.2103252290454942e-05, "loss": 0.1843, "step": 1485},
    {"epoch": 1.4451988360814743, "grad_norm": 0.15188032388687134, "learning_rate": 1.191284089638037e-05, "loss": 0.2107, "step": 1490},
    {"epoch": 1.4500484966052376, "grad_norm": 0.15589340031147003, "learning_rate": 1.1723469591182675e-05, "loss": 0.1961, "step": 1495},
    {"epoch": 1.454898157129001, "grad_norm": 0.1623036116361618, "learning_rate": 1.1535153424956922e-05, "loss": 0.2087, "step": 1500},
    {"epoch": 1.454898157129001, "eval_loss": 0.197998046875, "eval_runtime": 48.0919, "eval_samples_per_second": 14.015, "eval_steps_per_second": 0.894, "step": 1500},
    {"epoch": 1.4597478176527643, "grad_norm": 0.1391475796699524, "learning_rate": 1.134790736394199e-05, "loss": 0.1829, "step": 1505},
    {"epoch": 1.4645974781765276, "grad_norm": 0.15713264048099518, "learning_rate": 1.116174628933127e-05, "loss": 0.1862, "step": 1510},
    {"epoch": 1.469447138700291, "grad_norm": 0.14922243356704712, "learning_rate": 1.0976684996089911e-05, "loss": 0.2022, "step": 1515},
    {"epoch": 1.4742967992240543, "grad_norm": 0.15391802787780762, "learning_rate": 1.0792738191779055e-05, "loss": 0.199, "step": 1520},
    {"epoch": 1.4791464597478177, "grad_norm": 0.16635461151599884, "learning_rate": 1.0609920495386963e-05, "loss": 0.183, "step": 1525},
    {"epoch": 1.483996120271581, "grad_norm": 0.14732369780540466, "learning_rate": 1.0428246436167136e-05, "loss": 0.1946, "step": 1530},
    {"epoch": 1.4888457807953444, "grad_norm": 0.15745453536510468, "learning_rate": 1.0247730452483683e-05, "loss": 0.1999, "step": 1535},
    {"epoch": 1.4936954413191077, "grad_norm": 0.1535121500492096, "learning_rate": 1.0068386890663828e-05, "loss": 0.2131, "step": 1540},
    {"epoch": 1.498545101842871, "grad_norm": 0.16908058524131775, "learning_rate": 9.890230003857704e-06, "loss": 0.1893, "step": 1545},
    {"epoch": 1.5033947623666344, "grad_norm": 0.16990558803081512, "learning_rate": 9.71327395090566e-06, "loss": 0.2033, "step": 1550},
    {"epoch": 1.5082444228903977, "grad_norm": 0.1563047617673874, "learning_rate": 9.537532795212988e-06, "loss": 0.1975, "step": 1555},
    {"epoch": 1.513094083414161, "grad_norm": 0.16167347133159637, "learning_rate": 9.363020503632197e-06, "loss": 0.1923, "step": 1560},
    {"epoch": 1.5179437439379244, "grad_norm": 0.1589316427707672, "learning_rate": 9.189750945353082e-06, "loss": 0.2131, "step": 1565},
    {"epoch": 1.5227934044616878, "grad_norm": 0.15486431121826172, "learning_rate": 9.01773789080045e-06, "loss": 0.1961, "step": 1570},
    {"epoch": 1.527643064985451, "grad_norm": 0.17133159935474396, "learning_rate": 8.846995010539707e-06, "loss": 0.179, "step": 1575},
    {"epoch": 1.5324927255092144, "grad_norm": 0.1626712530851364, "learning_rate": 8.67753587419046e-06, "loss": 0.1977, "step": 1580},
    {"epoch": 1.5373423860329778, "grad_norm": 0.15872539579868317, "learning_rate": 8.509373949348012e-06, "loss": 0.1981, "step": 1585},
    {"epoch": 1.5421920465567411, "grad_norm": 0.15292125940322876, "learning_rate": 8.34252260051314e-06, "loss": 0.1971, "step": 1590},
    {"epoch": 1.5470417070805045, "grad_norm": 0.15812063217163086, "learning_rate": 8.176995088029852e-06, "loss": 0.1964, "step": 1595},
    {"epoch": 1.5518913676042678, "grad_norm": 0.15867389738559723, "learning_rate": 8.01280456703159e-06, "loss": 0.1971, "step": 1600},
    {"epoch": 1.5567410281280312, "grad_norm": 0.1688418984413147, "learning_rate": 7.84996408639576e-06, "loss": 0.2117, "step": 1605},
    {"epoch": 1.5615906886517945, "grad_norm": 0.16065895557403564, "learning_rate": 7.688486587706661e-06, "loss": 0.199, "step": 1610},
    {"epoch": 1.5664403491755579, "grad_norm": 0.15269528329372406, "learning_rate": 7.528384904226932e-06, "loss": 0.1894, "step": 1615},
    {"epoch": 1.5712900096993212, "grad_norm": 0.16433826088905334, "learning_rate": 7.369671759877705e-06, "loss": 0.2061, "step": 1620},
    {"epoch": 1.5761396702230845, "grad_norm": 0.15707933902740479, "learning_rate": 7.212359768227345e-06, "loss": 0.201, "step": 1625},
    {"epoch": 1.5809893307468477, "grad_norm": 0.15439623594284058, "learning_rate": 7.056461431489015e-06, "loss": 0.1866, "step": 1630},
    {"epoch": 1.585838991270611, "grad_norm": 0.1509786695241928, "learning_rate": 6.901989139527048e-06, "loss": 0.1877, "step": 1635},
    {"epoch": 1.5906886517943744, "grad_norm": 0.16154807806015015, "learning_rate": 6.748955168872312e-06, "loss": 0.1974, "step": 1640},
    {"epoch": 1.5955383123181377, "grad_norm": 0.17238792777061462, "learning_rate": 6.5973716817465365e-06, "loss": 0.2212, "step": 1645},
    {"epoch": 1.600387972841901, "grad_norm": 0.1707492172718048, "learning_rate": 6.4472507250956994e-06, "loss": 0.1852, "step": 1650},
    {"epoch": 1.6052376333656644, "grad_norm": 0.16531886160373688, "learning_rate": 6.2986042296326296e-06, "loss": 0.1953, "step": 1655},
    {"epoch": 1.6100872938894277, "grad_norm": 0.154957577586174, "learning_rate": 6.1514440088888735e-06, "loss": 0.2059, "step": 1660},
    {"epoch": 1.614936954413191, "grad_norm": 0.16685739159584045, "learning_rate": 6.005781758275731e-06, "loss": 0.2111, "step": 1665},
    {"epoch": 1.6197866149369544, "grad_norm": 0.16281373798847198, "learning_rate": 5.861629054154852e-06, "loss": 0.2048, "step": 1670},
    {"epoch": 1.6246362754607178, "grad_norm": 0.16481256484985352, "learning_rate": 5.7189973529181705e-06, "loss": 0.2057, "step": 1675},
    {"epoch": 1.629485935984481, "grad_norm": 0.15931367874145508, "learning_rate": 5.577897990077458e-06, "loss": 0.209, "step": 1680},
    {"epoch": 1.6343355965082444, "grad_norm": 0.14951331913471222, "learning_rate": 5.438342179363418e-06, "loss": 0.2012, "step": 1685},
    {"epoch": 1.6391852570320078, "grad_norm": 0.1307000368833542, "learning_rate": 5.300341011834475e-06, "loss": 0.1821, "step": 1690},
    {"epoch": 1.6440349175557711, "grad_norm": 0.16035383939743042, "learning_rate": 5.163905454995372e-06, "loss": 0.1899, "step": 1695},
    {"epoch": 1.6488845780795345, "grad_norm": 0.15784330666065216, "learning_rate": 5.0290463519254925e-06, "loss": 0.1949, "step": 1700},
    {"epoch": 1.6537342386032978, "grad_norm": 0.1625281125307083, "learning_rate": 4.895774420417124e-06, "loss": 0.2106, "step": 1705},
    {"epoch": 1.658583899127061, "grad_norm": 0.15070758759975433, "learning_rate": 4.764100252123702e-06, "loss": 0.1786, "step": 1710},
    {"epoch": 1.6634335596508243, "grad_norm": 0.13678708672523499, "learning_rate": 4.634034311718022e-06, "loss": 0.1944, "step": 1715},
    {"epoch": 1.6682832201745876, "grad_norm": 0.16914202272891998, "learning_rate": 4.505586936060566e-06, "loss": 0.2035, "step": 1720},
    {"epoch": 1.673132880698351, "grad_norm": 0.15594607591629028, "learning_rate": 4.378768333378017e-06, "loss": 0.1883, "step": 1725},
    {"epoch": 1.6779825412221143, "grad_norm": 0.15200097858905792, "learning_rate": 4.253588582451954e-06, "loss": 0.1786, "step": 1730},
    {"epoch": 1.6828322017458777, "grad_norm": 0.1636827439069748, "learning_rate": 4.130057631817838e-06, "loss": 0.191, "step": 1735},
    {"epoch": 1.687681862269641, "grad_norm": 0.14671088755130768, "learning_rate": 4.008185298974393e-06, "loss": 0.1992, "step": 1740},
    {"epoch": 1.6925315227934044, "grad_norm": 0.1701505482196808, "learning_rate": 3.887981269603333e-06, "loss": 0.1913, "step": 1745},
    {"epoch": 1.6973811833171677, "grad_norm": 0.15805399417877197, "learning_rate": 3.7694550967996565e-06, "loss": 0.1932, "step": 1750},
    {"epoch": 1.6973811833171677, "eval_loss": 0.19688038527965546, "eval_runtime": 48.0858, "eval_samples_per_second": 14.017, "eval_steps_per_second": 0.894, "step": 1750},
{ |
|
"epoch": 1.702230843840931, |
|
"grad_norm": 0.15937374532222748, |
|
"learning_rate": 3.6526162003123615e-06, |
|
"loss": 0.1776, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.7070805043646944, |
|
"grad_norm": 0.15453214943408966, |
|
"learning_rate": 3.5374738657958454e-06, |
|
"loss": 0.2025, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.7119301648884577, |
|
"grad_norm": 0.15527448058128357, |
|
"learning_rate": 3.4240372440719594e-06, |
|
"loss": 0.2062, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.716779825412221, |
|
"grad_norm": 0.1499599665403366, |
|
"learning_rate": 3.3123153504027367e-06, |
|
"loss": 0.1999, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.7216294859359844, |
|
"grad_norm": 0.1412288248538971, |
|
"learning_rate": 3.202317063773891e-06, |
|
"loss": 0.1763, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.7264791464597478, |
|
"grad_norm": 0.16038767993450165, |
|
"learning_rate": 3.0940511261892154e-06, |
|
"loss": 0.1941, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.731328806983511, |
|
"grad_norm": 0.15138490498065948, |
|
"learning_rate": 2.987526141975783e-06, |
|
"loss": 0.1969, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.7361784675072744, |
|
"grad_norm": 0.15238988399505615, |
|
"learning_rate": 2.882750577100149e-06, |
|
"loss": 0.1865, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.7410281280310378, |
|
"grad_norm": 0.16951079666614532, |
|
"learning_rate": 2.7797327584955014e-06, |
|
"loss": 0.1931, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.7458777885548011, |
|
"grad_norm": 0.1534978598356247, |
|
"learning_rate": 2.6784808733999163e-06, |
|
"loss": 0.194, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.7507274490785645, |
|
"grad_norm": 0.15956641733646393, |
|
"learning_rate": 2.579002968705668e-06, |
|
"loss": 0.1865, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.7555771096023278, |
|
"grad_norm": 0.1396978348493576, |
|
"learning_rate": 2.481306950319706e-06, |
|
"loss": 0.1871, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.7604267701260912, |
|
"grad_norm": 0.1500706672668457, |
|
"learning_rate": 2.3854005825353467e-06, |
|
"loss": 0.206, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.7652764306498545, |
|
"grad_norm": 0.15206393599510193, |
|
"learning_rate": 2.29129148741524e-06, |
|
"loss": 0.204, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.7701260911736179, |
|
"grad_norm": 0.15631067752838135, |
|
"learning_rate": 2.198987144185552e-06, |
|
"loss": 0.2022, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.7749757516973812, |
|
"grad_norm": 0.16704493761062622, |
|
"learning_rate": 2.1084948886416295e-06, |
|
"loss": 0.2087, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.7798254122211445, |
|
"grad_norm": 0.14626185595989227, |
|
"learning_rate": 2.0198219125649396e-06, |
|
"loss": 0.2053, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.7846750727449079, |
|
"grad_norm": 0.1546330451965332, |
|
"learning_rate": 1.9329752631515513e-06, |
|
"loss": 0.1893, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.7895247332686712, |
|
"grad_norm": 0.1591494381427765, |
|
"learning_rate": 1.8479618424520422e-06, |
|
"loss": 0.1943, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7943743937924346, |
|
"grad_norm": 0.1502322554588318, |
|
"learning_rate": 1.7647884068229676e-06, |
|
"loss": 0.1899, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.799224054316198, |
|
"grad_norm": 0.1708429455757141, |
|
"learning_rate": 1.6834615663899155e-06, |
|
"loss": 0.214, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.8040737148399613, |
|
"grad_norm": 0.15139105916023254, |
|
"learning_rate": 1.6039877845221735e-06, |
|
"loss": 0.1921, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.8089233753637246, |
|
"grad_norm": 0.15141288936138153, |
|
"learning_rate": 1.5263733773190347e-06, |
|
"loss": 0.2192, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.813773035887488, |
|
"grad_norm": 0.18391959369182587, |
|
"learning_rate": 1.4506245131078733e-06, |
|
"loss": 0.2072, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.8186226964112513, |
|
"grad_norm": 0.15688742697238922, |
|
"learning_rate": 1.376747211953891e-06, |
|
"loss": 0.1824, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.8234723569350146, |
|
"grad_norm": 0.15558670461177826, |
|
"learning_rate": 1.304747345181681e-06, |
|
"loss": 0.1975, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.828322017458778, |
|
"grad_norm": 0.18685834109783173, |
|
"learning_rate": 1.2346306349086262e-06, |
|
"loss": 0.1957, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.8331716779825413, |
|
"grad_norm": 0.16459019482135773, |
|
"learning_rate": 1.1664026535901318e-06, |
|
"loss": 0.2068, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.8380213385063047, |
|
"grad_norm": 0.156924769282341, |
|
"learning_rate": 1.1000688235767525e-06, |
|
"loss": 0.2086, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.842870999030068, |
|
"grad_norm": 0.16359376907348633, |
|
"learning_rate": 1.0356344166832648e-06, |
|
"loss": 0.2163, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.8477206595538314, |
|
"grad_norm": 0.16292302310466766, |
|
"learning_rate": 9.73104553769691e-07, |
|
"loss": 0.2034, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.8525703200775947, |
|
"grad_norm": 0.17051053047180176, |
|
"learning_rate": 9.124842043343407e-07, |
|
"loss": 0.2168, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.857419980601358, |
|
"grad_norm": 0.17267897725105286, |
|
"learning_rate": 8.537781861188282e-07, |
|
"loss": 0.2234, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.8622696411251214, |
|
"grad_norm": 0.16032524406909943, |
|
"learning_rate": 7.969911647252193e-07, |
|
"loss": 0.1986, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.8671193016488847, |
|
"grad_norm": 0.1543329358100891, |
|
"learning_rate": 7.421276532452337e-07, |
|
"loss": 0.173, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.871968962172648, |
|
"grad_norm": 0.15669037401676178, |
|
"learning_rate": 6.891920119015571e-07, |
|
"loss": 0.1987, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.8768186226964112, |
|
"grad_norm": 0.1615551859140396, |
|
"learning_rate": 6.381884477013239e-07, |
|
"loss": 0.1831, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.8816682832201745, |
|
"grad_norm": 0.1686321347951889, |
|
"learning_rate": 5.891210141017733e-07, |
|
"loss": 0.2029, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.8865179437439379, |
|
"grad_norm": 0.15177270770072937, |
|
"learning_rate": 5.419936106881018e-07, |
|
"loss": 0.2034, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.8913676042677012, |
|
"grad_norm": 0.155603289604187, |
|
"learning_rate": 4.968099828635525e-07, |
|
"loss": 0.1962, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.8962172647914646, |
|
"grad_norm": 0.14843294024467468, |
|
"learning_rate": 4.535737215517366e-07, |
|
"loss": 0.1949, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.901066925315228, |
|
"grad_norm": 0.15314973890781403, |
|
"learning_rate": 4.122882629112701e-07, |
|
"loss": 0.2104, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.9059165858389913, |
|
"grad_norm": 0.1589522510766983, |
|
"learning_rate": 3.729568880626755e-07, |
|
"loss": 0.193, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.9107662463627546, |
|
"grad_norm": 0.14796093106269836, |
|
"learning_rate": 3.355827228276182e-07, |
|
"loss": 0.1712, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.915615906886518, |
|
"grad_norm": 0.1589411497116089, |
|
"learning_rate": 3.00168737480494e-07, |
|
"loss": 0.1878, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.9204655674102813, |
|
"grad_norm": 0.14060987532138824, |
|
"learning_rate": 2.6671774651235437e-07, |
|
"loss": 0.1762, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.9253152279340446, |
|
"grad_norm": 0.15994738042354584, |
|
"learning_rate": 2.3523240840724325e-07, |
|
"loss": 0.1924, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.930164888457808, |
|
"grad_norm": 0.15940771996974945, |
|
"learning_rate": 2.0571522543090526e-07, |
|
"loss": 0.2053, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.9350145489815713, |
|
"grad_norm": 0.14268319308757782, |
|
"learning_rate": 1.7816854343193357e-07, |
|
"loss": 0.1847, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.9398642095053347, |
|
"grad_norm": 0.15468433499336243, |
|
"learning_rate": 1.5259455165531068e-07, |
|
"loss": 0.2044, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.9398642095053347, |
|
"eval_loss": 0.19655267894268036, |
|
"eval_runtime": 48.0808, |
|
"eval_samples_per_second": 14.018, |
|
"eval_steps_per_second": 0.894, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.944713870029098, |
|
"grad_norm": 0.1507444828748703, |
|
"learning_rate": 1.2899528256845873e-07, |
|
"loss": 0.1824, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.9495635305528611, |
|
"grad_norm": 0.14235784113407135, |
|
"learning_rate": 1.073726116996715e-07, |
|
"loss": 0.1954, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.9544131910766245, |
|
"grad_norm": 0.14553847908973694, |
|
"learning_rate": 8.77282574890892e-08, |
|
"loss": 0.2163, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.9592628516003878, |
|
"grad_norm": 0.16772744059562683, |
|
"learning_rate": 7.006378115210766e-08, |
|
"loss": 0.1822, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.9641125121241512, |
|
"grad_norm": 0.15165811777114868, |
|
"learning_rate": 5.43805865553082e-08, |
|
"loss": 0.1862, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.9689621726479145, |
|
"grad_norm": 0.1540442407131195, |
|
"learning_rate": 4.067992010489685e-08, |
|
"loss": 0.2106, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.9738118331716779, |
|
"grad_norm": 0.14102330803871155, |
|
"learning_rate": 2.8962870647630814e-08, |
|
"loss": 0.1879, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.9786614936954412, |
|
"grad_norm": 0.14310438930988312, |
|
"learning_rate": 1.9230369384293233e-08, |
|
"loss": 0.1789, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.9835111542192045, |
|
"grad_norm": 0.1545654982328415, |
|
"learning_rate": 1.148318979568297e-08, |
|
"loss": 0.193, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.9883608147429679, |
|
"grad_norm": 0.15825237333774567, |
|
"learning_rate": 5.7219475811526316e-09, |
|
"loss": 0.1901, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.9932104752667312, |
|
"grad_norm": 0.1504404991865158, |
|
"learning_rate": 1.947100609661634e-09, |
|
"loss": 0.2034, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.9980601357904946, |
|
"grad_norm": 0.14141124486923218, |
|
"learning_rate": 1.5894888339418324e-10, |
|
"loss": 0.1773, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 2062, |
|
"total_flos": 2.6820605084455404e+18, |
|
"train_loss": 0.2813772309560665, |
|
"train_runtime": 15083.292, |
|
"train_samples_per_second": 4.374, |
|
"train_steps_per_second": 0.137 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2062, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 250, |
|
"total_flos": 2.6820605084455404e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|