| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 804, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.018656716417910446, |
| "grad_norm": 1.9214146781432249, |
| "learning_rate": 4.8780487804878055e-06, |
| "loss": 0.857, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.03731343283582089, |
| "grad_norm": 1.7625595046451557, |
| "learning_rate": 1.0975609756097562e-05, |
| "loss": 0.8038, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.055970149253731345, |
| "grad_norm": 0.6948661270341572, |
| "learning_rate": 1.707317073170732e-05, |
| "loss": 0.7448, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.07462686567164178, |
| "grad_norm": 0.5136824696543728, |
| "learning_rate": 2.3170731707317075e-05, |
| "loss": 0.7055, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.09328358208955224, |
| "grad_norm": 0.4018338347498522, |
| "learning_rate": 2.926829268292683e-05, |
| "loss": 0.6712, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.11194029850746269, |
| "grad_norm": 0.28711501314318655, |
| "learning_rate": 3.5365853658536584e-05, |
| "loss": 0.6599, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.13059701492537312, |
| "grad_norm": 0.2894771986907634, |
| "learning_rate": 4.146341463414634e-05, |
| "loss": 0.6395, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.14925373134328357, |
| "grad_norm": 0.2821436189327092, |
| "learning_rate": 4.75609756097561e-05, |
| "loss": 0.627, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.16791044776119404, |
| "grad_norm": 0.3279211667096163, |
| "learning_rate": 4.999828351434079e-05, |
| "loss": 0.6245, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.1865671641791045, |
| "grad_norm": 0.2537209910093848, |
| "learning_rate": 4.998779482816942e-05, |
| "loss": 0.6048, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.20522388059701493, |
| "grad_norm": 0.30352094230828874, |
| "learning_rate": 4.996777549883426e-05, |
| "loss": 0.6056, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.22388059701492538, |
| "grad_norm": 0.35358905732065365, |
| "learning_rate": 4.9938234010808136e-05, |
| "loss": 0.5995, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.24253731343283583, |
| "grad_norm": 0.5881353923808288, |
| "learning_rate": 4.989918288418841e-05, |
| "loss": 0.5898, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.26119402985074625, |
| "grad_norm": 0.4365964873532165, |
| "learning_rate": 4.9850638669390816e-05, |
| "loss": 0.5856, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.2798507462686567, |
| "grad_norm": 0.3310629283404945, |
| "learning_rate": 4.97926219401351e-05, |
| "loss": 0.583, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.29850746268656714, |
| "grad_norm": 0.39095858353376106, |
| "learning_rate": 4.9725157284725665e-05, |
| "loss": 0.5818, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.31716417910447764, |
| "grad_norm": 0.4130266981460891, |
| "learning_rate": 4.964827329563061e-05, |
| "loss": 0.5777, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.3358208955223881, |
| "grad_norm": 0.3251791282337107, |
| "learning_rate": 4.956200255736394e-05, |
| "loss": 0.5802, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.35447761194029853, |
| "grad_norm": 0.2800299594598508, |
| "learning_rate": 4.9466381632675714e-05, |
| "loss": 0.5777, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.373134328358209, |
| "grad_norm": 0.30351899813854083, |
| "learning_rate": 4.936145104705629e-05, |
| "loss": 0.5732, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.3917910447761194, |
| "grad_norm": 0.3184280675590268, |
| "learning_rate": 4.9247255271560994e-05, |
| "loss": 0.5693, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.41044776119402987, |
| "grad_norm": 0.35433866607808917, |
| "learning_rate": 4.9123842703962754e-05, |
| "loss": 0.5762, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.4291044776119403, |
| "grad_norm": 0.28639350303291805, |
| "learning_rate": 4.899126564824033e-05, |
| "loss": 0.5666, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.44776119402985076, |
| "grad_norm": 0.3274186200975153, |
| "learning_rate": 4.884958029241127e-05, |
| "loss": 0.5674, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.4664179104477612, |
| "grad_norm": 0.336381141981706, |
| "learning_rate": 4.869884668471853e-05, |
| "loss": 0.5641, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.48507462686567165, |
| "grad_norm": 0.4812221145192079, |
| "learning_rate": 4.8539128708181276e-05, |
| "loss": 0.563, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.503731343283582, |
| "grad_norm": 0.2985625122934817, |
| "learning_rate": 4.8370494053520316e-05, |
| "loss": 0.5599, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.5223880597014925, |
| "grad_norm": 0.40904102656943087, |
| "learning_rate": 4.8193014190469815e-05, |
| "loss": 0.5651, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.5410447761194029, |
| "grad_norm": 0.30462346527690765, |
| "learning_rate": 4.800676433748746e-05, |
| "loss": 0.5571, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.5597014925373134, |
| "grad_norm": 0.29542772666907074, |
| "learning_rate": 4.781182342987577e-05, |
| "loss": 0.5548, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.5783582089552238, |
| "grad_norm": 0.3051473053785931, |
| "learning_rate": 4.7608274086328275e-05, |
| "loss": 0.555, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.5970149253731343, |
| "grad_norm": 0.27345084859980273, |
| "learning_rate": 4.739620257391446e-05, |
| "loss": 0.5525, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.6156716417910447, |
| "grad_norm": 0.289181458801349, |
| "learning_rate": 4.7175698771518656e-05, |
| "loss": 0.551, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.6343283582089553, |
| "grad_norm": 0.41307993000424736, |
| "learning_rate": 4.6946856131748076e-05, |
| "loss": 0.5517, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.6529850746268657, |
| "grad_norm": 0.5340748917517905, |
| "learning_rate": 4.6709771641326244e-05, |
| "loss": 0.5509, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.6716417910447762, |
| "grad_norm": 0.3291048059480862, |
| "learning_rate": 4.6464545779988757e-05, |
| "loss": 0.5428, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.6902985074626866, |
| "grad_norm": 0.28986748904908194, |
| "learning_rate": 4.621128247789846e-05, |
| "loss": 0.5456, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.7089552238805971, |
| "grad_norm": 0.25439473449314687, |
| "learning_rate": 4.595008907159847e-05, |
| "loss": 0.5485, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.7276119402985075, |
| "grad_norm": 0.22286184197901884, |
| "learning_rate": 4.568107625852136e-05, |
| "loss": 0.5508, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.746268656716418, |
| "grad_norm": 0.2642945823026863, |
| "learning_rate": 4.5404358050074115e-05, |
| "loss": 0.5448, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.7649253731343284, |
| "grad_norm": 0.27448365288562576, |
| "learning_rate": 4.512005172331842e-05, |
| "loss": 0.5424, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.7835820895522388, |
| "grad_norm": 0.2971424901345131, |
| "learning_rate": 4.482827777126706e-05, |
| "loss": 0.5471, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.8022388059701493, |
| "grad_norm": 0.35310755203104544, |
| "learning_rate": 4.4529159851817255e-05, |
| "loss": 0.541, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.8208955223880597, |
| "grad_norm": 0.308625434473984, |
| "learning_rate": 4.422282473534271e-05, |
| "loss": 0.5405, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.8395522388059702, |
| "grad_norm": 0.31816940842512614, |
| "learning_rate": 4.3909402250966534e-05, |
| "loss": 0.546, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.8582089552238806, |
| "grad_norm": 0.22827227012944756, |
| "learning_rate": 4.358902523153791e-05, |
| "loss": 0.542, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.8768656716417911, |
| "grad_norm": 0.24079886565005823, |
| "learning_rate": 4.326182945733555e-05, |
| "loss": 0.5416, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.8955223880597015, |
| "grad_norm": 0.3638342950483874, |
| "learning_rate": 4.292795359852221e-05, |
| "loss": 0.5372, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.914179104477612, |
| "grad_norm": 0.3256070901197167, |
| "learning_rate": 4.2587539156374295e-05, |
| "loss": 0.5405, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.9328358208955224, |
| "grad_norm": 0.3165450063227312, |
| "learning_rate": 4.2240730403311586e-05, |
| "loss": 0.5364, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.9514925373134329, |
| "grad_norm": 0.30669208285819877, |
| "learning_rate": 4.188767432175263e-05, |
| "loss": 0.5322, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.9701492537313433, |
| "grad_norm": 0.257272912960744, |
| "learning_rate": 4.1528520541821506e-05, |
| "loss": 0.5353, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.9888059701492538, |
| "grad_norm": 0.30370874605896786, |
| "learning_rate": 4.116342127793245e-05, |
| "loss": 0.5385, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.007462686567164, |
| "grad_norm": 0.23367419347190874, |
| "learning_rate": 4.0792531264279285e-05, |
| "loss": 0.5246, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.0261194029850746, |
| "grad_norm": 0.28960025979101867, |
| "learning_rate": 4.041600768925687e-05, |
| "loss": 0.5175, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.044776119402985, |
| "grad_norm": 0.3544154485769114, |
| "learning_rate": 4.0034010128842484e-05, |
| "loss": 0.5183, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.0634328358208955, |
| "grad_norm": 0.2830781283593527, |
| "learning_rate": 3.964670047896525e-05, |
| "loss": 0.5181, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.0820895522388059, |
| "grad_norm": 0.26140038642658264, |
| "learning_rate": 3.925424288689239e-05, |
| "loss": 0.5087, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.1007462686567164, |
| "grad_norm": 0.2617191990469776, |
| "learning_rate": 3.8856803681661296e-05, |
| "loss": 0.516, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.1194029850746268, |
| "grad_norm": 0.26589479989678716, |
| "learning_rate": 3.8454551303586964e-05, |
| "loss": 0.5111, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.1380597014925373, |
| "grad_norm": 0.2406368600470144, |
| "learning_rate": 3.8047656232874624e-05, |
| "loss": 0.5139, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.1567164179104479, |
| "grad_norm": 0.25755826798857223, |
| "learning_rate": 3.763629091736781e-05, |
| "loss": 0.5116, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.1753731343283582, |
| "grad_norm": 0.33892425739707405, |
| "learning_rate": 3.722062969946254e-05, |
| "loss": 0.5118, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.1940298507462686, |
| "grad_norm": 0.23443314308723368, |
| "learning_rate": 3.6800848742218644e-05, |
| "loss": 0.5133, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.212686567164179, |
| "grad_norm": 0.23884530654624148, |
| "learning_rate": 3.6377125954699254e-05, |
| "loss": 0.5113, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.2313432835820897, |
| "grad_norm": 0.2090085180372967, |
| "learning_rate": 3.5949640916570566e-05, |
| "loss": 0.5085, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.25, |
| "grad_norm": 0.23291376449474052, |
| "learning_rate": 3.551857480199336e-05, |
| "loss": 0.5135, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.2686567164179103, |
| "grad_norm": 0.24011296587040026, |
| "learning_rate": 3.5084110302838916e-05, |
| "loss": 0.505, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.287313432835821, |
| "grad_norm": 0.21935005312866615, |
| "learning_rate": 3.464643155126162e-05, |
| "loss": 0.5104, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.3059701492537314, |
| "grad_norm": 0.2261621862735514, |
| "learning_rate": 3.4205724041661135e-05, |
| "loss": 0.5081, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.3246268656716418, |
| "grad_norm": 0.2574439184051825, |
| "learning_rate": 3.376217455206732e-05, |
| "loss": 0.5115, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.3432835820895521, |
| "grad_norm": 0.2129876722579311, |
| "learning_rate": 3.3315971064981025e-05, |
| "loss": 0.506, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.3619402985074627, |
| "grad_norm": 0.20753843021299664, |
| "learning_rate": 3.286730268770452e-05, |
| "loss": 0.5038, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.3805970149253732, |
| "grad_norm": 0.3217141035376458, |
| "learning_rate": 3.2416359572195155e-05, |
| "loss": 0.5132, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.3992537313432836, |
| "grad_norm": 0.21539693831207604, |
| "learning_rate": 3.1963332834476247e-05, |
| "loss": 0.5081, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.417910447761194, |
| "grad_norm": 0.27467469680387524, |
| "learning_rate": 3.150841447363948e-05, |
| "loss": 0.5067, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.4365671641791045, |
| "grad_norm": 0.23691311945378055, |
| "learning_rate": 3.1051797290472966e-05, |
| "loss": 0.5018, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.455223880597015, |
| "grad_norm": 0.30787370613511683, |
| "learning_rate": 3.059367480574958e-05, |
| "loss": 0.5035, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.4738805970149254, |
| "grad_norm": 0.21214521272783723, |
| "learning_rate": 3.0134241178210103e-05, |
| "loss": 0.5004, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.4925373134328357, |
| "grad_norm": 0.2567066053575565, |
| "learning_rate": 2.9673691122276086e-05, |
| "loss": 0.5041, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.5111940298507462, |
| "grad_norm": 0.23880722667167875, |
| "learning_rate": 2.9212219825527075e-05, |
| "loss": 0.4981, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.5298507462686568, |
| "grad_norm": 0.19930339353323281, |
| "learning_rate": 2.8750022865977443e-05, |
| "loss": 0.502, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.5485074626865671, |
| "grad_norm": 0.24784475664597616, |
| "learning_rate": 2.82872961291876e-05, |
| "loss": 0.5068, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.5671641791044775, |
| "grad_norm": 0.2637097189189239, |
| "learning_rate": 2.7824235725245042e-05, |
| "loss": 0.5031, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.585820895522388, |
| "grad_norm": 0.20097295957360145, |
| "learning_rate": 2.7361037905650032e-05, |
| "loss": 0.5051, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.6044776119402986, |
| "grad_norm": 0.1855097808926682, |
| "learning_rate": 2.689789898014155e-05, |
| "loss": 0.5043, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.623134328358209, |
| "grad_norm": 0.19387268984078115, |
| "learning_rate": 2.6435015233498443e-05, |
| "loss": 0.503, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.6417910447761193, |
| "grad_norm": 0.1959979856590692, |
| "learning_rate": 2.5972582842351156e-05, |
| "loss": 0.5031, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.6604477611940298, |
| "grad_norm": 0.2130968015995863, |
| "learning_rate": 2.551079779203932e-05, |
| "loss": 0.5056, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.6791044776119404, |
| "grad_norm": 0.19562804614219742, |
| "learning_rate": 2.504985579355047e-05, |
| "loss": 0.4971, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.6977611940298507, |
| "grad_norm": 0.20132877928156498, |
| "learning_rate": 2.458995220057491e-05, |
| "loss": 0.5048, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.716417910447761, |
| "grad_norm": 0.19111009428414005, |
| "learning_rate": 2.4131281926712146e-05, |
| "loss": 0.5051, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.7350746268656716, |
| "grad_norm": 0.19713565236023642, |
| "learning_rate": 2.3674039362863687e-05, |
| "loss": 0.5052, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.7537313432835822, |
| "grad_norm": 0.2278886953059889, |
| "learning_rate": 2.3218418294847517e-05, |
| "loss": 0.503, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.7723880597014925, |
| "grad_norm": 0.21076902236368567, |
| "learning_rate": 2.2764611821268918e-05, |
| "loss": 0.5004, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.7910447761194028, |
| "grad_norm": 0.2018936557220544, |
| "learning_rate": 2.231281227168257e-05, |
| "loss": 0.4972, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.8097014925373134, |
| "grad_norm": 0.1900377713824044, |
| "learning_rate": 2.18632111250806e-05, |
| "loss": 0.4987, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.828358208955224, |
| "grad_norm": 0.1818444743971112, |
| "learning_rate": 2.141599892874107e-05, |
| "loss": 0.4988, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.8470149253731343, |
| "grad_norm": 0.1843911681060789, |
| "learning_rate": 2.09713652174714e-05, |
| "loss": 0.5045, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.8656716417910446, |
| "grad_norm": 0.1756356146626833, |
| "learning_rate": 2.0529498433280807e-05, |
| "loss": 0.5062, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.8843283582089554, |
| "grad_norm": 0.17919268852808168, |
| "learning_rate": 2.0090585845516012e-05, |
| "loss": 0.5011, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.9029850746268657, |
| "grad_norm": 0.1861488051548517, |
| "learning_rate": 1.965481347149376e-05, |
| "loss": 0.4964, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.921641791044776, |
| "grad_norm": 0.1808039014665401, |
| "learning_rate": 1.9222365997664165e-05, |
| "loss": 0.5038, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.9402985074626866, |
| "grad_norm": 0.18185075285557611, |
| "learning_rate": 1.8793426701337947e-05, |
| "loss": 0.5033, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.9589552238805972, |
| "grad_norm": 0.1802698151327418, |
| "learning_rate": 1.8368177373010954e-05, |
| "loss": 0.498, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.9776119402985075, |
| "grad_norm": 0.1839298648566157, |
| "learning_rate": 1.7946798239318775e-05, |
| "loss": 0.5033, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.9962686567164178, |
| "grad_norm": 0.1787192449198445, |
| "learning_rate": 1.75294678866542e-05, |
| "loss": 0.4937, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.014925373134328, |
| "grad_norm": 0.19908015949978794, |
| "learning_rate": 1.7116363185479754e-05, |
| "loss": 0.4861, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.033582089552239, |
| "grad_norm": 0.17330047242909324, |
| "learning_rate": 1.670765921536755e-05, |
| "loss": 0.4807, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.0522388059701493, |
| "grad_norm": 0.1887761738499672, |
| "learning_rate": 1.6303529190798088e-05, |
| "loss": 0.4807, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.0708955223880596, |
| "grad_norm": 0.18086274895322357, |
| "learning_rate": 1.590414438774954e-05, |
| "loss": 0.478, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.08955223880597, |
| "grad_norm": 0.1666312790289245, |
| "learning_rate": 1.550967407110856e-05, |
| "loss": 0.4801, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.1082089552238807, |
| "grad_norm": 0.16804347810729678, |
| "learning_rate": 1.5120285422933478e-05, |
| "loss": 0.4814, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.126865671641791, |
| "grad_norm": 0.1741311542160614, |
| "learning_rate": 1.4736143471600173e-05, |
| "loss": 0.4841, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.1455223880597014, |
| "grad_norm": 0.16178175453073618, |
| "learning_rate": 1.4357411021860773e-05, |
| "loss": 0.4768, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.1641791044776117, |
| "grad_norm": 0.16621312000057425, |
| "learning_rate": 1.3984248585844645e-05, |
| "loss": 0.4804, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.1828358208955225, |
| "grad_norm": 0.16578503722378288, |
| "learning_rate": 1.3616814315031146e-05, |
| "loss": 0.4806, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.201492537313433, |
| "grad_norm": 0.17421634398822453, |
| "learning_rate": 1.3255263933222833e-05, |
| "loss": 0.477, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.220149253731343, |
| "grad_norm": 0.1584162460344385, |
| "learning_rate": 1.2899750670547473e-05, |
| "loss": 0.4761, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.2388059701492535, |
| "grad_norm": 0.1669406945768043, |
| "learning_rate": 1.2550425198516973e-05, |
| "loss": 0.4807, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.2574626865671643, |
| "grad_norm": 0.1755571617029938, |
| "learning_rate": 1.2207435566170722e-05, |
| "loss": 0.4836, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.2761194029850746, |
| "grad_norm": 0.16049791652334905, |
| "learning_rate": 1.1870927137330267e-05, |
| "loss": 0.48, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.294776119402985, |
| "grad_norm": 0.14841988134789108, |
| "learning_rate": 1.1541042528992152e-05, |
| "loss": 0.4797, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.3134328358208958, |
| "grad_norm": 0.14669420324350554, |
| "learning_rate": 1.1217921550884774e-05, |
| "loss": 0.4781, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.332089552238806, |
| "grad_norm": 0.16275970644355697, |
| "learning_rate": 1.0901701146215085e-05, |
| "loss": 0.4677, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.3507462686567164, |
| "grad_norm": 0.15639224942974775, |
| "learning_rate": 1.0592515333630128e-05, |
| "loss": 0.4795, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.3694029850746268, |
| "grad_norm": 0.17837315275471302, |
| "learning_rate": 1.029049515041808e-05, |
| "loss": 0.4757, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.388059701492537, |
| "grad_norm": 0.15999076690352537, |
| "learning_rate": 9.99576859697277e-06, |
| "loss": 0.483, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.406716417910448, |
| "grad_norm": 0.14623430655091096, |
| "learning_rate": 9.708460582545337e-06, |
| "loss": 0.479, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.425373134328358, |
| "grad_norm": 0.14446705361316536, |
| "learning_rate": 9.428692872305925e-06, |
| "loss": 0.4782, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.4440298507462686, |
| "grad_norm": 0.1499556136205738, |
| "learning_rate": 9.15658403573792e-06, |
| "loss": 0.4771, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.4626865671641793, |
| "grad_norm": 0.13688966492338145, |
| "learning_rate": 8.892249396386513e-06, |
| "loss": 0.477, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.4813432835820897, |
| "grad_norm": 0.1405824099115518, |
| "learning_rate": 8.635800982982958e-06, |
| "loss": 0.4801, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.1556802805035451, |
| "learning_rate": 8.387347481965244e-06, |
| "loss": 0.4767, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.5186567164179103, |
| "grad_norm": 0.14957588217337456, |
| "learning_rate": 8.14699419141525e-06, |
| "loss": 0.4796, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.5373134328358207, |
| "grad_norm": 0.15266928757717518, |
| "learning_rate": 7.914842976431932e-06, |
| "loss": 0.4752, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.5559701492537314, |
| "grad_norm": 0.15337919631830368, |
| "learning_rate": 7.690992225959465e-06, |
| "loss": 0.4805, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.574626865671642, |
| "grad_norm": 0.14478875983801728, |
| "learning_rate": 7.4755368110886366e-06, |
| "loss": 0.4768, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.593283582089552, |
| "grad_norm": 0.15398254303144288, |
| "learning_rate": 7.268568044849132e-06, |
| "loss": 0.4765, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.611940298507463, |
| "grad_norm": 0.15439199576307303, |
| "learning_rate": 7.0701736435098155e-06, |
| "loss": 0.4782, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.6305970149253732, |
| "grad_norm": 0.14978173171132997, |
| "learning_rate": 6.880437689403316e-06, |
| "loss": 0.4741, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.6492537313432836, |
| "grad_norm": 0.1412522116274614, |
| "learning_rate": 6.699440595290754e-06, |
| "loss": 0.4778, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.667910447761194, |
| "grad_norm": 0.15039092893602207, |
| "learning_rate": 6.527259070281722e-06, |
| "loss": 0.4816, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.6865671641791042, |
| "grad_norm": 0.14048749084616627, |
| "learning_rate": 6.363966087323844e-06, |
| "loss": 0.48, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.705223880597015, |
| "grad_norm": 0.14837195306929193, |
| "learning_rate": 6.209630852275836e-06, |
| "loss": 0.4715, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.7238805970149254, |
| "grad_norm": 0.14480369337511617, |
| "learning_rate": 6.06431877457709e-06, |
| "loss": 0.4752, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.7425373134328357, |
| "grad_norm": 0.14464453043475206, |
| "learning_rate": 5.928091439526226e-06, |
| "loss": 0.4758, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.7611940298507465, |
| "grad_norm": 0.14318518863987956, |
| "learning_rate": 5.801006582180398e-06, |
| "loss": 0.4795, |
| "step": 740 |
| }, |
| { |
| "epoch": 2.779850746268657, |
| "grad_norm": 0.1378858672961454, |
| "learning_rate": 5.683118062886346e-06, |
| "loss": 0.478, |
| "step": 745 |
| }, |
| { |
| "epoch": 2.798507462686567, |
| "grad_norm": 0.1473158474351985, |
| "learning_rate": 5.574475844453634e-06, |
| "loss": 0.477, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.8171641791044775, |
| "grad_norm": 0.13519605107478774, |
| "learning_rate": 5.475125970979702e-06, |
| "loss": 0.4777, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.835820895522388, |
| "grad_norm": 0.1469374974433458, |
| "learning_rate": 5.385110548335753e-06, |
| "loss": 0.4746, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.8544776119402986, |
| "grad_norm": 0.13682187728455042, |
| "learning_rate": 5.30446772632166e-06, |
| "loss": 0.4778, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.873134328358209, |
| "grad_norm": 0.1468534460053792, |
| "learning_rate": 5.233231682497572e-06, |
| "loss": 0.4791, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.8917910447761193, |
| "grad_norm": 0.1381366037912041, |
| "learning_rate": 5.171432607698975e-06, |
| "loss": 0.4737, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.91044776119403, |
| "grad_norm": 0.13873604296833256, |
| "learning_rate": 5.119096693241395e-06, |
| "loss": 0.4785, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.9291044776119404, |
| "grad_norm": 0.1354800953615682, |
| "learning_rate": 5.07624611982014e-06, |
| "loss": 0.4719, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.9477611940298507, |
| "grad_norm": 0.1412434132198824, |
| "learning_rate": 5.0428990481098275e-06, |
| "loss": 0.481, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.966417910447761, |
| "grad_norm": 0.14472205562980991, |
| "learning_rate": 5.01906961106762e-06, |
| "loss": 0.4742, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.9850746268656714, |
| "grad_norm": 0.1389514632202056, |
| "learning_rate": 5.004767907943488e-06, |
| "loss": 0.4824, |
| "step": 800 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 804, |
| "total_flos": 1465863748190208.0, |
| "train_loss": 0.52242068924121, |
| "train_runtime": 15658.3478, |
| "train_samples_per_second": 6.572, |
| "train_steps_per_second": 0.051 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 804, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 100, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1465863748190208.0, |
| "train_batch_size": 16, |
| "trial_name": null, |
| "trial_params": null |
| } |